hexsha (stringlengths 40..40) | size (int64 4..1.05M) | content (stringlengths 4..1.05M) | avg_line_length (float64 1.33..100) | max_line_length (int64 1..1k) | alphanum_fraction (float64 0.25..1) |
---|---|---|---|---|---|
180f13d03719529ce41a74f5116be50658d35b85 | 708 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
// error-pattern: instantiating a type parameter with an incompatible type
struct S<T:Freeze> {
s: T,
cant_nest: ()
}
fn main() {
let a1 = ~S{ s: true, cant_nest: () };
let _a2 = ~S{ s: a1, cant_nest: () };
}
| 32.181818 | 74 | 0.689266 |
9197442bc90e5cc003c7c560307c8f459bf446e2 | 8,914 | // Generated from definition io.k8s.api.core.v1.PersistentVolumeClaimSpec
/// PersistentVolumeClaimSpec describes the common attributes of storage devices and allows a Source for provider-specific attributes
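/// A minimal construction sketch (the access mode and storage class values below are illustrative, not defaults):
///
/// ```ignore
/// let spec = PersistentVolumeClaimSpec {
///     access_modes: vec!["ReadWriteOnce".to_owned()],
///     storage_class_name: Some("standard".to_owned()),
///     ..Default::default()
/// };
/// ```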
#[derive(Clone, Debug, Default, PartialEq)]
pub struct PersistentVolumeClaimSpec {
/// AccessModes contains the desired access modes the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1
pub access_modes: Vec<String>,
/// This field can be used to specify either: * An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot - Beta) * An existing PVC (PersistentVolumeClaim) * An existing custom resource/object that implements data population (Alpha) In order to use VolumeSnapshot object types, the appropriate feature gate must be enabled (VolumeSnapshotDataSource or AnyVolumeDataSource) If the provisioner or an external controller can support the specified data source, it will create a new volume based on the contents of the specified data source. If the specified data source is not supported, the volume will not be created and the failure will be reported as an event. In the future, we plan to support more data source types and the behavior of the provisioner may change.
pub data_source: Option<crate::api::core::v1::TypedLocalObjectReference>,
/// Resources represents the minimum resources the volume should have. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources
pub resources: Option<crate::api::core::v1::ResourceRequirements>,
/// A label query over volumes to consider for binding.
pub selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector>,
/// Name of the StorageClass required by the claim. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1
pub storage_class_name: Option<String>,
/// volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec.
pub volume_mode: Option<String>,
/// VolumeName is the binding reference to the PersistentVolume backing this claim.
pub volume_name: Option<String>,
}
impl<'de> crate::serde::Deserialize<'de> for PersistentVolumeClaimSpec {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_access_modes,
Key_data_source,
Key_resources,
Key_selector,
Key_storage_class_name,
Key_volume_mode,
Key_volume_name,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"accessModes" => Field::Key_access_modes,
"dataSource" => Field::Key_data_source,
"resources" => Field::Key_resources,
"selector" => Field::Key_selector,
"storageClassName" => Field::Key_storage_class_name,
"volumeMode" => Field::Key_volume_mode,
"volumeName" => Field::Key_volume_name,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = PersistentVolumeClaimSpec;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("PersistentVolumeClaimSpec")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_access_modes: Option<Vec<String>> = None;
let mut value_data_source: Option<crate::api::core::v1::TypedLocalObjectReference> = None;
let mut value_resources: Option<crate::api::core::v1::ResourceRequirements> = None;
let mut value_selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector> = None;
let mut value_storage_class_name: Option<String> = None;
let mut value_volume_mode: Option<String> = None;
let mut value_volume_name: Option<String> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_access_modes => value_access_modes = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_data_source => value_data_source = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_resources => value_resources = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_selector => value_selector = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_storage_class_name => value_storage_class_name = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_volume_mode => value_volume_mode = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_volume_name => value_volume_name = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(PersistentVolumeClaimSpec {
access_modes: value_access_modes.unwrap_or_default(),
data_source: value_data_source,
resources: value_resources,
selector: value_selector,
storage_class_name: value_storage_class_name,
volume_mode: value_volume_mode,
volume_name: value_volume_name,
})
}
}
deserializer.deserialize_struct(
"PersistentVolumeClaimSpec",
&[
"accessModes",
"dataSource",
"resources",
"selector",
"storageClassName",
"volumeMode",
"volumeName",
],
Visitor,
)
}
}
impl crate::serde::Serialize for PersistentVolumeClaimSpec {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
"PersistentVolumeClaimSpec",
usize::from(!self.access_modes.is_empty()) +
self.data_source.as_ref().map_or(0, |_| 1) +
self.resources.as_ref().map_or(0, |_| 1) +
self.selector.as_ref().map_or(0, |_| 1) +
self.storage_class_name.as_ref().map_or(0, |_| 1) +
self.volume_mode.as_ref().map_or(0, |_| 1) +
self.volume_name.as_ref().map_or(0, |_| 1),
)?;
if !self.access_modes.is_empty() {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "accessModes", &self.access_modes)?;
}
if let Some(value) = &self.data_source {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "dataSource", value)?;
}
if let Some(value) = &self.resources {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "resources", value)?;
}
if let Some(value) = &self.selector {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "selector", value)?;
}
if let Some(value) = &self.storage_class_name {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "storageClassName", value)?;
}
if let Some(value) = &self.volume_mode {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "volumeMode", value)?;
}
if let Some(value) = &self.volume_name {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "volumeName", value)?;
}
crate::serde::ser::SerializeStruct::end(state)
}
}
| 53.698795 | 785 | 0.601862 |
5dab787cba7d9071872dbe695ee2fd19b71f8ee2 | 22,482 | use self::{
module::ModuleEntryIter,
thread::{ThreadIdIter, ThreadIter},
};
pub use self::{
module::{Module, ModuleEntry, ModuleInfo},
thread::{PriorityLevel, Thread},
};
use crate::{Error, Handle, WinResult};
use bitflags::bitflags;
use std::{
ffi::{OsStr, OsString},
mem,
ops::Deref,
os::windows::{
io::{AsRawHandle, FromRawHandle, IntoRawHandle, RawHandle},
prelude::*,
},
path::PathBuf,
};
use widestring::WideCString;
use winapi::{
shared::{
basetsd::DWORD_PTR,
minwindef::{DWORD, HMODULE, MAX_PATH},
},
um::{
handleapi::INVALID_HANDLE_VALUE,
libloaderapi::GetModuleHandleW,
processthreadsapi::{
GetCurrentProcess,
GetExitCodeProcess,
GetPriorityClass,
GetProcessId,
OpenProcess,
SetPriorityClass,
TerminateProcess,
},
psapi::{EnumProcessModulesEx, LIST_MODULES_ALL},
tlhelp32::{
CreateToolhelp32Snapshot,
Process32Next,
PROCESSENTRY32,
TH32CS_SNAPMODULE,
TH32CS_SNAPMODULE32,
TH32CS_SNAPPROCESS,
TH32CS_SNAPTHREAD,
},
winbase::{
GetProcessAffinityMask,
QueryFullProcessImageNameW,
SetProcessAffinityMask,
ABOVE_NORMAL_PRIORITY_CLASS,
BELOW_NORMAL_PRIORITY_CLASS,
HIGH_PRIORITY_CLASS,
IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS,
PROCESS_MODE_BACKGROUND_BEGIN,
PROCESS_MODE_BACKGROUND_END,
REALTIME_PRIORITY_CLASS,
},
winnt::{self, PROCESS_ALL_ACCESS, WCHAR},
},
};
mod module;
mod thread;
/// A handle to a running process.
#[derive(Debug)]
pub struct Process {
handle: Handle,
parent_id: Option<u32>,
}
impl Process {
/// Creates a process handle from a PID. Requests all access permissions.
pub fn from_id(id: u32) -> WinResult<Process> {
unsafe {
let handle = OpenProcess(PROCESS_ALL_ACCESS, 0, id);
if handle.is_null() {
Err(Error::last_os_error())
} else {
Ok(Process {
handle: Handle::new(handle),
parent_id: None,
})
}
}
}
/// Creates a process handle from a PID. Requests the specified access permissions.
pub fn from_id_with_access(
id: u32,
parent_id: Option<u32>,
access: Access,
) -> WinResult<Process> {
unsafe {
let handle = OpenProcess(access.bits, 0, id);
if handle.is_null() {
Err(Error::last_os_error())
} else {
Ok(Process {
handle: Handle::new(handle),
parent_id,
})
}
}
}
/// Creates a process handle from a name. Requests all access.
pub fn from_name(name: &str) -> WinResult<Process> {
Process::all()?
.find(|p| p.name().map(|n| n == name).unwrap_or(false))
.ok_or(Error::NoProcess(name.to_string()))
}
/// Creates a process handle from a name.
pub fn from_name_with_access(name: &str, access: Access) -> WinResult<Process> {
Process::all_with_access(access)?
.find(|p| p.name().map(|n| n == name).unwrap_or(false))
.ok_or(Error::NoProcess(name.to_string()))
}
/// Creates a process handle from a handle.
pub fn from_handle(handle: Handle) -> Process {
Process {
handle,
parent_id: None,
}
}
/// Returns a handle to the current process.
pub fn current() -> Process {
unsafe { Process::from_handle(Handle::from_raw_handle(GetCurrentProcess() as RawHandle)) }
}
/// Returns a reference to the inner handle.
pub fn handle(&self) -> &Handle {
&self.handle
}
/// Enumerates all running processes. Requests all access.
pub fn all() -> WinResult<impl Iterator<Item = Process>> {
unsafe {
let snap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
if snap == INVALID_HANDLE_VALUE {
Err(Error::last_os_error())
} else {
Ok(ProcessIter {
snapshot: Handle::new(snap),
access: Access::PROCESS_ALL_ACCESS,
}
.filter_map(Result::ok))
}
}
}
/// Enumerates all running processes.
pub fn all_with_access(access: Access) -> WinResult<impl Iterator<Item = Process>> {
unsafe {
let snap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
if snap == INVALID_HANDLE_VALUE {
Err(Error::last_os_error())
} else {
Ok(ProcessIter {
snapshot: Handle::new(snap),
access,
}
.filter_map(Result::ok))
}
}
}
/// Returns the process's id.
pub fn id(&self) -> u32 {
unsafe { GetProcessId(self.handle.as_raw_handle() as winnt::HANDLE) }
}
/// Returns the id of the parent process.
///
    /// To use this method, the `Process` must have been obtained from `Process::all(..)` or
    /// `Process::all_with_access(..)`; handles created by other constructors return `None`.
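    ///
    /// A minimal sketch (assuming a process named `"explorer.exe"` is running; the name is illustrative):
    ///
    /// ```ignore
    /// let process = Process::all()?
    ///     .find(|p| p.name().map(|n| n == "explorer.exe").unwrap_or(false))
    ///     .expect("process not found");
    /// let parent = process.parent_id(); // Some(pid), because it came from `Process::all()`
    /// ```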
pub fn parent_id(&self) -> Option<u32> {
self.parent_id
}
/// Returns true if the process is running.
pub fn is_running(&self) -> bool {
unsafe {
let mut status = 0;
GetExitCodeProcess(self.handle.as_raw_handle() as winnt::HANDLE, &mut status);
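            // 259 == STILL_ACTIVE: the exit code reported while the process has not yet terminated.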
status == 259
}
}
/// Returns the path of the executable of the process.
pub fn path(&self) -> WinResult<PathBuf> {
unsafe {
let mut size = MAX_PATH as u32;
let mut buffer: [WCHAR; MAX_PATH] = mem::zeroed();
let ret = QueryFullProcessImageNameW(
self.handle.as_raw_handle() as winnt::HANDLE,
0,
buffer.as_mut_ptr(),
&mut size,
);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(OsString::from_wide(&buffer[0..size as usize]).into())
}
}
}
/// Returns the unqualified name of the executable of the process.
pub fn name(&self) -> WinResult<String> {
Ok(self
.path()?
.file_name()
.unwrap()
.to_string_lossy()
.into_owned())
}
/// Returns the priority class of the process.
///
/// The handle must have the `PROCESS_QUERY_INFORMATION` or `PROCESS_QUERY_LIMITED_INFORMATION`
/// access right.
pub fn priority(&self) -> WinResult<PriorityClass> {
unsafe {
let ret = GetPriorityClass(self.handle.as_raw_handle() as winnt::HANDLE);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(PriorityClass::from_code(ret))
}
}
}
/// Sets the priority class of the process.
///
/// The handle must have the `PROCESS_SET_INFORMATION` access right.
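    ///
    /// A minimal sketch (assumes the handle was opened with that right; the process name is illustrative):
    ///
    /// ```ignore
    /// let mut process = Process::from_name("notepad.exe")?;
    /// process.set_priority(PriorityClass::BelowNormal)?;
    /// ```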
pub fn set_priority(&mut self, priority: PriorityClass) -> WinResult {
unsafe {
let ret = SetPriorityClass(
self.handle.as_raw_handle() as winnt::HANDLE,
priority.as_code(),
);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
/// Begins background processing mode.
///
/// **This can be initiated only if the handle refers to the current process.**
///
/// The system lowers the resource scheduling priorities of the process (and its threads) so
/// that it can perform background work without significantly affecting activity in
/// the foreground.
///
/// The function fails if the process is already in background processing mode.
///
/// The handle must have the `PROCESS_SET_INFORMATION` access right.
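    ///
    /// A minimal sketch using the current process:
    ///
    /// ```ignore
    /// let mut me = Process::current();
    /// me.start_background_mode()?;
    /// // ... do low-priority work ...
    /// me.end_background_mode()?;
    /// ```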
pub fn start_background_mode(&mut self) -> WinResult {
unsafe {
let ret = SetPriorityClass(
self.handle.as_raw_handle() as winnt::HANDLE,
PROCESS_MODE_BACKGROUND_BEGIN,
);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
/// Ends background processing mode.
///
/// **This can be initiated only if the handle refers to the current process.**
///
/// The system restores the resource scheduling priorities of the process (and its threads) as
/// they were before the process entered background processing mode.
///
/// The function fails if the process is not in background processing mode.
///
/// The handle must have the `PROCESS_SET_INFORMATION` access right.
pub fn end_background_mode(&mut self) -> WinResult {
unsafe {
let ret = SetPriorityClass(
self.handle.as_raw_handle() as winnt::HANDLE,
PROCESS_MODE_BACKGROUND_END,
);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
/// Terminates the process.
///
/// The handle must have the `PROCESS_TERMINATE` access right.
pub fn terminate(&mut self, exit_code: u32) -> WinResult {
unsafe {
let ret = TerminateProcess(self.handle.as_raw_handle() as winnt::HANDLE, exit_code);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
/// Returns the affinity mask of the process.
pub fn affinity_mask(&self) -> WinResult<usize> {
unsafe {
let mut process_mask: DWORD_PTR = 0;
let mut system_mask: DWORD_PTR = 0;
let ret = GetProcessAffinityMask(
self.handle.as_raw_handle() as winnt::HANDLE,
&mut process_mask,
&mut system_mask,
);
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(process_mask as usize)
}
}
}
/// Sets the affinity mask of the process.
///
/// A process affinity mask is a bit vector in which each bit represents a logical processor
/// that a process is allowed to run on.
///
/// Setting an affinity mask for a process or thread can result in threads receiving less
/// processor time, as the system is restricted from running the threads on certain processors.
/// In most cases, it is better to let the system select an available processor.
///
/// If the new process affinity mask does not specify the processor that is currently running
/// the process, the process is rescheduled on one of the allowable processors.
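    ///
    /// A minimal sketch restricting a process to logical processors 0 and 1 (mask `0b11`; the process name is illustrative):
    ///
    /// ```ignore
    /// let mut process = Process::from_name("notepad.exe")?;
    /// process.set_affinity_mask(0b11)?;
    /// ```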
pub fn set_affinity_mask(&mut self, mask: u32) -> WinResult {
unsafe {
            let ret = SetProcessAffinityMask(
                self.handle.as_raw_handle() as winnt::HANDLE,
                mask as DWORD_PTR,
            );
if ret == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
}
}
// /// Sets the affinity of the process to the single specified processor.
// ///
// /// If the processor index equals or exceeds the width of [`DWORD`], the mask is not changed.
// pub fn set_affinity(&mut self, processor: u8) -> WinResult {
// if (processor as usize) < mem::size_of::<u32>() * 8 {
// self.set_affinity_mask(1 << processor as u32)
// } else {
// Ok(())
// }
// }
/// Returns an iterator over the threads of the process.
pub fn threads<'a>(&'a self) -> WinResult<impl Iterator<Item = Thread> + 'a> {
unsafe {
let snap = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0);
if snap == INVALID_HANDLE_VALUE {
Err(Error::last_os_error())
} else {
Ok(ThreadIter {
process: &self,
snapshot: Handle::new(snap),
}
.filter_map(Result::ok))
}
}
}
/// Returns an iterator over the ids of threads of the process.
pub fn thread_ids<'a>(&'a self) -> WinResult<impl Iterator<Item = u32> + 'a> {
unsafe {
let snap = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0);
if snap == INVALID_HANDLE_VALUE {
Err(Error::last_os_error())
} else {
Ok(ThreadIdIter {
process: &self,
snapshot: Handle::new(snap),
})
}
}
}
/// Returns the loaded module with the specified name/path.
pub fn module<N: AsRef<OsStr>>(&self, name: N) -> WinResult<Module> {
unsafe {
let name = WideCString::from_os_str(name).map_err(|e| Error::NulErrorW {
pos: e.nul_position(),
data: e.into_vec(),
})?;
let ret = GetModuleHandleW(name.as_ptr());
if ret.is_null() {
Err(Error::last_os_error())
} else {
Ok(Module {
handle: ret,
process: self,
})
}
}
}
/// Returns a list of the modules of the process.
pub fn module_list(&self) -> WinResult<Vec<Module>> {
unsafe {
let mut mod_handles = Vec::new();
let mut reserved = 0;
let mut needed = 0;
{
let enum_mods = |mod_handles: &mut [HMODULE], needed| {
let res = EnumProcessModulesEx(
self.as_raw_handle() as winnt::HANDLE,
mod_handles.as_mut_ptr(),
mem::size_of_val(&mod_handles[..]) as _,
needed,
LIST_MODULES_ALL,
);
if res == 0 {
Err(Error::last_os_error())
} else {
Ok(())
}
};
loop {
enum_mods(&mut mod_handles, &mut needed)?;
if needed <= reserved {
break;
}
reserved = needed;
mod_handles.resize(needed as usize, mem::zeroed());
}
}
let modules = mod_handles[..needed as usize / mem::size_of::<HMODULE>()]
.iter()
.map(|&handle| Module {
handle,
process: self,
})
.collect();
Ok(modules)
}
}
/// Returns an iterator over the modules of the process.
pub fn module_entries<'a>(&'a self) -> WinResult<impl Iterator<Item = ModuleEntry> + 'a> {
unsafe {
let snap = CreateToolhelp32Snapshot(TH32CS_SNAPMODULE | TH32CS_SNAPMODULE32, self.id());
if snap == INVALID_HANDLE_VALUE {
Err(Error::last_os_error())
} else {
Ok(ModuleEntryIter {
process: &self,
snapshot: Handle::new(snap),
})
}
}
}
}
impl AsRawHandle for Process {
fn as_raw_handle(&self) -> RawHandle {
self.handle.as_raw_handle()
}
}
impl Deref for Process {
type Target = winnt::HANDLE;
fn deref(&self) -> &winnt::HANDLE {
&*self.handle
}
}
impl FromRawHandle for Process {
unsafe fn from_raw_handle(handle: RawHandle) -> Process {
Process {
handle: Handle::new(handle as winnt::HANDLE),
parent_id: None,
}
}
}
impl IntoRawHandle for Process {
fn into_raw_handle(self) -> RawHandle {
self.handle.into_raw_handle()
}
}
#[derive(Debug)]
struct ProcessIter {
snapshot: Handle,
access: Access,
}
impl Iterator for ProcessIter {
type Item = WinResult<Process>;
fn next(&mut self) -> Option<WinResult<Process>> {
unsafe {
let mut entry: PROCESSENTRY32 = mem::zeroed();
entry.dwSize = mem::size_of::<PROCESSENTRY32>() as DWORD;
let ret = Process32Next(self.snapshot.as_raw_handle() as winnt::HANDLE, &mut entry);
if ret == 0 {
None
} else {
Some(Process::from_id_with_access(
entry.th32ProcessID,
Some(entry.th32ParentProcessID),
self.access,
))
}
}
}
}
bitflags! {
/// Windows process-related access permission flags.
pub struct Access: u32 {
/// Required to delete the object.
const DELETE = winnt::DELETE;
/// Required to read information in the security descriptor for the object, not including
/// the information in the SACL. To read or write the SACL, you must request the
/// `ACCESS_SYSTEM_SECURITY` access right. For more information, see [SACL Access Right](https://msdn.microsoft.com/en-us/library/windows/desktop/aa379321\(v=vs.85\).aspx).
const READ_CONTROL = winnt::READ_CONTROL;
/// Required to modify the DACL in the security descriptor for the object.
const WRITE_DAC = winnt::WRITE_DAC;
/// Required to change the owner in the security descriptor for the object.
const WRITE_OWNER = winnt::WRITE_OWNER;
/// The right to use the object for synchronization.
/// This enables a thread to wait until the object is in the signaled state.
const SYNCHRONIZE = winnt::SYNCHRONIZE;
/// Union of `DELETE | READ_CONTROL | WRITE_DAC | WRITE_OWNER`.
const STANDARD_RIGHTS_REQUIRED = winnt::STANDARD_RIGHTS_REQUIRED;
/// Required to terminate a process.
const PROCESS_TERMINATE = winnt::PROCESS_TERMINATE;
/// Required to create a thread.
const PROCESS_CREATE_THREAD = winnt::PROCESS_CREATE_THREAD;
const PROCESS_SET_SESSIONID = winnt::PROCESS_SET_SESSIONID;
/// Required to perform an operation on the address space of a process.
const PROCESS_VM_OPERATION = winnt::PROCESS_VM_OPERATION;
/// Required to read memory in a process.
const PROCESS_VM_READ = winnt::PROCESS_VM_READ;
/// Required to write to memory in a process.
const PROCESS_VM_WRITE = winnt::PROCESS_VM_WRITE;
/// Required to duplicate a handle.
const PROCESS_DUP_HANDLE = winnt::PROCESS_DUP_HANDLE;
/// Required to create a process.
const PROCESS_CREATE_PROCESS = winnt::PROCESS_CREATE_PROCESS;
/// Required to set memory limits.
const PROCESS_SET_QUOTA = winnt::PROCESS_SET_QUOTA;
/// Required to set certain information about a process, such as its priority class.
const PROCESS_SET_INFORMATION = winnt::PROCESS_SET_INFORMATION;
/// Required to retrieve certain information about a process, such as its token,
/// exit code, and priority class.
const PROCESS_QUERY_INFORMATION = winnt::PROCESS_QUERY_INFORMATION;
/// Required to suspend or resume a process.
const PROCESS_SUSPEND_RESUME = winnt::PROCESS_SUSPEND_RESUME;
/// Required to retrieve certain information about a process
        /// (exit code, priority class, job status, path).
///
/// A handle that has the `PROCESS_QUERY_INFORMATION` access right is
/// automatically granted `PROCESS_QUERY_LIMITED_INFORMATION`.
const PROCESS_QUERY_LIMITED_INFORMATION = winnt::PROCESS_QUERY_LIMITED_INFORMATION;
const PROCESS_SET_LIMITED_INFORMATION = winnt::PROCESS_SET_LIMITED_INFORMATION;
/// All possible access rights for a process object.
const PROCESS_ALL_ACCESS = Self::STANDARD_RIGHTS_REQUIRED.bits | Self::SYNCHRONIZE.bits | 0xffff;
}
}
impl Default for Access {
/// Returns `Access::PROCESS_ALL_ACCESS`.
fn default() -> Access {
Access::PROCESS_ALL_ACCESS
}
}
/// A process scheduling priority class.
///
/// See [Scheduling Priorities](https://docs.microsoft.com/en-us/windows/desktop/procthread/scheduling-priorities)
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum PriorityClass {
Idle,
BelowNormal,
Normal,
AboveNormal,
High,
Realtime,
}
impl PriorityClass {
fn from_code(code: DWORD) -> PriorityClass {
match code {
IDLE_PRIORITY_CLASS => PriorityClass::Idle,
BELOW_NORMAL_PRIORITY_CLASS => PriorityClass::BelowNormal,
NORMAL_PRIORITY_CLASS => PriorityClass::Normal,
ABOVE_NORMAL_PRIORITY_CLASS => PriorityClass::AboveNormal,
HIGH_PRIORITY_CLASS => PriorityClass::High,
REALTIME_PRIORITY_CLASS => PriorityClass::Realtime,
_ => panic!("Unexpected priority code: {}", code),
}
}
fn as_code(&self) -> DWORD {
match self {
PriorityClass::Idle => IDLE_PRIORITY_CLASS,
PriorityClass::BelowNormal => BELOW_NORMAL_PRIORITY_CLASS,
PriorityClass::Normal => NORMAL_PRIORITY_CLASS,
PriorityClass::AboveNormal => ABOVE_NORMAL_PRIORITY_CLASS,
PriorityClass::High => HIGH_PRIORITY_CLASS,
PriorityClass::Realtime => REALTIME_PRIORITY_CLASS,
}
}
}
impl Default for PriorityClass {
fn default() -> PriorityClass {
PriorityClass::Normal
}
}
//mod tests {
// #[allow(unused_imports)]
// use super::*;
//
// #[test]
// fn enumerates_processes() {
// let procs: Vec<_> = Process::all().unwrap().collect();
// assert_eq!(procs.is_empty(), false);
// println!("{:?}", procs);
// }
//
// #[test]
// fn accesses_process_names() {
// let names: Vec<_> = Process::all()
// .unwrap()
// .filter_map(|p| p.name().ok())
// .collect();
// assert_eq!(names.is_empty(), false);
// println!("{:?}", names);
// }
//}
| 33.858434 | 180 | 0.55391 |
abeb1ef1ea0b02ad629fbef981357c6e9e1c90cf | 445 | use amethyst_assets::Handle;
use amethyst_core::ecs::{Component, DenseVecStorage};
use amethyst_rendy::Texture;
/// Image used UI widgets, often as background.
#[derive(Debug, Clone, PartialEq)]
pub enum UiImage {
/// An image backed by texture handle
Texture(Handle<Texture>),
/// An image entirely covered by single solid color
SolidColor([f32; 4]),
}
impl Component for UiImage {
type Storage = DenseVecStorage<Self>;
}
| 26.176471 | 55 | 0.719101 |
7a19ee5de4201c9279d2fd8201b31b6de56b4e17 | 4,555 | //! COM runtime facilities
//!
//! This includes initializing the COM runtime as well as creating instances of COM classes
use crate::sys::{
CoCreateInstance, CoGetClassObject, CoIncrementMTAUsage, CoInitializeEx, CoUninitialize,
CLSCTX_INPROC_SERVER, CLSID, COINIT_APARTMENTTHREADED, COINIT_MULTITHREADED, FAILED, HRESULT,
IID, S_FALSE, S_OK,
};
use std::ffi::c_void;
use crate::Interface;
/// Initialize a new multithreaded apartment (MTA) runtime. This will ensure
/// that an MTA is running for the process. Every new thread will implicitly
/// be in the MTA unless a different apartment type is chosen (through [`init_apartment`])
///
/// This calls `CoIncrementMTAUsage`
///
/// This function only needs to be called once per process.
pub fn init_runtime() -> Result<(), HRESULT> {
let mut _cookie = std::ptr::null_mut::<c_void>();
match unsafe { CoIncrementMTAUsage(&mut _cookie as *mut _ as *mut _) } {
// S_OK indicates the runtime was initialized
S_OK => Ok(()),
// Any other result is considered an error here.
hr => Err(hr),
}
}
/// The threading model of the current thread's apartment
#[repr(u32)]
#[non_exhaustive]
pub enum ApartmentType {
/// A single-threaded apartment (COINIT_APARTMENTTHREADED)
SingleThreaded = COINIT_APARTMENTTHREADED,
/// A multi-threaded apartment (COINIT_MULTITHREADED)
Multithreaded = COINIT_MULTITHREADED,
}
/// Establish an apartment type for the current thread.
///
/// This can only be called once per thread and will return an error if
/// it is called more than once.
///
/// In general this should only be called on threads created by the user.
///
/// This wraps `CoInitializeEx`. The user is still responsible for establishing
/// a message pump in the case of an STA
// TODO: create a special `spawn` function for spawning a thread
// with a specific apartment type.
// TODO: add helpers for establishing a message pump
pub fn init_apartment(apartment_type: ApartmentType) -> Result<(), HRESULT> {
match unsafe { CoInitializeEx(std::ptr::null_mut::<c_void>(), apartment_type as u32) } {
// S_OK indicates the runtime was initialized
S_OK | S_FALSE => Ok(()),
// Any other result is considered an error here.
hr => Err(hr),
}
}
/// Uninitialize a COM apartment thread.
///
/// This uses `CoUninitialize`
///
/// This should only be called if the user already initialized the thread as a specific apartment type
/// (usually started through [`init_apartment`]).
/// https://docs.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-couninitialize
pub fn deinit_apartment() {
unsafe { CoUninitialize() }
}
/// An apartment runtime configuration.
///
/// This initializes a thread as a certain [`ApartmentType`] and uninitializes on `drop`
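///
/// A minimal sketch (the apartment is uninitialized again when `_apartment` is dropped):
///
/// ```ignore
/// let _apartment = ApartmentRuntime::new(ApartmentType::SingleThreaded)?;
/// // ... use COM on this thread ...
/// ```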
pub struct ApartmentRuntime {
_priv: *const (), // Ensure that this struct is !Send
}
impl ApartmentRuntime {
/// Initialize the thread as an [`ApartmentType`]
pub fn new(apartment_type: ApartmentType) -> Result<Self, HRESULT> {
init_apartment(apartment_type)?;
Ok(Self {
_priv: std::ptr::null(),
})
}
}
impl Drop for ApartmentRuntime {
fn drop(&mut self) {
deinit_apartment()
}
}
/// Get the class object with the associated [`CLSID`]
///
/// Calls `CoGetClassObject` internally
pub fn get_class_object<T: Interface>(class_id: &CLSID) -> Result<T, HRESULT> {
let mut class = None;
let hr = unsafe {
CoGetClassObject(
class_id as *const CLSID,
CLSCTX_INPROC_SERVER,
std::ptr::null_mut::<c_void>(),
&T::IID as *const IID,
&mut class as *mut _ as _,
)
};
if FAILED(hr) {
return Err(hr);
}
Ok(class.unwrap())
}
/// Create an instance of a COM class with the associated class id
///
/// Calls `CoCreateInstance` internally
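///
/// A minimal sketch; `ICat` and `CLSID_CAT_CLASS` are hypothetical placeholders for a real
/// interface type and class id:
///
/// ```ignore
/// let cat: ICat = create_instance(&CLSID_CAT_CLASS)?;
/// ```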
pub fn create_instance<T: Interface>(class_id: &CLSID) -> Result<T, HRESULT> {
unsafe { Ok(create_raw_instance::<T>(class_id, std::ptr::null_mut())?) }
}
/// A helper for creating both regular and aggregated instances
unsafe fn create_raw_instance<T: Interface>(
class_id: &CLSID,
outer: *mut c_void,
) -> Result<T, HRESULT> {
let mut instance = None;
let hr = CoCreateInstance(
class_id as *const CLSID,
outer,
CLSCTX_INPROC_SERVER,
&T::IID as *const IID,
&mut instance as *mut _ as _,
);
if FAILED(hr) {
return Err(hr);
}
Ok(instance.unwrap())
}
| 32.077465 | 102 | 0.669155 |
ebdc3bb9c09baf670fd9f0ce05a22c8d0ef7b497 | 1,550 | use regex::Regex;
fn parse(data: &str) -> Vec<i64> {
let re = Regex::new(r"-?\d+").unwrap();
re.find_iter(data)
.map(|c| c.as_str().parse::<i64>().unwrap())
.collect()
}
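// Simulates one probe launch: each step adds (vx, vy) to the position, drag pulls vx toward 0
// and gravity lowers vy by 1. `limits` is [x_min, x_max, y_min, y_max]; returns whether the
// target area was hit and the highest y reached along the way.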
fn shoot(mut vx: i64, mut vy: i64, limits: &[i64]) -> (bool, i64) {
let mut x = 0i64;
let mut y = 0;
let is_within =
|x: i64, y: i64| x >= limits[0] && x <= limits[1] && y >= limits[2] && y <= limits[3];
let mut highest = y;
while y >= limits[2] {
x += vx;
y += vy;
vx -= vx.signum();
vy -= 1;
highest = highest.max(y);
if is_within(x, y) {
return (true, highest);
}
}
(false, 0)
}
fn solve(limits: &[i64]) -> (i64, usize) {
let mut p1 = 0;
let mut p2 = 0;
let y_max = limits[2].abs().max(limits[3].abs());
for vx in 1..=limits[1] {
for vy in -y_max..=y_max {
let r = shoot(vx, vy, limits);
if r.0 {
p1 = p1.max(r.1);
p2 += 1;
}
}
}
(p1, p2)
}
pub fn main() {
let data = std::fs::read_to_string("data/2021/day17").unwrap();
let limits = parse(&data);
let (p1, p2) = solve(&limits);
println!("day17 part1: {}", p1);
println!("day17 part2: {}", p2);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn case1() {
let data = "target area: x=20..30, y=-10..-5";
let limits = parse(&data);
let (p1, p2) = solve(&limits);
assert_eq!(45, p1);
assert_eq!(112, p2);
}
}
| 22.794118 | 94 | 0.455484 |
d9f1b6a3240a43c27c0c226459ec8e600cf255c4 | 4,552 | #[doc = "Register `PRO_DCACHE_MEM_SYNC1` reader"]
pub struct R(crate::R<PRO_DCACHE_MEM_SYNC1_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PRO_DCACHE_MEM_SYNC1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PRO_DCACHE_MEM_SYNC1_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PRO_DCACHE_MEM_SYNC1_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `PRO_DCACHE_MEM_SYNC1` writer"]
pub struct W(crate::W<PRO_DCACHE_MEM_SYNC1_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PRO_DCACHE_MEM_SYNC1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PRO_DCACHE_MEM_SYNC1_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PRO_DCACHE_MEM_SYNC1_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `PRO_DCACHE_MEMSYNC_SIZE` reader - The bits are used to configure the length for invalidate, flush, clean, lock and unlock operations. The manual operations will be issued if it is validate. The auto operations will be issued if it is invalidate. It should be combined with PRO_DCACHE_MEM_SYNC0."]
pub struct PRO_DCACHE_MEMSYNC_SIZE_R(crate::FieldReader<u32, u32>);
impl PRO_DCACHE_MEMSYNC_SIZE_R {
#[inline(always)]
pub(crate) fn new(bits: u32) -> Self {
PRO_DCACHE_MEMSYNC_SIZE_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for PRO_DCACHE_MEMSYNC_SIZE_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PRO_DCACHE_MEMSYNC_SIZE` writer - The bits are used to configure the length for invalidate, flush, clean, lock and unlock operations. The manual operations will be issued if it is validate. The auto operations will be issued if it is invalidate. It should be combined with PRO_DCACHE_MEM_SYNC0."]
pub struct PRO_DCACHE_MEMSYNC_SIZE_W<'a> {
w: &'a mut W,
}
impl<'a> PRO_DCACHE_MEMSYNC_SIZE_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0007_ffff) | (value as u32 & 0x0007_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:18 - The bits are used to configure the length for invalidate, flush, clean, lock and unlock operations. The manual operations will be issued if it is validate. The auto operations will be issued if it is invalidate. It should be combined with PRO_DCACHE_MEM_SYNC0."]
#[inline(always)]
pub fn pro_dcache_memsync_size(&self) -> PRO_DCACHE_MEMSYNC_SIZE_R {
PRO_DCACHE_MEMSYNC_SIZE_R::new((self.bits & 0x0007_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:18 - The bits are used to configure the length for invalidate, flush, clean, lock and unlock operations. The manual operations will be issued if it is validate. The auto operations will be issued if it is invalidate. It should be combined with PRO_DCACHE_MEM_SYNC0."]
#[inline(always)]
pub fn pro_dcache_memsync_size(&mut self) -> PRO_DCACHE_MEMSYNC_SIZE_W {
PRO_DCACHE_MEMSYNC_SIZE_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "register description\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pro_dcache_mem_sync1](index.html) module"]
pub struct PRO_DCACHE_MEM_SYNC1_SPEC;
impl crate::RegisterSpec for PRO_DCACHE_MEM_SYNC1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [pro_dcache_mem_sync1::R](R) reader structure"]
impl crate::Readable for PRO_DCACHE_MEM_SYNC1_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pro_dcache_mem_sync1::W](W) writer structure"]
impl crate::Writable for PRO_DCACHE_MEM_SYNC1_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets PRO_DCACHE_MEM_SYNC1 to value 0"]
impl crate::Resettable for PRO_DCACHE_MEM_SYNC1_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 43.769231 | 421 | 0.690466 |
14c636ef45af13f3b6e287ca580bd9b7b8d66e17 | 10,219 | #[doc = "Reader of register HC11_INT"]
pub type R = crate::R<u32, super::HC11_INT>;
#[doc = "Writer for register HC11_INT"]
pub type W = crate::W<u32, super::HC11_INT>;
#[doc = "Register HC11_INT `reset()`'s with value 0"]
impl crate::ResetValue for super::HC11_INT {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `XFERCOMPL`"]
pub type XFERCOMPL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `XFERCOMPL`"]
pub struct XFERCOMPL_W<'a> {
w: &'a mut W,
}
impl<'a> XFERCOMPL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `CHHLTD`"]
pub type CHHLTD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CHHLTD`"]
pub struct CHHLTD_W<'a> {
w: &'a mut W,
}
impl<'a> CHHLTD_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `AHBERR`"]
pub type AHBERR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `AHBERR`"]
pub struct AHBERR_W<'a> {
w: &'a mut W,
}
impl<'a> AHBERR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `STALL`"]
pub type STALL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `STALL`"]
pub struct STALL_W<'a> {
w: &'a mut W,
}
impl<'a> STALL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `NAK`"]
pub type NAK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NAK`"]
pub struct NAK_W<'a> {
w: &'a mut W,
}
impl<'a> NAK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `ACK`"]
pub type ACK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ACK`"]
pub struct ACK_W<'a> {
w: &'a mut W,
}
impl<'a> ACK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `XACTERR`"]
pub type XACTERR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `XACTERR`"]
pub struct XACTERR_W<'a> {
w: &'a mut W,
}
impl<'a> XACTERR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `BBLERR`"]
pub type BBLERR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BBLERR`"]
pub struct BBLERR_W<'a> {
w: &'a mut W,
}
impl<'a> BBLERR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `FRMOVRUN`"]
pub type FRMOVRUN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FRMOVRUN`"]
pub struct FRMOVRUN_W<'a> {
w: &'a mut W,
}
impl<'a> FRMOVRUN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `DATATGLERR`"]
pub type DATATGLERR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DATATGLERR`"]
pub struct DATATGLERR_W<'a> {
w: &'a mut W,
}
impl<'a> DATATGLERR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
impl R {
#[doc = "Bit 0 - Transfer Completed"]
#[inline(always)]
pub fn xfercompl(&self) -> XFERCOMPL_R {
XFERCOMPL_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Channel Halted"]
#[inline(always)]
pub fn chhltd(&self) -> CHHLTD_R {
CHHLTD_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - AHB Error"]
#[inline(always)]
pub fn ahberr(&self) -> AHBERR_R {
AHBERR_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - STALL Response Received Interrupt"]
#[inline(always)]
pub fn stall(&self) -> STALL_R {
STALL_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - NAK Response Received Interrupt"]
#[inline(always)]
pub fn nak(&self) -> NAK_R {
NAK_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - ACK Response Received/Transmitted Interrupt"]
#[inline(always)]
pub fn ack(&self) -> ACK_R {
ACK_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 7 - Transaction Error"]
#[inline(always)]
pub fn xacterr(&self) -> XACTERR_R {
XACTERR_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - Babble Error"]
#[inline(always)]
pub fn bblerr(&self) -> BBLERR_R {
BBLERR_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - Frame Overrun"]
#[inline(always)]
pub fn frmovrun(&self) -> FRMOVRUN_R {
FRMOVRUN_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Data Toggle Error"]
#[inline(always)]
pub fn datatglerr(&self) -> DATATGLERR_R {
DATATGLERR_R::new(((self.bits >> 10) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Transfer Completed"]
#[inline(always)]
pub fn xfercompl(&mut self) -> XFERCOMPL_W {
XFERCOMPL_W { w: self }
}
#[doc = "Bit 1 - Channel Halted"]
#[inline(always)]
pub fn chhltd(&mut self) -> CHHLTD_W {
CHHLTD_W { w: self }
}
#[doc = "Bit 2 - AHB Error"]
#[inline(always)]
pub fn ahberr(&mut self) -> AHBERR_W {
AHBERR_W { w: self }
}
#[doc = "Bit 3 - STALL Response Received Interrupt"]
#[inline(always)]
pub fn stall(&mut self) -> STALL_W {
STALL_W { w: self }
}
#[doc = "Bit 4 - NAK Response Received Interrupt"]
#[inline(always)]
pub fn nak(&mut self) -> NAK_W {
NAK_W { w: self }
}
#[doc = "Bit 5 - ACK Response Received/Transmitted Interrupt"]
#[inline(always)]
pub fn ack(&mut self) -> ACK_W {
ACK_W { w: self }
}
#[doc = "Bit 7 - Transaction Error"]
#[inline(always)]
pub fn xacterr(&mut self) -> XACTERR_W {
XACTERR_W { w: self }
}
#[doc = "Bit 8 - Babble Error"]
#[inline(always)]
pub fn bblerr(&mut self) -> BBLERR_W {
BBLERR_W { w: self }
}
#[doc = "Bit 9 - Frame Overrun"]
#[inline(always)]
pub fn frmovrun(&mut self) -> FRMOVRUN_W {
FRMOVRUN_W { w: self }
}
#[doc = "Bit 10 - Data Toggle Error"]
#[inline(always)]
pub fn datatglerr(&mut self) -> DATATGLERR_W {
DATATGLERR_W { w: self }
}
}
| 28.62465 | 86 | 0.533614 |
d6a97b799e094aa354e1ac7c3717789526455019 | 1,618 | use crate::bidiiter::rectstate::OnRectState;
use crate::BidiRect;
use crate::BidiView;
use std::iter::Iterator;
/// An iterator type returning items in a rectangular region.
pub struct OnRect<'v, T: 'v, V: BidiView<Output = T>> {
pub(super) view: &'v V,
pub(super) rect: BidiRect,
pub(super) state: OnRectState,
pub(super) by_column: bool,
}
impl<'v, T: 'v, V: BidiView<Output = T>> OnRect<'v, T, V> {
/// Returns an iterator which yields the items with their original
/// coordinates. Note that all the coordinates are relative to the
/// [`BidiView`] (or other data structure) the iterator was created
/// from.
pub fn with_coords(self) -> super::super::immutable_xy::rect::OnRect<'v, T, V> {
self.state.assert_not_started("with_coords()");
super::super::immutable_xy::rect::OnRect {
view: self.view,
rect: self.rect,
by_column: self.by_column,
state: OnRectState::NotStarted,
}
}
/// Returns an iterator which yields the items by columns instead
/// of by rows as it would otherwise do.
pub fn by_column(mut self) -> Self {
self.state.assert_not_started("by_column()");
self.by_column = true;
self
}
}
impl<'v, T: 'v, V: BidiView<Output = T>> Iterator for OnRect<'v, T, V> {
type Item = &'v T;
fn next(&mut self) -> Option<<Self as Iterator>::Item> {
self.state.advance(&self.rect, self.by_column);
if let OnRectState::Iterating(x, y) = self.state {
self.view.get(x, y)
} else {
None
}
}
}
| 32.36 | 84 | 0.605686 |
9c26bf86dd3d9bc3f9b9a3ae68d5e4d87983bc6c | 1,401 | mod build;
mod cmd;
mod common;
mod config;
mod proxy;
mod serve;
mod watch;
use std::path::PathBuf;
use anyhow::Result;
use structopt::StructOpt;
#[async_std::main]
async fn main() -> Result<()> {
let cli = Trunk::from_args();
if let Err(err) = cli.run().await {
eprintln!("{}", err.to_string());
std::process::exit(1);
}
Ok(())
}
/// Build, bundle & ship your Rust WASM application to the web.
#[derive(StructOpt)]
#[structopt(name = "trunk")]
struct Trunk {
#[structopt(subcommand)]
action: TrunkSubcommands,
/// Path to the Trunk config file [default: Trunk.toml]
#[structopt(long, parse(from_os_str), env = "TRUNK_CONFIG")]
pub config: Option<PathBuf>,
}
impl Trunk {
pub async fn run(self) -> Result<()> {
match self.action {
TrunkSubcommands::Build(inner) => inner.run(self.config).await,
TrunkSubcommands::Clean(inner) => inner.run(self.config).await,
TrunkSubcommands::Serve(inner) => inner.run(self.config).await,
TrunkSubcommands::Watch(inner) => inner.run(self.config).await,
TrunkSubcommands::Config(inner) => inner.run(self.config).await,
}
}
}
#[derive(StructOpt)]
enum TrunkSubcommands {
Build(cmd::build::Build),
Clean(cmd::clean::Clean),
Serve(cmd::serve::Serve),
Watch(cmd::watch::Watch),
Config(cmd::config::Config),
}
| 25.472727 | 76 | 0.625268 |
0efaa13d701770c8fb1227cfec96d6f528d55a84 | 1,015 | use crate::channel::{
embed::Embed,
message::{AllowedMentions, MessageFlags},
};
use serde::{Deserialize, Serialize};
/// Optional extra data sent when responding to an [`Interaction`] of type
/// [`ApplicationCommand`].
///
/// This is used when intending to send a message in the response.
///
/// [`Interaction`]: crate::application::interaction::Interaction
/// [`ApplicationCommand`]: crate::application::interaction::Interaction::ApplicationCommand
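///
/// A minimal construction sketch (the message content is illustrative):
///
/// ```ignore
/// let data = CallbackData {
///     allowed_mentions: None,
///     content: Some("Hello from an interaction response!".to_owned()),
///     embeds: Vec::new(),
///     flags: None,
///     tts: None,
/// };
/// ```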
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct CallbackData {
#[serde(skip_serializing_if = "Option::is_none")]
pub allowed_mentions: Option<AllowedMentions>,
#[serde(skip_serializing_if = "Option::is_none")]
pub content: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub embeds: Vec<Embed>,
#[serde(skip_serializing_if = "Option::is_none")]
pub flags: Option<MessageFlags>,
#[serde(skip_serializing_if = "Option::is_none")]
pub tts: Option<bool>,
}
| 36.25 | 92 | 0.706404 |
9103e12e8e66fdded75788720b304b0848b5b098 | 1,384 | // Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::VecDeque;
impl Solution {
pub fn level_order_bottom(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Vec<i32>> {
if root.is_none() {
return vec![];
}
let mut result = vec![];
let mut nodes = VecDeque::new();
nodes.push_back(root.clone());
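        // Standard BFS: gather node values level by level, then reverse at the end so the
        // deepest level comes first.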
while !nodes.is_empty() {
let mut level = vec![];
let n = nodes.len();
            for _ in 0..n {
let front = nodes.pop_front().unwrap();
let root = front.unwrap();
let node = root.borrow();
level.push(node.val);
if node.left.is_some() {
nodes.push_back(node.left.clone());
}
if node.right.is_some() {
nodes.push_back(node.right.clone());
}
}
result.push(level);
}
result.reverse();
result
}
}
| 26.615385 | 85 | 0.477601 |
76b1ea8e5cf241af08d642cb8a6cc6ed0c5bd41a | 4,078 | use crate::{Error, STEAM_URL};
use reqwest::Response;
#[derive(Debug, Clone)]
/// Verifies the login details returned after users have gone through the 'sign in with Steam' page
/// # Example
/// ```no_run
/// # use steam_auth::Verifier;
/// # async fn run() -> Result<(), steam_auth::Error> {
/// # let qs = "openid.ns=http%3A%2F%2Fspecs.openid.net%2Fauth%2F2.0&openid.mode=id_res&openid.op_endpoint=https%3A%2F%2Fsteamcommunity.com%2Fopenid%2Flogin&openid.claimed_id=https%3A%2F%2Fsteamcommunity.com%2Fopenid%2Fid%2F92345666790633291&openid.identity=https%3A%2F%2Fsteamcommunity.com%2Fopenid%2Fid%2F12333456789000000&openid.return_to=http%3A%2F%2Flocalhost%3A8080%2Fcallback&openid.response_nonce=2019-06-15T00%3A36%3A00Z7nVIS5lDAcZe%2FT0gT4%2BQNQyexyA%3D&openid.assoc_handle=1234567890&openid.signed=signed%2Cop_endpoint%2Cclaimed_id%2Cidentity%2Creturn_to%2Cresponse_nonce%2Cassoc_handle&openid.sig=BK0zC%2F%2FKzERs7N%2BNlDO0aL06%2BBA%3D";
/// let (response, verifier) = Verifier::from_querystring(qs).await?;
/// // `response` is the reply from the Steam servers; read its body and verify it
/// let body = response.text().await.map_err(steam_auth::Error::Reqwest)?;
/// match verifier.verify_response(body) {
///     Ok(steam_id) => (), // got steam id
///     Err(e) => (), // Auth failure
/// }
/// # Ok(())
/// # }
/// ```
pub struct Verifier {
claimed_id: u64,
}
impl Verifier {
pub async fn from_querystring<S: AsRef<str>>(s: S) -> Result<(Response, Self), Error> {
let form = serde_urlencoded::from_str(s.as_ref()).map_err(Error::Deserialize)?;
Self::from_parsed(form).await
}
pub async fn from_parsed(
mut login_data: SteamLoginData,
) -> Result<(Response, Self), Error> {
login_data.mode = "check_authentication".to_owned();
let verifier = {
let url = url::Url::parse(&login_data.claimed_id).map_err(|_| Error::ParseSteamId)?;
let mut segments = url.path_segments().ok_or(Error::ParseSteamId)?;
let id_segment = segments.next_back().ok_or(Error::ParseSteamId)?;
let claimed_id = id_segment.parse::<u64>().map_err(|_| Error::ParseSteamId)?;
Self { claimed_id }
};
let form_data = serde_urlencoded::to_string(login_data)
.map_err(Error::Serialize)?
.into_bytes();
let response = reqwest::Client::new().request(reqwest::Method::POST, STEAM_URL)
.header("Content-Type", "application/x-www-form-urlencoded")
.body(form_data)
.send().await.map_err(Error::Reqwest)?;
Ok((response, verifier))
}
/// Verifies the response from the steam servers.
pub fn verify_response<S: Into<String>>(self, response_body: S) -> Result<u64, Error> {
let is_valid = response_body
.into()
.split('\n')
.filter_map(|line| {
// Allow values to contain colons, but not keys
let mut pair = line.splitn(2, ':');
Some((pair.next()?, pair.next()?))
})
.any(|(k, v)| k == "is_valid" && v == "true");
if is_valid {
Ok(self.claimed_id)
} else {
Err(Error::AuthenticationFailed)
}
}
}
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct SteamLoginData {
#[serde(rename = "openid.ns")]
ns: String,
#[serde(rename = "openid.mode")]
mode: String,
#[serde(rename = "openid.op_endpoint")]
op_endpoint: String,
#[serde(rename = "openid.claimed_id")]
claimed_id: String,
#[serde(rename = "openid.identity")]
identity: Option<String>,
#[serde(rename = "openid.return_to")]
return_to: String,
#[serde(rename = "openid.response_nonce")]
response_nonce: String,
#[serde(rename = "openid.invalidate_handle")]
invalidate_handle: Option<String>,
#[serde(rename = "openid.assoc_handle")]
assoc_handle: String,
#[serde(rename = "openid.signed")]
signed: String,
#[serde(rename = "openid.sig")]
sig: String,
}
| 38.838095 | 649 | 0.633399 |
33ba85d593e3b80d77cca5d17d31d8b467dc61a4 | 234 | // variables2.rs
// Make me compile! Execute the command `rustlings hint variables2` if you want a hint :)
fn main() {
    let x: u32 = 0;
if x == 10 {
println!("Ten!");
} else {
println!("Not ten!");
}
}
| 18 | 89 | 0.534188 |
90999c878189b0deb300e83dc532d2e69a5bde94 | 289 | //! Tests auto-converted from "sass-spec/spec/non_conformant/scss-tests/171_test_loud_comment_in_compressed_mode.hrx"
#[test]
fn test() {
assert_eq!(
crate::rsass(
"/*! foo */\
\n"
)
.unwrap(),
"/*! foo */\
\n"
);
}
| 19.266667 | 117 | 0.49481 |
2f4913507d07879a3c793f9f41dfdaa29cdeae6e | 19,954 | use std::sync::Arc;
use piston_meta::bootstrap::Convert;
use range::Range;
use Dfn;
/// Stores a Dyon type.
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
/// Whether a statement is never reached.
Unreachable,
/// A no-type.
Void,
/// Any type.
Any,
/// Boolean type.
Bool,
/// F64 type.
F64,
/// 4D vector type.
Vec4,
/// 4D matrix type.
Mat4,
/// String/text type.
Str,
/// Link type.
Link,
/// Array type.
Array(Box<Type>),
/// Object type.
Object,
/// Option type.
Option(Box<Type>),
/// Result type.
Result(Box<Type>),
/// Secret type.
Secret(Box<Type>),
/// Thread handle type.
Thread(Box<Type>),
/// In-type.
In(Box<Type>),
/// Ad-hoc type.
AdHoc(Arc<String>, Box<Type>),
/// Closure type.
Closure(Box<Dfn>),
}
impl Type {
/// Returns an extension quantified over ad-hoc types.
///
/// For example, `(vec4, vec4) -> vec4` becomes `all T { (T vec4, T vec4) -> T vec4 }`.
pub fn all_ext(args: Vec<Type>, ret: Type) -> (Vec<Arc<String>>, Vec<Type>, Type) {
use crate::T;
use Type::AdHoc;
(vec![T.clone()], args.into_iter().map(|arg| AdHoc(T.clone(), Box::new(arg))).collect(),
AdHoc(T.clone(), Box::new(ret)))
}
/// Returns description of the type.
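    ///
    /// A small sketch of the notation produced:
    ///
    /// ```ignore
    /// assert_eq!(Type::Array(Box::new(Type::F64)).description(), "[f64]");
    /// assert_eq!(Type::Option(Box::new(Type::Any)).description(), "opt");
    /// ```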
pub fn description(&self) -> String {
use Type::*;
match *self {
Unreachable => "unreachable".into(),
Void => "void".into(),
Any => "any".into(),
Bool => "bool".into(),
F64 => "f64".into(),
Vec4 => "vec4".into(),
Mat4 => "mat4".into(),
Str => "str".into(),
Link => "link".into(),
Array(ref ty) => {
if let Any = **ty {
"[]".into()
} else {
let mut res = String::from("[");
res.push_str(&ty.description());
res.push(']');
res
}
}
Object => "{}".into(),
Option(ref ty) => {
if let Any = **ty {
"opt".into()
} else {
let mut res = String::from("opt[");
res.push_str(&ty.description());
res.push(']');
res
}
}
Result(ref ty) => {
if let Any = **ty {
"res".into()
} else {
let mut res = String::from("res[");
res.push_str(&ty.description());
res.push(']');
res
}
}
Secret(ref ty) => {
match **ty {
Bool => "sec[bool]".into(),
F64 => "sec[f64]".into(),
_ => panic!("Secret only supports `bool` and `f64`")
}
}
Thread(ref ty) => {
if let Any = **ty {
"thr".into()
} else {
let mut res = String::from("thr[");
res.push_str(&ty.description());
res.push(']');
res
}
}
In(ref ty) => {
if let Any = **ty {
"in".into()
} else {
let mut res = String::from("in[");
res.push_str(&ty.description());
res.push(']');
res
}
}
AdHoc(ref ad, ref ty) => {
(&**ad).clone() + " " + &ty.description()
}
Closure(ref closure) => {
let mut s = String::new();
s.push_str("\\(");
for (i, ty) in closure.tys.iter().enumerate() {
s.push_str(&ty.description());
if i + 1 < closure.tys.len() {
s.push_str(", ");
}
}
s.push_str(") -> ");
s.push_str(&closure.ret.description());
s
}
}
}
/// Returns an array type with an `any` as inner type.
pub fn array() -> Type {Type::Array(Box::new(Type::Any))}
/// Returns an object type.
pub fn object() -> Type {Type::Object}
/// Returns an Option type with an `any` as inner type.
pub fn option() -> Type {Type::Option(Box::new(Type::Any))}
/// Returns a Result type with an `any` as inner type.
pub fn result() -> Type {Type::Result(Box::new(Type::Any))}
/// Returns a thread handle type with an `any` as inner type.
pub fn thread() -> Type {Type::Thread(Box::new(Type::Any))}
/// Returns an in-type with an `any` as inner type.
pub fn in_ty() -> Type {Type::In(Box::new(Type::Any))}
/// Binds refinement type variables.
///
/// Returns the type argument to compare to.
pub fn bind_ty_vars(
&self,
refine: &Type,
names: &[Arc<String>],
ty_vars: &mut Vec<Option<Arc<String>>>
) -> Result<Type, String> {
if names.len() == 0 {return Ok(self.clone())};
match (self, refine) {
(&Type::AdHoc(ref a_name, ref a_inner_ty),
&Type::AdHoc(ref b_name, ref b_inner_ty)) => {
for i in 0..names.len() {
if a_name == &names[i] {
let new_inner = a_inner_ty.bind_ty_vars(b_inner_ty, names, ty_vars)?;
if let Some(ref existing_name) = ty_vars[i] {
if existing_name != b_name &&
new_inner.goes_with(b_inner_ty) &&
!new_inner.ambiguous(b_inner_ty)
{
return Err(format!("Type mismatch (#1500): Expected `{}`, found `{}`",
existing_name, b_name))
} else {
return Ok(Type::AdHoc(existing_name.clone(),
Box::new(new_inner)))
}
} else {
ty_vars[i] = Some(b_name.clone());
return Ok(Type::AdHoc(b_name.clone(),
Box::new(a_inner_ty.bind_ty_vars(b_inner_ty, names, ty_vars)?)))
}
}
}
Ok(Type::AdHoc(a_name.clone(),
Box::new(a_inner_ty.bind_ty_vars(b_inner_ty, names, ty_vars)?)))
}
(&Type::AdHoc(ref a_name, ref a_inner_ty), ref b) => {
for i in 0..names.len() {
if a_name == &names[i] {
let new_inner = a_inner_ty.bind_ty_vars(refine, names, ty_vars)?;
if let Some(ref n) = ty_vars[i] {
if new_inner.goes_with(b) && !new_inner.ambiguous(b) {
return Err(format!(
"Type mismatch (#1600): Expected `{}`, found no ad-hoc type", n))
}
} else {break}
}
}
a_inner_ty.bind_ty_vars(refine, names, ty_vars)
}
_ => Ok(self.clone())
}
}
/// Inserts variable name, replacing ad-hoc type name.
pub fn insert_var(&mut self, name: &Arc<String>, val: &Arc<String>) {
match *self {
Type::AdHoc(ref mut n, ref mut inner_ty) => {
if n == name {
*n = val.clone();
}
inner_ty.insert_var(name, val)
}
_ => {}
}
}
/// Inserts a none ad-hoc variable.
pub fn insert_none_var(&mut self, name: &Arc<String>) {
match *self {
Type::AdHoc(_, ref mut inner_ty) => {
inner_ty.insert_none_var(name);
*self = (**inner_ty).clone();
}
_ => {}
}
}
/// Returns `true` if a type to be refined is ambiguous relative to this type (directional check).
///
    /// For example, the ad-hoc type `Foo str` is ambiguous with the type `str`.
/// If more was known about the `str` type with further refinement,
/// then it might turn out to be `Bar str`, which triggers a collision.
pub fn ambiguous(&self, refine: &Type) -> bool {
use self::Type::*;
match (self, refine) {
(&AdHoc(ref xa, ref xb), &AdHoc(ref ya, ref yb)) if xa == ya => xb.ambiguous(yb),
(&AdHoc(_, ref x), y) if x.goes_with(y) => true,
(&Array(ref x), &Array(ref y)) if x.ambiguous(y) => true,
(&Option(ref x), &Option(ref y)) if x.ambiguous(y) => true,
(&Result(ref x), &Result(ref y)) if x.ambiguous(y) => true,
(&Thread(ref x), &Thread(ref y)) if x.ambiguous(y) => true,
(&In(ref x), &In(ref y)) if x.ambiguous(y) => true,
(&Bool, &Any) => true,
(&F64, &Any) => true,
(&Str, &Any) => true,
(&Vec4, &Any) => true,
(&Mat4, &Any) => true,
(&Link, &Any) => true,
(&Array(_), &Any) => true,
(&Option(_), &Any) => true,
(&Result(_), &Any) => true,
(&Thread(_), &Any) => true,
(&Secret(_), &Any) => true,
(&In(_), &Any) => true,
_ => false
}
}
    /// Returns the closure's return type if this type can be a closure, `None` otherwise.
pub fn closure_ret_ty(&self) -> Option<Type> {
use self::Type::*;
match *self {
Closure(ref ty) => Some(ty.ret.clone()),
AdHoc(_, ref x) => x.closure_ret_ty(),
Any => Some(Type::Any),
_ => None
}
}
/// Returns `true` if a type goes with another type (directional check).
///
/// - `bool` (argument) goes with `sec[bool]` (value)
/// - `f64` (argument) goes with `sec[f64]` (value)
///
/// The opposite is not true, since `sec` contains extra information.
pub fn goes_with(&self, other: &Type) -> bool {
use self::Type::*;
// Invert the order because of complex ad-hoc logic.
if let AdHoc(_, _) = *other {
if let AdHoc(_, _) = *self {}
else {
return other.goes_with(self)
}
}
if let Secret(ref other_ty) = *other {
return if let Secret(ref this_ty) = *self {
this_ty.goes_with(other_ty)
} else {
self.goes_with(other_ty)
};
}
match self {
// Unreachable goes with anything.
&Unreachable => true,
_ if *other == Unreachable => true,
&Any => *other != Void,
// Void only goes with void.
&Void => *other == Void,
&Array(ref arr) => {
if let Array(ref other_arr) = *other {
arr.goes_with(other_arr)
} else if let Any = *other {
true
} else {
false
}
}
&Object => {
if let Object = *other {
true
} else if let Any = *other {
true
} else {
false
}
}
&Option(ref opt) => {
if let Option(ref other_opt) = *other {
opt.goes_with(other_opt)
} else if let Any = *other {
true
} else {
false
}
}
&Result(ref res) => {
if let Result(ref other_res) = *other {
res.goes_with(other_res)
} else if let Any = *other {
true
} else {
false
}
}
&Thread(ref thr) => {
if let Thread(ref other_thr) = *other {
thr.goes_with(other_thr)
} else if let Any = *other {
true
} else {
false
}
}
&In(ref in_ty) => {
if let In(ref other_ty) = *other {
in_ty.goes_with(other_ty)
} else if let Any = *other {
true
} else {
false
}
}
&Closure(ref cl) => {
if let Closure(ref other_cl) = *other {
if cl.tys.len() != other_cl.tys.len() { return false; }
if !cl.tys.iter().zip(other_cl.tys.iter()).all(|(a, b)| a.goes_with(b)) {
return false;
}
if !cl.ret.goes_with(&other_cl.ret) { return false; }
true
} else if let Any = *other {
true
} else {
false
}
}
&AdHoc(ref name, ref ty) => {
if let AdHoc(ref other_name, ref other_ty) = *other {
name == other_name && ty.goes_with(other_ty)
} else if let Void = *other {
false
} else {
ty.goes_with(other)
}
}
            // Bool, F64, Str, Vec4.
x if x == other => { true }
_ if *other == Type::Any => { true }
_ => { false }
}
}
    /// Returns whether the `+=` operator can be applied between this type and `other`.
pub fn add_assign(&self, other: &Type) -> bool {
use self::Type::*;
match (self, other) {
(&AdHoc(ref name, ref ty), &AdHoc(ref other_name, ref other_ty)) => {
if name != other_name { return false; }
if !ty.goes_with(other_ty) { return false; }
ty.add_assign(other_ty)
}
(&AdHoc(_, _), _) | (_, &AdHoc(_, _)) => false,
(&Void, _) | (_, &Void) => false,
_ => true
}
}
/// Converts meta data into a type.
pub fn from_meta_data(node: &str, mut convert: Convert, ignored: &mut Vec<Range>)
-> Result<(Range, Type), ()> {
let start = convert;
let start_range = convert.start_node(node)?;
convert.update(start_range);
let mut ty: Option<Type> = None;
loop {
if let Ok(range) = convert.end_node(node) {
convert.update(range);
break;
} else if let Ok((range, _)) = convert.meta_bool("any") {
convert.update(range);
ty = Some(Type::Any);
} else if let Ok((range, _)) = convert.meta_bool("bool") {
convert.update(range);
ty = Some(Type::Bool);
} else if let Ok((range, _)) = convert.meta_bool("sec_bool") {
convert.update(range);
ty = Some(Type::Secret(Box::new(Type::Bool)));
} else if let Ok((range, _)) = convert.meta_bool("f64") {
convert.update(range);
ty = Some(Type::F64);
} else if let Ok((range, _)) = convert.meta_bool("sec_f64") {
convert.update(range);
ty = Some(Type::Secret(Box::new(Type::F64)));
} else if let Ok((range, _)) = convert.meta_bool("str") {
convert.update(range);
ty = Some(Type::Str);
} else if let Ok((range, _)) = convert.meta_bool("vec4") {
convert.update(range);
ty = Some(Type::Vec4);
} else if let Ok((range, _)) = convert.meta_bool("mat4") {
convert.update(range);
ty = Some(Type::Mat4);
} else if let Ok((range, _)) = convert.meta_bool("link") {
convert.update(range);
ty = Some(Type::Link);
} else if let Ok((range, _)) = convert.meta_bool("opt_any") {
convert.update(range);
ty = Some(Type::Option(Box::new(Type::Any)));
} else if let Ok((range, _)) = convert.meta_bool("res_any") {
convert.update(range);
ty = Some(Type::Result(Box::new(Type::Any)));
} else if let Ok((range, _)) = convert.meta_bool("arr_any") {
convert.update(range);
ty = Some(Type::Array(Box::new(Type::Any)));
} else if let Ok((range, _)) = convert.meta_bool("obj_any") {
convert.update(range);
ty = Some(Type::Object);
} else if let Ok((range, _)) = convert.meta_bool("thr_any") {
convert.update(range);
ty = Some(Type::Thread(Box::new(Type::Any)));
} else if let Ok((range, _)) = convert.meta_bool("in_any") {
convert.update(range);
ty = Some(Type::In(Box::new(Type::Any)));
} else if let Ok((range, val)) = Type::from_meta_data(
"opt", convert, ignored) {
convert.update(range);
ty = Some(Type::Option(Box::new(val)));
} else if let Ok((range, val)) = Type::from_meta_data(
"res", convert, ignored) {
convert.update(range);
ty = Some(Type::Result(Box::new(val)));
} else if let Ok((range, val)) = Type::from_meta_data(
"arr", convert, ignored) {
convert.update(range);
ty = Some(Type::Array(Box::new(val)));
} else if let Ok((range, val)) = Type::from_meta_data(
"thr", convert, ignored) {
convert.update(range);
ty = Some(Type::Thread(Box::new(val)));
} else if let Ok((range, val)) = Type::from_meta_data(
"in", convert, ignored) {
convert.update(range);
ty = Some(Type::In(Box::new(val)));
} else if let Ok((range, val)) = convert.meta_string("ad_hoc") {
convert.update(range);
let inner_ty = if let Ok((range, val)) = Type::from_meta_data(
"ad_hoc_ty", convert, ignored) {
convert.update(range);
val
} else {
Type::Object
};
ty = Some(Type::AdHoc(val, Box::new(inner_ty)));
} else if let Ok(range) = convert.start_node("closure_type") {
convert.update(range);
let mut lts = vec![];
let mut tys = vec![];
while let Ok((range, val)) = Type::from_meta_data(
"cl_arg", convert, ignored) {
use Lt;
convert.update(range);
lts.push(Lt::Default);
tys.push(val);
}
let (range, ret) = Type::from_meta_data("cl_ret", convert, ignored)?;
convert.update(range);
let range = convert.end_node("closure_type")?;
convert.update(range);
ty = Some(Type::Closure(Box::new(
Dfn { lts, tys, ret, ext: vec![], lazy: crate::LAZY_NO }
)));
} else {
let range = convert.ignore();
convert.update(range);
ignored.push(range);
}
}
Ok((convert.subtract(start), ty.ok_or(())?))
}
}
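
#[cfg(test)]
mod type_sketches {
    // Small illustrative checks of `description` and `goes_with`, derived from
    // the match arms above; the values are examples only, not an exhaustive
    // specification of the type system.
    use super::Type;

    #[test]
    fn describes_common_types() {
        assert_eq!(Type::F64.description(), "f64");
        assert_eq!(Type::array().description(), "[]");
        assert_eq!(Type::Array(Box::new(Type::F64)).description(), "[f64]");
        assert_eq!(Type::Secret(Box::new(Type::Bool)).description(), "sec[bool]");
    }

    #[test]
    fn goes_with_is_directional() {
        // `f64` (argument) goes with `sec[f64]` (value), but not the other way around.
        assert!(Type::F64.goes_with(&Type::Secret(Box::new(Type::F64))));
        assert!(!Type::Secret(Box::new(Type::F64)).goes_with(&Type::F64));
        // Anything except `void` goes with `any`.
        assert!(Type::Str.goes_with(&Type::Any));
        assert!(!Type::Void.goes_with(&Type::Any));
    }
}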
| 37.089219 | 102 | 0.423825 |
d7a6d39c367c61bbdeab73cef80a41b16891b4ad | 23,463 | use sdl2::event::Event;
use sdl2::image::{InitFlag, LoadTexture};
use sdl2::keyboard::Keycode;
use sdl2::messagebox::ClickedButton;
use sdl2::messagebox::*;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::{Canvas, Texture};
use sdl2::video::Window;
// use shakmaty::{Board, Chess, File, Move, Position, Rank, Role, Setup, Square};
// use shogai::ai::*;
use shogai::board::*;
use shogai::movement::*;
use shogai::piece::*;
use shogai::position::*;
use std::collections::HashSet;
use std::path::Path;
const SRC_RESERVE_HEIGTH: u32 = 100;
const SCR_WIDTH: u32 = 603;
const SCR_HEIGHT: u32 = 603 + 2 * SRC_RESERVE_HEIGTH;
const SQR_SIZE: u32 = SCR_WIDTH / 9;
pub fn init() -> Result<(), String> {
// sdl things
let context = sdl2::init().unwrap();
let video = context.video().unwrap();
let _image_context = sdl2::image::init(InitFlag::PNG)?;
let window = match video
.window("Shogi", SCR_WIDTH, SCR_HEIGHT)
.position_centered()
.opengl()
.build()
{
Ok(window) => window,
Err(err) => panic!("failed to create window: {}", err),
};
let mut canvas = window
.into_canvas()
.software()
.build()
.map_err(|e| e.to_string())?;
let mut events = context.event_pump()?;
canvas.set_draw_color(Color::RGB(0xD1, 0x8B, 0x47));
canvas.clear();
let texture_creator = canvas.texture_creator();
// define standard board
let mut game = Board::new();
// completely transparent texture
let nothing = texture_creator.load_texture(Path::new("src/sprites/nothing.png"))?;
    // load the white pieces' sprites.
    // credits for the sprites: Wikimedia Commons
let w_k = texture_creator.load_texture(Path::new("src/sprites/white/k.png"))?;
let w_r = texture_creator.load_texture(Path::new("src/sprites/white/r.png"))?;
let w_b = texture_creator.load_texture(Path::new("src/sprites/white/b.png"))?;
let w_p = texture_creator.load_texture(Path::new("src/sprites/white/p.png"))?;
let w_n = texture_creator.load_texture(Path::new("src/sprites/white/n.png"))?;
let w_l = texture_creator.load_texture(Path::new("src/sprites/white/l.png"))?;
let w_g = texture_creator.load_texture(Path::new("src/sprites/white/g.png"))?;
let w_s = texture_creator.load_texture(Path::new("src/sprites/white/s.png"))?;
let w_bp = texture_creator.load_texture(Path::new("src/sprites/white/bp.png"))?;
let w_rp = texture_creator.load_texture(Path::new("src/sprites/white/rp.png"))?;
let w_pp = texture_creator.load_texture(Path::new("src/sprites/white/pp.png"))?;
let w_lp = texture_creator.load_texture(Path::new("src/sprites/white/lp.png"))?;
let w_np = texture_creator.load_texture(Path::new("src/sprites/white/np.png"))?;
let w_sp = texture_creator.load_texture(Path::new("src/sprites/white/sp.png"))?;
    // load the black pieces' sprites.
let b_k = texture_creator.load_texture(Path::new("src/sprites/black/k.png"))?;
let b_r = texture_creator.load_texture(Path::new("src/sprites/black/r.png"))?;
let b_b = texture_creator.load_texture(Path::new("src/sprites/black/b.png"))?;
let b_p = texture_creator.load_texture(Path::new("src/sprites/black/p.png"))?;
let b_n = texture_creator.load_texture(Path::new("src/sprites/black/n.png"))?;
let b_l = texture_creator.load_texture(Path::new("src/sprites/black/l.png"))?;
let b_g = texture_creator.load_texture(Path::new("src/sprites/black/g.png"))?;
let b_s = texture_creator.load_texture(Path::new("src/sprites/black/s.png"))?;
let b_bp = texture_creator.load_texture(Path::new("src/sprites/black/bp.png"))?;
let b_rp = texture_creator.load_texture(Path::new("src/sprites/black/rp.png"))?;
let b_pp = texture_creator.load_texture(Path::new("src/sprites/black/pp.png"))?;
let b_lp = texture_creator.load_texture(Path::new("src/sprites/black/lp.png"))?;
let b_np = texture_creator.load_texture(Path::new("src/sprites/black/np.png"))?;
let b_sp = texture_creator.load_texture(Path::new("src/sprites/black/sp.png"))?;
let piece_to_texture = |piece: &Piece| {
if piece.promoted {
match piece.color {
shogai::piece::Color::White => match piece.piecetype {
PieceType::Pawn => &w_pp,
PieceType::Bishop => &w_bp,
PieceType::Rook => &w_rp,
PieceType::Knight => &w_np,
PieceType::King => &w_k,
PieceType::Gold => &w_g,
PieceType::Lance => &w_lp,
PieceType::Silver => &w_sp,
},
shogai::piece::Color::Black => match piece.piecetype {
PieceType::Pawn => &b_pp,
PieceType::Bishop => &b_bp,
PieceType::Rook => &b_rp,
PieceType::Knight => &b_np,
PieceType::King => &b_k,
PieceType::Gold => &b_g,
PieceType::Lance => &b_lp,
PieceType::Silver => &b_sp,
},
}
} else {
match piece.color {
shogai::piece::Color::White => match piece.piecetype {
PieceType::Pawn => &w_p,
PieceType::Bishop => &w_b,
PieceType::Rook => &w_r,
PieceType::Knight => &w_n,
PieceType::King => &w_k,
PieceType::Gold => &w_g,
PieceType::Lance => &w_l,
PieceType::Silver => &w_s,
},
shogai::piece::Color::Black => match piece.piecetype {
PieceType::Pawn => &b_p,
PieceType::Bishop => &b_b,
PieceType::Rook => &b_r,
PieceType::Knight => &b_n,
PieceType::King => &b_k,
PieceType::Gold => &b_g,
PieceType::Lance => &b_l,
PieceType::Silver => &b_s,
},
}
}
};
    // This will parse and draw all pieces currently on the board (and in reserve) to the window.
let draw_pieces = |canvas: &mut Canvas<Window>, game: &Board, hidden: Option<Piece>| {
let mut piece_hidden = false;
for (j, piece) in game.iter().enumerate() {
            //TODO: filter "only once" so that only one exemplar of identical pieces in reserve is hidden
if !piece_hidden && Some(*piece) == hidden {
piece_hidden = true;
continue;
}
if let Some(i) = piece.position {
draw_piece(canvas, piece_to_texture(piece), i);
} else {
let count = game.iter().take(j).filter(|p| p == &piece).count();
draw_piece_on_reserve(canvas, piece_to_texture(piece), piece, count);
//TODO manage drawing multiple identical pieces
}
}
};
let get_mouse_position: fn(sdl2::mouse::MouseState) -> Option<Position> = |mouse_state| {
if mouse_state.y() >= SRC_RESERVE_HEIGTH as i32
&& mouse_state.y() <= SCR_HEIGHT as i32 - SRC_RESERVE_HEIGTH as i32
{
return Some(Position(
(9 - (mouse_state.x() / SQR_SIZE as i32) as u16)
+ ((mouse_state.y() - SRC_RESERVE_HEIGTH as i32) / SQR_SIZE as i32) as u16 * 9
- 1,
));
} else {
//manage get from reserve
return None;
}
};
    // We need to set this before the render loop so it is always initialised,
    // so we just set an arbitrary texture to this for now.
let mut curr_texture: &Texture = ¬hing;
let mut hidden = None;
// arbitrary to avoid undefined behaviour
let mut prev_click_pos: Option<Position> = None;
let mut prev_role_click: Option<PieceType> = None;
let mut curr_role_click: Option<PieceType> = None;
let mut curr_click_pos: Option<Position> = None;
let mut prev_mouse_buttons = HashSet::new();
let mut has_played = false;
//main loop start ####################################
//####################################################
//###################################################
'main_loop: loop {
for event in events.poll_iter() {
// if esc is pressed, exit main loop
// (consequently ending the program)
match event {
Event::Quit { .. }
| Event::KeyDown {
keycode: Some(Keycode::Escape),
..
} => break 'main_loop,
_ => false,
};
}
if has_played {
            //game_over check is *very* expensive, don't do it every time or the UI lags
//also may be used for future multithreading
has_played = false;
if game.game_over() {
                let who = if game.get_turn() {
                    "second player"
                } else {
                    "first player"
                };
let message = [who, &"has won the game!"].join(" ");
return show_simple_message_box(
MessageBoxFlag::empty(),
&"Game Over",
&message,
canvas.window(),
)
.map_err(|e| e.to_string());
}
}
let mouse_state = events.mouse_state();
let curr_mouse_buttons: HashSet<_> = mouse_state.pressed_mouse_buttons().collect();
canvas.set_draw_color(Color::RGB(0xD1, 0x8B, 0x47));
canvas.clear();
draw_shogiban(&mut canvas);
let mut human_play = |game: &mut Board| {
let get_texture = |game: &Board| {
if let Some(pos) = get_mouse_position(mouse_state) {
match game.is_occupied_by(pos) {
Some(piece) => piece_to_texture(&piece),
None => ¬hing,
}
} else {
¬hing
}
};
//select in green movable pieces on the board
if let Some(pos) = prev_click_pos {
if let Some(selected_piece) = game.is_occupied_by(pos) {
if selected_piece.color == game.get_color() {
draw_select(pos, &mut canvas);
}
}
}
let is_mouse_released = &prev_mouse_buttons - &curr_mouse_buttons;
prev_mouse_buttons = curr_mouse_buttons.clone();
prev_role_click = curr_role_click;
prev_click_pos = curr_click_pos;
if !is_mouse_released.is_empty() {
curr_texture = get_texture(&game);
}
if !is_mouse_released.is_empty() {
if let Some(pos) = get_mouse_position(mouse_state) {
curr_role_click = match game.is_occupied_by(pos) {
None => None,
Some(piece) => Some(piece.piecetype),
};
if let Some(piece) = game.is_occupied_by(pos) {
if piece.color == game.get_color() {
hidden = game.is_occupied_by(pos);
} else {
curr_role_click = None;
}
} else {
hidden = None;
}
} else if let Some(piecetype) = get_in_reserve(mouse_state, &game) {
//drag n drop from reserve (drop move)
if game.iter().any(|p| {
p.color == game.get_color()
&& p.piecetype == piecetype
&& p.position == None
}) {
curr_role_click = Some(piecetype);
curr_click_pos = None;
hidden = Some(Piece {
color: game.get_color(),
piecetype: piecetype,
position: None,
promoted: false,
});
} else {
curr_role_click = None;
hidden = None;
}
} else {
curr_role_click = None;
hidden = None;
}
curr_click_pos = get_mouse_position(mouse_state);
if let Some(piecetype) = prev_role_click {
if let Some(end) = curr_click_pos {
let full_mv = Movement {
piecetype: piecetype,
start: prev_click_pos,
end: end,
promotion: false,
force_capture: false,
withdraw: false,
restart: false,
};
let full_mv_with_promotion = Movement {
promotion: true,
..full_mv
};
let res1 = game.check_move(&full_mv.to_string()).is_ok();
let mut res2 = game.check_move(&full_mv_with_promotion.to_string()).is_ok()
&& (full_mv_with_promotion.to_string() != full_mv.to_string());
if let Some(pos) = prev_click_pos {
if let Some(piece) = game.is_occupied_by(pos) {
if piece.promoted {
//no need to buzz the player if the piece is already promoted
res2 = false;
}
}
}
                    //^ necessary because to_string'ing a drop with promotion silently
                    //removes the (impossible) promotion
let chosen_move;
if res1 && !res2 {
chosen_move = full_mv;
} else if res2 && !res1 {
chosen_move = full_mv_with_promotion;
} else if res1 && res2 {
                        //ask whether to promote
let buttons: Vec<_> = vec![
ButtonData {
flags: MessageBoxButtonFlag::RETURNKEY_DEFAULT,
button_id: 1,
text: "Promote",
},
ButtonData {
flags: MessageBoxButtonFlag::NOTHING,
button_id: 2,
text: "Do not promote",
},
];
let res: ClickedButton = show_message_box(
MessageBoxFlag::empty(),
buttons.as_slice(),
"",
"Do you want to promote the piece ?",
canvas.window(),
None,
)
.unwrap();
chosen_move = match res {
ClickedButton::CloseButton => full_mv_with_promotion,
ClickedButton::CustomButton(buttondata) => {
match buttondata.button_id {
1 => full_mv_with_promotion,
_ => full_mv,
}
}
};
} else {
chosen_move = full_mv; //to satisfy checker, but is caught later anyway
}
curr_role_click = None;
curr_click_pos = None;
let mv = chosen_move.to_string();
if game.check_move(&mv).is_ok() {
*game = game.play_move_unchecked(&mv);
curr_texture = ¬hing;
hidden = None;
has_played = true;
} else {
hidden = None;
}
}
}
}
//drag effect
if let Some(_) = curr_role_click {
if let Some(pos) = curr_click_pos {
//manage drag pieces from reserve
if let Some(piece) = game.is_occupied_by(pos) {
if piece.color == game.get_color() {
let _ = canvas.copy(
curr_texture,
None,
Rect::new(
mouse_state.x() as i32 - SQR_SIZE as i32 / 2,
mouse_state.y() as i32 - SQR_SIZE as i32 / 2,
SQR_SIZE,
SQR_SIZE,
),
);
}
}
}
}
if let Some(piecetype) = curr_role_click {
if let None = curr_click_pos {
let drag_piece = Piece {
color: game.get_color(),
piecetype: piecetype,
position: None,
promoted: false,
};
curr_texture = piece_to_texture(&drag_piece);
let _ = canvas.copy(
curr_texture,
None,
Rect::new(
mouse_state.x() as i32 - SQR_SIZE as i32 / 2,
mouse_state.y() as i32 - SQR_SIZE as i32 / 2,
SQR_SIZE,
SQR_SIZE,
),
);
}
}
};
let human_turn = !game.get_turn();
if human_turn {
human_play(&mut game);
} else {
let mv = shogai::ai::greedy(&game);
game = game.play_move(&mv);
has_played = true;
}
        // For AI play, consider multithreading and making use of the has_played flag to
        // reduce game_over checks.
        // An AI turn should look like this:
        // if it is the AI's turn {
        //     mv = ai guess move
        //     game.play(mv)
        //     has_played = true
        // }
draw_pieces(&mut canvas, &game, hidden);
canvas.present();
// if you don't do this cpu usage will skyrocket to 100%
events.wait_event_timeout(10);
events.poll_event();
//draw_check(&game, &mut canvas);
}
Ok(())
}
//-----------------------------------------------------------------------------------
//
fn get_side(mouse_state: sdl2::mouse::MouseState) -> Option<shogai::piece::Color> {
if mouse_state.y() <= SRC_RESERVE_HEIGTH as i32 {
return Some(shogai::piece::Color::White);
}
if mouse_state.y() >= SCR_HEIGHT as i32 - SRC_RESERVE_HEIGTH as i32 {
return Some(shogai::piece::Color::Black);
}
return None;
}
fn get_in_reserve(mouse_state: sdl2::mouse::MouseState, game: &Board) -> Option<PieceType> {
if Some(game.get_color()) != get_side(mouse_state) {
return None;
} else {
// in reserve
match mouse_state.x() * 9 / SCR_WIDTH as i32 {
0 => Some(PieceType::Pawn),
1 => Some(PieceType::Pawn),
2 => Some(PieceType::Pawn),
3 => Some(PieceType::Knight),
4 => Some(PieceType::Lance),
5 => Some(PieceType::Rook),
6 => Some(PieceType::Bishop),
7 => Some(PieceType::Gold),
8 => Some(PieceType::Silver),
_ => None,
}
}
}
fn draw_piece(canvas: &mut Canvas<Window>, texture: &Texture, i: Position) {
canvas
.copy(
texture,
None,
Rect::new(
((9 - (i.0 as u32 % 9) - 1) * SQR_SIZE) as i32,
(i.0 as u32 / 9 * SQR_SIZE) as i32 + SRC_RESERVE_HEIGTH as i32,
SQR_SIZE,
SQR_SIZE,
),
)
.unwrap();
}
fn draw_piece_on_reserve(
canvas: &mut Canvas<Window>,
texture: &Texture,
piece: &Piece,
count: usize,
) {
let mut x = 2 * match piece.piecetype {
PieceType::Pawn => 0,
PieceType::Knight => 3,
PieceType::Lance => 4,
PieceType::Rook => 5,
PieceType::Bishop => 6,
PieceType::Gold => 7,
PieceType::Silver => 8,
PieceType::King => panic!("King was found in reserve, what kind of shit is this?"),
};
    let spacing_multiplier = 10; //pixels of offset between identical pieces
let y: usize;
if piece.color == shogai::piece::Color::White {
y = (count % 4) * spacing_multiplier;
} else {
y = SCR_HEIGHT as usize - SQR_SIZE as usize - (count % 4) * spacing_multiplier;
}
if x == 0 {
//only for pawns as there can be many pawns
x += count / 4;
}
canvas
.copy(
texture,
None,
Rect::new(
x as i32 * SCR_WIDTH as i32 / 9 / 2,
y as i32,
SQR_SIZE,
SQR_SIZE,
),
)
.unwrap();
}
fn draw_shogiban(canvas: &mut Canvas<Window>) {
draw_grid(canvas);
canvas.set_draw_color(Color::RGB(0x75, 0x48, 0x3B));
let _ = canvas.fill_rect(Rect::new(0, 0, SCR_WIDTH, SRC_RESERVE_HEIGTH));
let _ = canvas.fill_rect(Rect::new(
0,
SCR_HEIGHT as i32 - SRC_RESERVE_HEIGTH as i32,
SCR_WIDTH,
SRC_RESERVE_HEIGTH,
));
}
// from: https://www.libsdl.org/tmp/SDL/test/testdrawchessboard.c
// adapted for shogi
fn draw_grid(canvas: &mut Canvas<Window>) {
canvas.set_draw_color(Color::RGB(0xFF, 0xCE, 0x9E));
let mut row = 0;
while row < 9 {
let mut x = row % 2;
for _ in (row % 2)..(5 + (row % 2)) {
let rect = Rect::new(
x * SQR_SIZE as i32,
row * SQR_SIZE as i32 + SRC_RESERVE_HEIGTH as i32,
SQR_SIZE,
SQR_SIZE,
);
x += 2;
let _ = canvas.fill_rect(rect);
}
row += 1;
}
}
//----------------------------------------------------------------
fn draw_select(p: Position, canvas: &mut Canvas<Window>) {
canvas.set_draw_color(Color::RGB(5, 150, 5));
let x = (8 - p.0 % 9) * SQR_SIZE as u16;
let y = p.0 / 9 * SQR_SIZE as u16 + SRC_RESERVE_HEIGTH as u16;
let _ = canvas.fill_rect(Rect::new(x as i32, y as i32, SQR_SIZE, SQR_SIZE));
}
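
#[cfg(test)]
mod coordinate_sketch {
    // A minimal sketch (no SDL types involved) that re-states the pixel <-> board
    // mapping used by `get_mouse_position` and `draw_piece` above, to show that the
    // two formulas are inverses. The closures are illustrative re-statements of the
    // arithmetic, not the real functions.
    use super::{SQR_SIZE, SRC_RESERVE_HEIGTH};

    #[test]
    fn pixel_to_square_and_back() {
        let to_square = |px: u32, py: u32| -> u16 {
            ((9 - px / SQR_SIZE) as u16)
                + (((py - SRC_RESERVE_HEIGTH) / SQR_SIZE) as u16) * 9
                - 1
        };
        let to_pixel = |sq: u16| -> (u32, u32) {
            (
                (9 - (sq as u32 % 9) - 1) * SQR_SIZE,
                (sq as u32 / 9) * SQR_SIZE + SRC_RESERVE_HEIGTH,
            )
        };
        // Click in the middle of the top-left board square.
        let sq = to_square(SQR_SIZE / 2, SRC_RESERVE_HEIGTH + SQR_SIZE / 2);
        assert_eq!(sq, 8); // top-left square in this indexing
        let (x, y) = to_pixel(sq);
        assert_eq!((x, y), (0, SRC_RESERVE_HEIGTH));
    }
}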
| 38.717822 | 99 | 0.466905 |
cc65e95766c1d66cffe2d29969ead8ef6c5689cc | 46,894 | //! Convert HTML to text formats.
//!
//! This crate renders HTML into a text format, wrapped to a specified width.
//! This can either be plain text or with extra annotations to (for example)
//! show in a terminal which supports colours.
//!
//! # Examples
//!
//! ```rust
//! # use html2text::from_read;
//! let html = b"
//! <ul>
//! <li>Item one</li>
//! <li>Item two</li>
//! <li>Item three</li>
//! </ul>";
//! assert_eq!(from_read(&html[..], 20),
//! "\
//! * Item one
//! * Item two
//! * Item three
//! ");
//! ```
//! A couple of simple demonstration programs are included as examples:
//!
//! ### html2text
//!
//! The simplest example uses `from_read` to convert HTML on stdin into plain
//! text:
//!
//! ```sh
//! $ cargo run --example html2text < foo.html
//! [...]
//! ```
//!
//! ### html2term
//!
//! A very simple example of using the rich interface (`from_read_rich`) for a
//! slightly interactive console HTML viewer is provided as `html2term`.
//!
//! ```sh
//! $ cargo run --example html2term foo.html
//! [...]
//! ```
//!
//! Note that this example takes the HTML file as a parameter so that it can
//! read keys from stdin.
//!
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![deny(missing_docs)]
#[macro_use]
extern crate html5ever;
extern crate unicode_width;
#[macro_use]
mod macros;
pub mod render;
use render::text_renderer::{
PlainDecorator, RenderLine, RichAnnotation, RichDecorator, TaggedLine, TextDecorator,
TextRenderer,
};
use render::Renderer;
use html5ever::driver::ParseOpts;
use html5ever::parse_document;
use html5ever::tendril::TendrilSink;
use html5ever::tree_builder::TreeBuilderOpts;
use markup5ever_rcdom::{
self, Handle,
NodeData::{Comment, Document, Element},
RcDom,
};
use std::cell::Cell;
use std::cmp::max;
use std::io;
use std::io::Write;
use std::iter::{once, repeat};
use std::ops::{Deref, DerefMut};
/// A dummy writer which does nothing
struct Discard {}
impl Write for Discard {
fn write(&mut self, bytes: &[u8]) -> std::result::Result<usize, io::Error> {
Ok(bytes.len())
}
fn flush(&mut self) -> std::result::Result<(), io::Error> {
Ok(())
}
}
const MIN_WIDTH: usize = 5;
/// Size information/estimate
#[derive(Debug, Copy, Clone)]
pub struct SizeEstimate {
size: usize, // Rough overall size
min_width: usize, // The narrowest possible
}
impl Default for SizeEstimate {
fn default() -> SizeEstimate {
SizeEstimate {
size: 0,
min_width: 0,
}
}
}
impl SizeEstimate {
/// Combine two estimates into one (add size and widest required)
pub fn add(self, other: SizeEstimate) -> SizeEstimate {
SizeEstimate {
size: self.size + other.size,
min_width: max(self.min_width, other.min_width),
}
}
}
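
#[cfg(test)]
mod size_estimate_sketch {
    // A small illustration of how estimates combine: sizes add up, while the
    // minimum width is the widest requirement of the two parts. The numbers
    // here are arbitrary.
    use super::SizeEstimate;

    #[test]
    fn add_sums_sizes_and_keeps_widest_min_width() {
        let a = SizeEstimate { size: 3, min_width: 2 };
        let b = SizeEstimate { size: 4, min_width: 5 };
        let combined = a.add(b);
        assert_eq!(combined.size, 7);
        assert_eq!(combined.min_width, 5);
    }
}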
#[derive(Debug)]
/// Render tree table cell
pub struct RenderTableCell {
colspan: usize,
content: Vec<RenderNode>,
size_estimate: Cell<Option<SizeEstimate>>,
col_width: Option<usize>, // Actual width to use
}
impl RenderTableCell {
/// Render this cell to a builder.
pub fn render<T: Write, R: Renderer>(&mut self, _builder: &mut R, _err_out: &mut T) {
unimplemented!()
//render_tree_children_to_string(builder, &mut self.content, err_out)
}
    /// Calculate or return the estimated size of the cell
pub fn get_size_estimate(&self) -> SizeEstimate {
if self.size_estimate.get().is_none() {
let size = self
.content
.iter()
.map(|node| node.get_size_estimate())
.fold(Default::default(), SizeEstimate::add);
self.size_estimate.set(Some(size));
}
self.size_estimate.get().unwrap()
}
}
#[derive(Debug)]
/// Render tree table row
pub struct RenderTableRow {
cells: Vec<RenderTableCell>,
col_sizes: Option<Vec<usize>>,
}
impl RenderTableRow {
    /// Return an iterator over the cells.
pub fn cells(&self) -> std::slice::Iter<RenderTableCell> {
self.cells.iter()
}
/// Return a mutable iterator over the cells.
pub fn cells_mut(&mut self) -> std::slice::IterMut<RenderTableCell> {
self.cells.iter_mut()
}
/// Count the number of cells in the row.
/// Takes into account colspan.
pub fn num_cells(&self) -> usize {
self.cells.iter().map(|cell| cell.colspan).sum()
}
/// Return an iterator over (column, &cell)s, which
/// takes into account colspan.
pub fn cell_columns(&mut self) -> Vec<(usize, &mut RenderTableCell)> {
let mut result = Vec::new();
let mut colno = 0;
for cell in &mut self.cells {
let colspan = cell.colspan;
result.push((colno, cell));
colno += colspan;
}
result
}
/// Return the contained cells as RenderNodes, annotated with their
/// widths if available. Skips cells with no width allocated.
pub fn into_cells(self) -> Vec<RenderNode> {
let mut result = Vec::new();
let mut colno = 0;
let col_sizes = self.col_sizes.unwrap();
for mut cell in self.cells {
let colspan = cell.colspan;
let col_width: usize = col_sizes[colno..colno + cell.colspan].iter().sum();
if col_width > 1 {
cell.col_width = Some(col_width - 1);
result.push(RenderNode::new(RenderNodeInfo::TableCell(cell)));
}
colno += colspan;
}
result
}
}
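
#[cfg(test)]
mod table_row_sketch {
    // Illustrates how `into_cells` skips cells whose allocated column width is
    // zero and reserves one character of each remaining width for the border.
    // The cells and widths here are made up purely for this sketch.
    use super::{RenderNodeInfo, RenderTableCell, RenderTableRow};
    use std::cell::Cell;

    fn empty_cell() -> RenderTableCell {
        RenderTableCell {
            colspan: 1,
            content: Vec::new(),
            size_estimate: Cell::new(None),
            col_width: None,
        }
    }

    #[test]
    fn into_cells_skips_zero_width_columns() {
        let row = RenderTableRow {
            cells: vec![empty_cell(), empty_cell(), empty_cell()],
            col_sizes: Some(vec![3, 0, 4]),
        };
        let cells = row.into_cells();
        // The middle column got no width, so only two cells survive.
        assert_eq!(cells.len(), 2);
        match cells[0].info {
            RenderNodeInfo::TableCell(ref cell) => assert_eq!(cell.col_width, Some(2)),
            _ => panic!("expected a TableCell node"),
        }
    }
}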
#[derive(Debug)]
/// A representation of a table render tree with metadata.
pub struct RenderTable {
rows: Vec<RenderTableRow>,
num_columns: usize,
size_estimate: Cell<Option<SizeEstimate>>,
}
impl RenderTable {
/// Create a new RenderTable with the given rows
pub fn new(rows: Vec<RenderTableRow>) -> RenderTable {
let num_columns = rows.iter().map(|r| r.num_cells()).max().unwrap_or(0);
RenderTable {
rows: rows,
num_columns: num_columns,
size_estimate: Cell::new(None),
}
}
/// Return an iterator over the rows.
pub fn rows(&self) -> std::slice::Iter<RenderTableRow> {
self.rows.iter()
}
    /// Return a mutable iterator over the rows.
pub fn rows_mut(&mut self) -> std::slice::IterMut<RenderTableRow> {
self.rows.iter_mut()
}
/// Consume this and return a Vec<RenderNode> containing the children;
/// the children know the column sizes required.
pub fn into_rows(self, col_sizes: Vec<usize>) -> Vec<RenderNode> {
self.rows
.into_iter()
.map(|mut tr| {
tr.col_sizes = Some(col_sizes.clone());
RenderNode::new(RenderNodeInfo::TableRow(tr))
})
.collect()
}
fn calc_size_estimate(&self) {
if self.num_columns == 0 {
self.size_estimate.set(Some(SizeEstimate {
size: 0,
min_width: 0,
}));
return;
}
let mut sizes: Vec<SizeEstimate> = vec![Default::default(); self.num_columns];
// For now, a simple estimate based on adding up sub-parts.
for row in self.rows() {
let mut colno = 0usize;
for cell in row.cells() {
let cellsize = cell.get_size_estimate();
for colnum in 0..cell.colspan {
sizes[colno + colnum].size += cellsize.size / cell.colspan;
sizes[colno + colnum].min_width = max(
sizes[colno + colnum].min_width / cell.colspan,
cellsize.min_width,
);
}
colno += cell.colspan;
}
}
let size = sizes.iter().map(|s| s.size).sum(); // Include borders?
let min_width = sizes.iter().map(|s| s.min_width).sum::<usize>() + self.num_columns - 1;
self.size_estimate.set(Some(SizeEstimate {
size: size,
min_width: min_width,
}));
}
/// Calculate and store (or return stored value) of estimated size
pub fn get_size_estimate(&self) -> SizeEstimate {
if self.size_estimate.get().is_none() {
self.calc_size_estimate();
}
self.size_estimate.get().unwrap()
}
}
/// The node-specific information distilled from the DOM.
#[derive(Debug)]
pub enum RenderNodeInfo {
/// Some text.
Text(String),
/// A group of nodes collected together.
Container(Vec<RenderNode>),
/// A link with contained nodes
Link(String, Vec<RenderNode>),
/// An emphasised region
Em(Vec<RenderNode>),
/// A strong region
Strong(Vec<RenderNode>),
/// A code region
Code(Vec<RenderNode>),
/// An image (title)
Img(String),
/// A block element with children
Block(Vec<RenderNode>),
/// A header (h1, h2, ...) with children
Header(usize, Vec<RenderNode>),
/// A Div element with children
Div(Vec<RenderNode>),
/// A preformatted region.
Pre(Vec<RenderNode>),
/// A blockquote
BlockQuote(Vec<RenderNode>),
/// An unordered list
Ul(Vec<RenderNode>),
/// An ordered list
Ol(i64, Vec<RenderNode>),
/// A line break
Break,
/// A table
Table(RenderTable),
    /// A set of table rows (from either <thead> or <tbody>)
TableBody(Vec<RenderTableRow>),
/// Table row (must only appear within a table body)
TableRow(RenderTableRow),
/// Table cell (must only appear within a table row)
TableCell(RenderTableCell),
/// Start of a named HTML fragment
FragStart(String),
}
/// Common fields from a node.
#[derive(Debug)]
pub struct RenderNode {
size_estimate: Cell<Option<SizeEstimate>>,
info: RenderNodeInfo,
}
impl RenderNode {
/// Create a node from the RenderNodeInfo.
pub fn new(info: RenderNodeInfo) -> RenderNode {
RenderNode {
size_estimate: Cell::new(None),
info: info,
}
}
/// Get a size estimate (~characters)
pub fn get_size_estimate(&self) -> SizeEstimate {
// If it's already calculated, then just return the answer.
if let Some(s) = self.size_estimate.get() {
return s;
};
use RenderNodeInfo::*;
// Otherwise, make an estimate.
let estimate = match self.info {
Text(ref t) | Img(ref t) => {
let len = t.trim().len();
SizeEstimate {
size: len,
min_width: if len > 0 { MIN_WIDTH } else { 0 },
}
}
Container(ref v)
| Link(_, ref v)
| Em(ref v)
| Strong(ref v)
| Code(ref v)
| Block(ref v)
| Div(ref v)
| Pre(ref v)
| BlockQuote(ref v)
| Ul(ref v)
| Ol(_, ref v) => v
.iter()
.map(RenderNode::get_size_estimate)
.fold(Default::default(), SizeEstimate::add),
Header(level, ref v) => v
.iter()
.map(RenderNode::get_size_estimate)
.fold(Default::default(), SizeEstimate::add)
.add(SizeEstimate {
size: 0,
min_width: MIN_WIDTH + level + 2,
}),
Break => SizeEstimate {
size: 1,
min_width: 1,
},
Table(ref t) => t.get_size_estimate(),
TableRow(_) | TableBody(_) | TableCell(_) => unimplemented!(),
FragStart(_) => Default::default(),
};
self.size_estimate.set(Some(estimate));
estimate
}
}
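
#[cfg(test)]
mod node_size_sketch {
    // Illustrates the estimates computed above: plain text contributes its
    // trimmed length (with a minimum width), and a header additionally reserves
    // room for its `## `-style prefix.
    use super::{RenderNode, RenderNodeInfo, MIN_WIDTH};

    #[test]
    fn text_and_header_estimates() {
        let text = RenderNode::new(RenderNodeInfo::Text("hello".into()));
        let est = text.get_size_estimate();
        assert_eq!(est.size, 5);
        assert_eq!(est.min_width, MIN_WIDTH);

        let header = RenderNode::new(RenderNodeInfo::Header(
            2,
            vec![RenderNode::new(RenderNodeInfo::Text("hello".into()))],
        ));
        let est = header.get_size_estimate();
        assert_eq!(est.size, 5);
        assert_eq!(est.min_width, MIN_WIDTH + 2 + 2);
    }
}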
fn precalc_size_estimate<'a>(node: &'a RenderNode) -> TreeMapResult<(), &'a RenderNode, ()> {
use RenderNodeInfo::*;
if node.size_estimate.get().is_some() {
return TreeMapResult::Nothing;
}
match node.info {
Text(_) | Img(_) | Break | FragStart(_) => {
let _ = node.get_size_estimate();
TreeMapResult::Nothing
}
Container(ref v)
| Link(_, ref v)
| Em(ref v)
| Strong(ref v)
| Code(ref v)
| Block(ref v)
| Div(ref v)
| Pre(ref v)
| BlockQuote(ref v)
| Ul(ref v)
| Ol(_, ref v)
| Header(_, ref v) => TreeMapResult::PendingChildren {
children: v.iter().collect(),
cons: Box::new(move |_, _cs| {
node.get_size_estimate();
None
}),
prefn: None,
postfn: None,
},
Table(ref t) => {
/* Return all the indirect children which are RenderNodes. */
let mut children = Vec::new();
for row in &t.rows {
for cell in &row.cells {
children.extend(cell.content.iter());
}
}
TreeMapResult::PendingChildren {
children: children,
cons: Box::new(move |_, _cs| {
node.get_size_estimate();
None
}),
prefn: None,
postfn: None,
}
}
TableRow(_) | TableBody(_) | TableCell(_) => unimplemented!(),
}
}
/// Make a Vec of RenderNodes from the children of a node.
fn children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
/* process children, but don't add anything */
let children = handle
.children
.borrow()
.iter()
.flat_map(|ch| dom_to_render_tree(ch.clone(), err_out))
.collect();
children
}
/// Make a Vec of RenderNodes from the `<li>` children of a node.
fn list_children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
let mut children = Vec::new();
for child in handle.children.borrow().iter() {
match child.data {
Element { ref name, .. } => match name.expanded() {
expanded_name!(html "li") => {
let li_children = children_to_render_nodes(child.clone(), err_out);
children.push(RenderNode::new(RenderNodeInfo::Block(li_children)));
}
_ => {}
},
Comment { .. } => {}
_ => {
html_trace!("Unhandled in list: {:?}\n", child);
}
}
}
children
}
/// Convert a table into a RenderNode
fn table_to_render_tree<'a, 'b, T: Write>(
handle: Handle,
_err_out: &'b mut T,
) -> TreeMapResult<'a, (), Handle, RenderNode> {
pending(handle, |_, rowset| {
let mut rows = vec![];
for bodynode in rowset {
if let RenderNodeInfo::TableBody(body) = bodynode.info {
rows.extend(body);
} else {
html_trace!("Found in table: {:?}", bodynode.info);
}
}
Some(RenderNode::new(RenderNodeInfo::Table(RenderTable::new(
rows,
))))
})
}
/// Add rows from a thead or tbody.
fn tbody_to_render_tree<'a, 'b, T: Write>(
handle: Handle,
_err_out: &'b mut T,
) -> TreeMapResult<'a, (), Handle, RenderNode> {
pending(handle, |_, rowchildren| {
let rows = rowchildren
.into_iter()
.flat_map(|rownode| {
if let RenderNodeInfo::TableRow(row) = rownode.info {
Some(row)
} else {
html_trace!(" [[tbody child: {:?}]]", rownode);
None
}
})
.collect();
Some(RenderNode::new(RenderNodeInfo::TableBody(rows)))
})
}
/// Convert a table row to a RenderTableRow
fn tr_to_render_tree<'a, 'b, T: Write>(
handle: Handle,
_err_out: &'b mut T,
) -> TreeMapResult<'a, (), Handle, RenderNode> {
pending(handle, |_, cellnodes| {
let cells = cellnodes
.into_iter()
.flat_map(|cellnode| {
if let RenderNodeInfo::TableCell(cell) = cellnode.info {
Some(cell)
} else {
html_trace!(" [[tr child: {:?}]]", cellnode);
None
}
})
.collect();
Some(RenderNode::new(RenderNodeInfo::TableRow(RenderTableRow {
cells,
col_sizes: None,
})))
})
}
/// Convert a single table cell to a render node.
fn td_to_render_tree<'a, 'b, T: Write>(
handle: Handle,
_err_out: &'b mut T,
) -> TreeMapResult<'a, (), Handle, RenderNode> {
let mut colspan = 1;
if let Element { ref attrs, .. } = handle.data {
for attr in attrs.borrow().iter() {
if &attr.name.local == "colspan" {
let v: &str = &*attr.value;
colspan = v.parse().unwrap_or(1);
}
}
}
pending(handle, move |_, children| {
Some(RenderNode::new(RenderNodeInfo::TableCell(
RenderTableCell {
colspan: colspan,
content: children,
size_estimate: Cell::new(None),
col_width: None,
},
)))
})
}
/// A reducer which combines results from mapping children into
/// the result for the current node. Takes a context and a
/// vector of results and returns a new result (or nothing).
type ResultReducer<'a, C, R> = dyn Fn(&mut C, Vec<R>) -> Option<R> + 'a;
/// A closure to call before processing a child node.
type ChildPreFn<C, N> = dyn Fn(&mut C, &N);
/// A closure to call after processing a child node,
/// before adding the result to the processed results
/// vector.
type ChildPostFn<C, R> = dyn Fn(&mut C, &R);
/// The result of trying to render one node.
enum TreeMapResult<'a, C, N, R> {
/// A completed result.
Finished(R),
/// Deferred completion - can be turned into a result
/// once the vector of children are processed.
PendingChildren {
children: Vec<N>,
cons: Box<ResultReducer<'a, C, R>>,
prefn: Option<Box<ChildPreFn<C, N>>>,
postfn: Option<Box<ChildPostFn<C, R>>>,
},
/// Nothing (e.g. a comment or other ignored element).
Nothing,
}
fn tree_map_reduce<'a, C, N, R, M>(context: &mut C, top: N, mut process_node: M) -> Option<R>
where
M: for<'c> FnMut(&'c mut C, N) -> TreeMapResult<'a, C, N, R>,
{
/// A node partially decoded, waiting for its children to
/// be processed.
struct PendingNode<'a, C, R, N> {
/// How to make the node once finished
construct: Box<ResultReducer<'a, C, R>>,
/// Called before processing each child
prefn: Option<Box<ChildPreFn<C, N>>>,
/// Called after processing each child
postfn: Option<Box<ChildPostFn<C, R>>>,
/// Children already processed
children: Vec<R>,
/// Iterator of child nodes not yet processed
to_process: std::vec::IntoIter<N>,
}
let mut pending_stack = vec![PendingNode {
// We only expect one child, which we'll just return.
construct: Box::new(|_, mut cs| cs.pop()),
prefn: None,
postfn: None,
children: Vec::new(),
to_process: vec![top].into_iter(),
}];
loop {
// Get the next child node to process
let next_node = pending_stack.last_mut().unwrap().to_process.next();
if let Some(h) = next_node {
pending_stack
.last_mut()
.unwrap()
.prefn
.as_ref()
.map(|ref f| f(context, &h));
match process_node(context, h) {
TreeMapResult::Finished(result) => {
pending_stack
.last_mut()
.unwrap()
.postfn
.as_ref()
.map(|ref f| f(context, &result));
pending_stack.last_mut().unwrap().children.push(result);
}
TreeMapResult::PendingChildren {
children,
cons,
prefn,
postfn,
} => {
pending_stack.push(PendingNode {
construct: cons,
prefn,
postfn,
children: Vec::new(),
to_process: children.into_iter(),
});
}
TreeMapResult::Nothing => {}
};
} else {
// No more children, so finally construct the parent.
let completed = pending_stack.pop().unwrap();
let reduced = (completed.construct)(context, completed.children);
if let Some(node) = reduced {
if let Some(parent) = pending_stack.last_mut() {
parent.postfn.as_ref().map(|ref f| f(context, &node));
parent.children.push(node);
} else {
// Finished the whole stack!
break Some(node);
}
} else {
/* Finished the stack, and have nothing */
if pending_stack.is_empty() {
break None;
}
}
}
}
}
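
#[cfg(test)]
mod tree_map_reduce_sketch {
    // A toy illustration of the explicit-stack traversal above: reduce a small
    // tree to the sum of its leaves without recursion. The `Toy` type exists
    // purely for this sketch.
    use super::{tree_map_reduce, TreeMapResult};

    enum Toy {
        Leaf(usize),
        Node(Vec<Toy>),
    }

    #[test]
    fn sums_leaves_without_recursion() {
        let tree = Toy::Node(vec![
            Toy::Leaf(1),
            Toy::Node(vec![Toy::Leaf(2), Toy::Leaf(3)]),
        ]);
        let total = tree_map_reduce(&mut (), tree, |_, node: Toy| match node {
            Toy::Leaf(n) => TreeMapResult::Finished(n),
            Toy::Node(children) => TreeMapResult::PendingChildren {
                children,
                cons: Box::new(|_: &mut (), kids: Vec<usize>| {
                    Some(kids.into_iter().sum::<usize>())
                }),
                prefn: None,
                postfn: None,
            },
        });
        assert_eq!(total, Some(6));
    }
}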
/// Convert a DOM tree or subtree into a render tree.
pub fn dom_to_render_tree<T: Write>(handle: Handle, err_out: &mut T) -> Option<RenderNode> {
html_trace!("### dom_to_render_tree: HTML: {:?}", handle);
let result = tree_map_reduce(&mut (), handle, |_, handle| {
process_dom_node(handle, err_out)
});
html_trace!("### dom_to_render_tree: out= {:#?}", result);
result
}
fn pending<'a, F>(handle: Handle, f: F) -> TreeMapResult<'a, (), Handle, RenderNode>
where
//for<'a> F: Fn(&'a mut C, Vec<RenderNode>) -> Option<RenderNode>+'static
for<'r> F: Fn(&'r mut (), std::vec::Vec<RenderNode>) -> Option<RenderNode> + 'static,
{
TreeMapResult::PendingChildren {
children: handle.children.borrow().clone(),
cons: Box::new(f),
prefn: None,
postfn: None,
}
}
/// Prepend a FragmentStart (or analogous) marker to an existing
/// RenderNode.
fn prepend_marker(prefix: RenderNode, mut orig: RenderNode) -> RenderNode {
use RenderNodeInfo::*;
html_trace!("prepend_marker({:?}, {:?})", prefix, orig);
match orig.info {
// For block elements such as Block and Div, we need to insert
// the node at the front of their children array, otherwise
// the renderer is liable to drop the fragment start marker
// _before_ the new line indicating the end of the previous
// paragraph.
//
// For Container, we do the same thing just to make the data
// less pointlessly nested.
Block(ref mut children)
| Div(ref mut children)
| Pre(ref mut children)
| BlockQuote(ref mut children)
| Container(ref mut children)
| TableCell(RenderTableCell {
content: ref mut children,
..
}) => {
children.insert(0, prefix);
// Now return orig, but we do that outside the match so
// that we've given back the borrowed ref 'children'.
}
// For table rows and tables, push down if there's any content.
TableRow(ref mut rrow) => {
// If the row is empty, then there isn't really anything
// to attach the fragment start to.
if rrow.cells.len() > 0 {
rrow.cells[0].content.insert(0, prefix);
}
}
Table(ref mut rtable) => {
            // If the table has no rows, then there isn't really anything
            // to attach the fragment start to.
if rtable.rows.len() > 0 {
let rrow = &mut rtable.rows[0];
if rrow.cells.len() > 0 {
rrow.cells[0].content.insert(0, prefix);
}
}
}
// For anything else, just make a new Container with the
// prefix node and the original one.
_ => {
let result = RenderNode::new(Container(vec![prefix, orig]));
html_trace!("prepend_marker() -> {:?}", result);
return result;
}
}
html_trace!("prepend_marker() -> {:?}", &orig);
orig
}
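
#[cfg(test)]
mod prepend_marker_sketch {
    // Illustrates the two behaviours described above: block-like nodes get the
    // marker pushed into their children, while anything else is wrapped in a
    // new Container together with the marker.
    use super::{prepend_marker, RenderNode, RenderNodeInfo};

    #[test]
    fn wraps_inline_nodes_in_a_container() {
        let marker = RenderNode::new(RenderNodeInfo::FragStart("top".into()));
        let text = RenderNode::new(RenderNodeInfo::Text("hello".into()));
        match prepend_marker(marker, text).info {
            RenderNodeInfo::Container(ref children) => assert_eq!(children.len(), 2),
            _ => panic!("expected a Container"),
        }
    }

    #[test]
    fn pushes_the_marker_into_block_children() {
        let marker = RenderNode::new(RenderNodeInfo::FragStart("sec".into()));
        let block = RenderNode::new(RenderNodeInfo::Block(vec![RenderNode::new(
            RenderNodeInfo::Text("para".into()),
        )]));
        match prepend_marker(marker, block).info {
            RenderNodeInfo::Block(ref children) => assert_eq!(children.len(), 2),
            _ => panic!("expected the Block to be returned"),
        }
    }
}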
fn process_dom_node<'a, 'b, T: Write>(
handle: Handle,
err_out: &'b mut T,
) -> TreeMapResult<'a, (), Handle, RenderNode> {
use RenderNodeInfo::*;
use TreeMapResult::*;
match handle.clone().data {
Document => pending(handle, |&mut (), cs| Some(RenderNode::new(Container(cs)))),
Comment { .. } => Nothing,
Element {
ref name,
ref attrs,
..
} => {
let mut frag_from_name_attr = false;
let result = match name.expanded() {
expanded_name!(html "html")
| expanded_name!(html "span")
| expanded_name!(html "body") => {
/* process children, but don't add anything */
pending(handle, |_, cs| Some(RenderNode::new(Container(cs))))
}
expanded_name!(html "link")
| expanded_name!(html "meta")
| expanded_name!(html "hr")
| expanded_name!(html "script")
| expanded_name!(html "style")
| expanded_name!(html "head") => {
/* Ignore the head and its children */
Nothing
}
expanded_name!(html "a") => {
let borrowed = attrs.borrow();
let mut target = None;
frag_from_name_attr = true;
for attr in borrowed.iter() {
if &attr.name.local == "href" {
target = Some(&*attr.value);
break;
}
}
PendingChildren {
children: handle.children.borrow().clone(),
cons: if let Some(href) = target {
// We need the closure to own the string it's going to use.
// Unfortunately that means we ideally want FnOnce; but
// that doesn't yet work in a Box. Box<FnBox()> does, but
// is unstable. So we'll just move a string in and clone
// it on use.
let href: String = href.into();
Box::new(move |_, cs| Some(RenderNode::new(Link(href.clone(), cs))))
} else {
Box::new(|_, cs| Some(RenderNode::new(Container(cs))))
},
prefn: None,
postfn: None,
}
}
expanded_name!(html "em") => pending(handle, |_, cs| Some(RenderNode::new(Em(cs)))),
expanded_name!(html "strong") => {
pending(handle, |_, cs| Some(RenderNode::new(Strong(cs))))
}
expanded_name!(html "code") => {
pending(handle, |_, cs| Some(RenderNode::new(Code(cs))))
}
expanded_name!(html "img") => {
let borrowed = attrs.borrow();
let mut title = None;
for attr in borrowed.iter() {
if &attr.name.local == "alt" {
title = Some(&*attr.value);
break;
}
}
if let Some(title) = title {
Finished(RenderNode::new(Img(title.into())))
} else {
Nothing
}
}
expanded_name!(html "h1")
| expanded_name!(html "h2")
| expanded_name!(html "h3")
| expanded_name!(html "h4") => {
let level: usize = name.local[1..].parse().unwrap();
pending(handle, move |_, cs| {
Some(RenderNode::new(Header(level, cs)))
})
}
expanded_name!(html "p") => {
pending(handle, |_, cs| Some(RenderNode::new(Block(cs))))
}
expanded_name!(html "div") => {
pending(handle, |_, cs| Some(RenderNode::new(Div(cs))))
}
expanded_name!(html "pre") => {
pending(handle, |_, cs| Some(RenderNode::new(Pre(cs))))
}
expanded_name!(html "br") => Finished(RenderNode::new(Break)),
expanded_name!(html "table") => table_to_render_tree(handle.clone(), err_out),
expanded_name!(html "thead") | expanded_name!(html "tbody") => {
tbody_to_render_tree(handle.clone(), err_out)
}
expanded_name!(html "tr") => tr_to_render_tree(handle.clone(), err_out),
expanded_name!(html "th") | expanded_name!(html "td") => {
td_to_render_tree(handle.clone(), err_out)
}
expanded_name!(html "blockquote") => {
pending(handle, |_, cs| Some(RenderNode::new(BlockQuote(cs))))
}
expanded_name!(html "ul") => Finished(RenderNode::new(Ul(
list_children_to_render_nodes(handle.clone(), err_out),
))),
expanded_name!(html "ol") => {
let borrowed = attrs.borrow();
let mut start = 1;
for attr in borrowed.iter() {
if &attr.name.local == "start" {
start = attr.value.parse().ok().unwrap_or(1);
break;
}
}
Finished(RenderNode::new(Ol(
start,
list_children_to_render_nodes(handle.clone(), err_out),
)))
}
_ => {
html_trace!("Unhandled element: {:?}\n", name.local);
pending(handle, |_, cs| Some(RenderNode::new(Container(cs))))
//None
}
};
let mut fragment = None;
let borrowed = attrs.borrow();
for attr in borrowed.iter() {
if &attr.name.local == "id" || (frag_from_name_attr && &attr.name.local == "name") {
fragment = Some(attr.value.to_string());
break;
}
}
if let Some(fragname) = fragment {
match result {
Finished(node) => {
Finished(prepend_marker(RenderNode::new(FragStart(fragname)), node))
}
Nothing => Finished(RenderNode::new(FragStart(fragname))),
PendingChildren {
children,
cons,
prefn,
postfn,
} => {
let fragname: String = fragname.into();
PendingChildren {
children: children,
prefn: prefn,
postfn: postfn,
cons: Box::new(move |ctx, ch| {
let fragnode = RenderNode::new(FragStart(fragname.clone()));
match cons(ctx, ch) {
None => Some(fragnode),
Some(node) => Some(prepend_marker(fragnode, node)),
}
}),
}
}
}
} else {
result
}
}
markup5ever_rcdom::NodeData::Text { contents: ref tstr } => {
Finished(RenderNode::new(Text((&*tstr.borrow()).into())))
}
_ => {
// NodeData doesn't have a Debug impl.
write!(err_out, "Unhandled node type.\n").unwrap();
Nothing
}
}
}
/// Context to use during tree parsing.
/// This mainly gives access to a Renderer, but needs to be able to push
/// new ones on for nested structures.
struct BuilderStack<R: Renderer> {
builders: Vec<R>,
}
impl<R: Renderer> BuilderStack<R> {
pub fn new(builder: R) -> BuilderStack<R> {
BuilderStack {
builders: vec![builder],
}
}
/// Push a new builder onto the stack
pub fn push(&mut self, builder: R) {
self.builders.push(builder);
}
/// Pop off the top builder and return it.
/// Panics if empty
pub fn pop(&mut self) -> R {
self.builders.pop().unwrap()
}
/// Pop off the only builder and return it.
    /// Panics if there isn't exactly one available.
pub fn into_inner(mut self) -> R {
assert_eq!(self.builders.len(), 1);
self.builders.pop().unwrap()
}
}
impl<R: Renderer> Deref for BuilderStack<R> {
type Target = R;
fn deref(&self) -> &R {
self.builders.last().expect("Underflow in BuilderStack")
}
}
impl<R: Renderer> DerefMut for BuilderStack<R> {
fn deref_mut(&mut self) -> &mut R {
self.builders.last_mut().expect("Underflow in BuilderStack")
}
}
fn render_tree_to_string<T: Write, R: Renderer>(
builder: R,
tree: RenderNode,
err_out: &mut T,
) -> R {
/* Phase 1: get size estimates. */
tree_map_reduce(&mut (), &tree, |_, node| precalc_size_estimate(&node));
/* Phase 2: actually render. */
let mut bs = BuilderStack::new(builder);
tree_map_reduce(&mut bs, tree, |builders, node| {
do_render_node(builders, node, err_out)
});
bs.into_inner()
}
fn pending2<
'a,
R: Renderer,
F: Fn(&mut BuilderStack<R>, Vec<Option<R>>) -> Option<Option<R>> + 'static,
>(
children: Vec<RenderNode>,
f: F,
) -> TreeMapResult<'a, BuilderStack<R>, RenderNode, Option<R>> {
TreeMapResult::PendingChildren {
children: children,
cons: Box::new(f),
prefn: None,
postfn: None,
}
}
fn do_render_node<'a, 'b, T: Write, R: Renderer>(
builder: &mut BuilderStack<R>,
tree: RenderNode,
err_out: &'b mut T,
) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>> {
html_trace!("do_render_node({:?}", tree);
use RenderNodeInfo::*;
use TreeMapResult::*;
match tree.info {
Text(ref tstr) => {
builder.add_inline_text(tstr);
Finished(None)
}
Container(children) => pending2(children, |_, _| Some(None)),
Link(href, children) => {
builder.start_link(&href);
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.end_link();
Some(None)
})
}
Em(children) => {
builder.start_emphasis();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.end_emphasis();
Some(None)
})
}
Strong(children) => {
builder.start_strong();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.end_strong();
Some(None)
})
}
Code(children) => {
builder.start_code();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.end_code();
Some(None)
})
}
Img(title) => {
builder.add_image(&title);
Finished(None)
}
Block(children) => {
builder.start_block();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.end_block();
Some(None)
})
}
Header(level, children) => {
let min_width = max(builder.width(), 1 + level + 1);
let sub_builder = builder.new_sub_renderer(min_width - (1 + level));
builder.push(sub_builder);
pending2(children, move |builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.pop();
let qs: String = "#".repeat(level) + " ";
builder.start_block();
builder.append_subrender(sub_builder, repeat(&qs[..]));
builder.end_block();
Some(None)
})
}
Div(children) => {
builder.new_line();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.new_line();
Some(None)
})
}
Pre(children) => {
builder.new_line();
builder.start_pre();
pending2(children, |builder: &mut BuilderStack<R>, _| {
builder.new_line();
builder.end_pre();
Some(None)
})
}
BlockQuote(children) => {
let sub_builder = builder.new_sub_renderer(builder.width() - 2);
builder.push(sub_builder);
pending2(children, |builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.pop();
builder.start_block();
builder.append_subrender(sub_builder, repeat("> "));
builder.end_block();
Some(None)
})
}
Ul(items) => {
builder.start_block();
TreeMapResult::PendingChildren {
children: items,
cons: Box::new(|_, _| Some(None)),
prefn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.new_sub_renderer(builder.width() - 2);
builder.push(sub_builder);
})),
postfn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.pop();
builder.append_subrender(sub_builder, once("* ").chain(repeat(" ")));
})),
}
}
Ol(start, items) => {
builder.start_block();
let num_items = items.len();
            // The widest prefix could be at either end if the start is negative.
let min_number = start;
// Assumption: num_items can't overflow isize.
let max_number = start + (num_items as i64) - 1;
let prefix_width_min = format!("{}", min_number).len() + 2;
let prefix_width_max = format!("{}", max_number).len() + 2;
let prefix_width = max(prefix_width_min, prefix_width_max);
let prefixn = format!("{: <width$}", "", width = prefix_width);
let i: Cell<_> = Cell::new(start);
TreeMapResult::PendingChildren {
children: items,
cons: Box::new(|_, _| Some(None)),
prefn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.new_sub_renderer(builder.width() - prefix_width);
builder.push(sub_builder);
})),
postfn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.pop();
let prefix1 = format!("{}.", i.get());
let prefix1 = format!("{: <width$}", prefix1, width = prefix_width);
builder.append_subrender(
sub_builder,
once(prefix1.as_str()).chain(repeat(prefixn.as_str())),
);
i.set(i.get() + 1);
})),
}
}
Break => {
builder.new_line_hard();
Finished(None)
}
Table(tab) => render_table_tree(builder.deref_mut(), tab, err_out),
TableRow(row) => render_table_row(builder.deref_mut(), row, err_out),
TableBody(_) => unimplemented!("Unexpected TableBody while rendering"),
TableCell(cell) => render_table_cell(builder.deref_mut(), cell, err_out),
FragStart(fragname) => {
builder.record_frag_start(&fragname);
Finished(None)
}
}
}
fn render_table_tree<T: Write, R: Renderer>(
builder: &mut R,
table: RenderTable,
_err_out: &mut T,
) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>> {
/* Now lay out the table. */
let num_columns = table.num_columns;
/* Heuristic: scale the column widths according to how much content there is. */
let mut col_sizes: Vec<SizeEstimate> = vec![Default::default(); num_columns];
for row in table.rows() {
let mut colno = 0;
for cell in row.cells() {
// FIXME: get_size_estimate is still recursive.
let mut estimate = cell.get_size_estimate();
// If the cell has a colspan>1, then spread its size between the
// columns.
estimate.size /= cell.colspan;
estimate.min_width /= cell.colspan;
for i in 0..cell.colspan {
col_sizes[colno + i] = (col_sizes[colno + i]).add(estimate);
}
colno += cell.colspan;
}
}
let tot_size: usize = col_sizes.iter().map(|est| est.size).sum();
let width = builder.width();
let mut col_widths: Vec<usize> = col_sizes
.iter()
.map(|sz| {
if sz.size == 0 {
0
} else {
max(sz.size * width / tot_size, sz.min_width)
}
})
.collect();
/* The minimums may have put the total width too high */
while col_widths.iter().cloned().sum::<usize>() > width {
let (i, _) = col_widths
.iter()
.cloned()
.enumerate()
.max_by_key(|&(colno, width)| {
(
width.saturating_sub(col_sizes[colno].min_width),
width,
usize::max_value() - colno,
)
})
.unwrap();
col_widths[i] -= 1;
}
if !col_widths.is_empty() {
        // Slight fudge; we're not drawing the extreme edges, so one of the
        // columns gets a free character cell from not having a border.
        // Make it the last one.
let last = col_widths.len() - 1;
col_widths[last] += 1;
}
builder.start_block();
builder.add_horizontal_border();
TreeMapResult::PendingChildren {
children: table.into_rows(col_widths),
cons: Box::new(|_, _| Some(None)),
prefn: Some(Box::new(|_, _| {})),
postfn: Some(Box::new(|_, _| {})),
}
}
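/// Renders one table row: each cell is rendered into its own sub-renderer of
/// the cell's column width, and the finished columns are then joined with
/// vertical borders.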
fn render_table_row<T: Write, R: Renderer>(
_builder: &mut R,
row: RenderTableRow,
_err_out: &mut T,
) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>> {
TreeMapResult::PendingChildren {
children: row.into_cells(),
cons: Box::new(|builders, children| {
let children: Vec<_> = children.into_iter().map(Option::unwrap).collect();
if children.iter().any(|c| !c.empty()) {
builders.append_columns_with_borders(children, true);
}
Some(None)
}),
prefn: Some(Box::new(|builder: &mut BuilderStack<R>, node| {
if let RenderNodeInfo::TableCell(ref cell) = node.info {
let sub_builder = builder.new_sub_renderer(cell.col_width.unwrap());
builder.push(sub_builder);
} else {
panic!()
}
})),
postfn: Some(Box::new(|_builder: &mut BuilderStack<R>, _| {})),
}
}
fn render_table_cell<T: Write, R: Renderer>(
_builder: &mut R,
cell: RenderTableCell,
_err_out: &mut T,
) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>> {
pending2(cell.content, |builder: &mut BuilderStack<R>, _| {
let sub_builder = builder.pop();
Some(Some(sub_builder))
})
}
/// Reads HTML from `input`, decorates it using `decorator`, and
/// returns a `String` with text wrapped to `width` columns.
pub fn from_read_with_decorator<R, D>(mut input: R, width: usize, decorator: D) -> String
where
R: io::Read,
D: TextDecorator,
{
let opts = ParseOpts {
tree_builder: TreeBuilderOpts {
drop_doctype: true,
..Default::default()
},
..Default::default()
};
let dom = parse_document(RcDom::default(), opts)
.from_utf8()
.read_from(&mut input)
.unwrap();
let builder = TextRenderer::new(width, decorator);
let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard {}).unwrap();
let builder = render_tree_to_string(builder, render_tree, &mut Discard {});
builder.into_string()
}
/// Reads HTML from `input`, and returns a `String` with text wrapped to
/// `width` columns.
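///
/// A short usage sketch (assuming this crate is available under the name
/// `html2text`):
///
/// ```rust
/// # use html2text::from_read;
/// let html = b"<p>Hello, <em>world</em>!</p>";
/// let text = from_read(&html[..], 20);
/// assert!(text.contains("Hello"));
/// ```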
pub fn from_read<R>(input: R, width: usize) -> String
where
R: io::Read,
{
let decorator = PlainDecorator::new();
from_read_with_decorator(input, width, decorator)
}
/// Reads HTML from `input`, and returns text wrapped to `width` columns.
/// The text is returned as a `Vec<TaggedLine<_>>`; the annotations are vectors
/// of `RichAnnotation`. The "outer" annotation comes first in the `Vec`.
pub fn from_read_rich<R>(mut input: R, width: usize) -> Vec<TaggedLine<Vec<RichAnnotation>>>
where
R: io::Read,
{
let opts = ParseOpts {
tree_builder: TreeBuilderOpts {
drop_doctype: true,
..Default::default()
},
..Default::default()
};
let dom = parse_document(RcDom::default(), opts)
.from_utf8()
.read_from(&mut input)
.unwrap();
let decorator = RichDecorator::new();
let builder = TextRenderer::new(width, decorator);
let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard {}).unwrap();
let builder = render_tree_to_string(builder, render_tree, &mut Discard {});
builder
.into_lines()
.into_iter()
.map(RenderLine::into_tagged_line)
.collect()
}
#[cfg(test)]
mod tests;
| 33.809661 | 100 | 0.51529 |
896f0d720e01bc7a4c20076e9807ab53f7330315 | 436 | use hello_macro::HelloMacro;
use hello_macro_derive::HelloMacro;
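/// A simplified `vec!`-style declarative macro, shown here to contrast
/// `macro_rules!` macros with the `HelloMacro` derive macro used below.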
#[macro_export]
macro_rules! vec2 {
( $( $x:expr ),* ) => {
{
let mut temp_vec = Vec::new();
$(
temp_vec.push($x);
)*
temp_vec
}
};
}
#[derive(HelloMacro)]
struct Pancakes;
pub fn run() {
let v = vec2![1, 2, 3];
println!("v = {:?}", v);
Pancakes::hello_macro();
}
| 15.034483 | 42 | 0.470183 |
03844b83d7988a402e6ff6b03de040b2419a5314 | 7,302 | // Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0.
use crate::time::{monotonic_raw_now, Instant};
use std::cmp::{Ord, Ordering, Reverse};
use std::collections::BinaryHeap;
use std::sync::{mpsc, Arc};
use std::thread::Builder;
use std::time::Duration;
use time::Timespec;
use tokio_executor::park::ParkThread;
use tokio_timer::{self, clock::Clock, clock::Now, timer::Handle, Delay};
pub struct Timer<T> {
pending: BinaryHeap<Reverse<TimeoutTask<T>>>,
}
impl<T> Timer<T> {
pub fn new(capacity: usize) -> Self {
Timer {
pending: BinaryHeap::with_capacity(capacity),
}
}
/// Adds a periodic task into the `Timer`.
pub fn add_task(&mut self, timeout: Duration, task: T) {
let task = TimeoutTask {
next_tick: Instant::now() + timeout,
task,
};
self.pending.push(Reverse(task));
}
/// Gets the next `timeout` from the timer.
pub fn next_timeout(&mut self) -> Option<Instant> {
self.pending.peek().map(|task| task.0.next_tick)
}
/// Pops a `TimeoutTask` from the `Timer`, which should be ticked before `instant`.
/// Returns `None` if no tasks should be ticked any more.
///
    /// The normal use case is to keep calling `pop_task_before` until it returns
    /// `None`, in order to retrieve all available events.
pub fn pop_task_before(&mut self, instant: Instant) -> Option<T> {
if self
.pending
.peek()
.map_or(false, |t| t.0.next_tick <= instant)
{
return self.pending.pop().map(|t| t.0.task);
}
None
}
}
#[derive(Debug)]
struct TimeoutTask<T> {
next_tick: Instant,
task: T,
}
impl<T> PartialEq for TimeoutTask<T> {
fn eq(&self, other: &TimeoutTask<T>) -> bool {
self.next_tick == other.next_tick
}
}
impl<T> Eq for TimeoutTask<T> {}
impl<T> PartialOrd for TimeoutTask<T> {
fn partial_cmp(&self, other: &TimeoutTask<T>) -> Option<Ordering> {
self.next_tick.partial_cmp(&other.next_tick)
}
}
impl<T> Ord for TimeoutTask<T> {
fn cmp(&self, other: &TimeoutTask<T>) -> Ordering {
// TimeoutTask.next_tick must have same type of instants.
self.partial_cmp(other).unwrap()
}
}
lazy_static! {
pub static ref GLOBAL_TIMER_HANDLE: Handle = start_global_timer();
}
fn start_global_timer() -> Handle {
let (tx, rx) = mpsc::channel();
Builder::new()
.name(thd_name!("timer"))
.spawn(move || {
tikv_alloc::add_thread_memory_accessor();
let mut timer = tokio_timer::Timer::default();
tx.send(timer.handle()).unwrap();
loop {
timer.turn(None).unwrap();
}
})
.unwrap();
rx.recv().unwrap()
}
/// A struct that marks the *zero* time.
///
/// A *zero* time can be any time, as what it represents is an `Instant`,
/// which is opaque.
struct TimeZero {
/// An arbitrary time used as the zero time.
///
/// Note that `zero` doesn't have to be related to `steady_time_point`, as what's
/// observed here is elapsed time instead of time point.
zero: std::time::Instant,
/// A base time point.
///
/// The source of time point should grow steady.
steady_time_point: Timespec,
}
/// A clock that produces time in a steady speed.
///
/// Time produced by the clock is not affected by clock jump or time adjustment.
/// Internally it uses CLOCK_MONOTONIC_RAW to get a steady time source.
///
/// `Instant`s produced by this clock can't be compared or used to calculate elapsed
/// time unless they are produced using the same zero time.
#[derive(Clone)]
pub struct SteadyClock {
zero: Arc<TimeZero>,
}
lazy_static! {
static ref STEADY_CLOCK: SteadyClock = SteadyClock {
zero: Arc::new(TimeZero {
zero: std::time::Instant::now(),
steady_time_point: monotonic_raw_now(),
}),
};
}
impl Default for SteadyClock {
#[inline]
fn default() -> SteadyClock {
STEADY_CLOCK.clone()
}
}
impl Now for SteadyClock {
#[inline]
fn now(&self) -> std::time::Instant {
let n = monotonic_raw_now();
let dur = Instant::elapsed_duration(n, self.zero.steady_time_point);
self.zero.zero + dur
}
}
/// A timer that creates steady delays.
///
/// Delay created by this timer will not be affected by time adjustment.
#[derive(Clone)]
pub struct SteadyTimer {
clock: SteadyClock,
handle: Handle,
}
impl SteadyTimer {
/// Creates a delay future that will be notified after the given duration.
pub fn delay(&self, dur: Duration) -> Delay {
self.handle.delay(self.clock.now() + dur)
}
}
lazy_static! {
static ref GLOBAL_STEADY_TIMER: SteadyTimer = start_global_steady_timer();
}
impl Default for SteadyTimer {
#[inline]
fn default() -> SteadyTimer {
GLOBAL_STEADY_TIMER.clone()
}
}
fn start_global_steady_timer() -> SteadyTimer {
let (tx, rx) = mpsc::channel();
let clock = SteadyClock::default();
let clock_ = clock.clone();
Builder::new()
.name(thd_name!("steady-timer"))
.spawn(move || {
let c = Clock::new_with_now(clock_);
let mut timer = tokio_timer::Timer::new_with_now(ParkThread::new(), c);
tx.send(timer.handle()).unwrap();
loop {
timer.turn(None).unwrap();
}
})
.unwrap();
SteadyTimer {
clock,
handle: rx.recv().unwrap(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::compat::Future01CompatExt;
use futures::executor::block_on;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum Task {
A,
B,
C,
}
#[test]
fn test_timer() {
let mut timer = Timer::new(10);
timer.add_task(Duration::from_millis(20), Task::A);
timer.add_task(Duration::from_millis(150), Task::C);
timer.add_task(Duration::from_millis(100), Task::B);
assert_eq!(timer.pending.len(), 3);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::A);
assert_eq!(timer.pop_task_before(tick_time), None);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::B);
assert_eq!(timer.pop_task_before(tick_time), None);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::C);
assert_eq!(timer.pop_task_before(tick_time), None);
}
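    // An illustrative extra test (not from the original source): it exercises
    // the drain pattern described on `pop_task_before`, i.e. keep popping
    // until `None` to collect every task due before a given instant.
    #[test]
    fn test_timer_drain_until_none() {
        let mut timer = Timer::new(4);
        timer.add_task(Duration::from_millis(10), Task::A);
        timer.add_task(Duration::from_millis(20), Task::B);
        timer.add_task(Duration::from_millis(500), Task::C);
        let deadline = Instant::now() + Duration::from_millis(100);
        let mut due = Vec::new();
        while let Some(task) = timer.pop_task_before(deadline) {
            due.push(task);
        }
        assert_eq!(due, vec![Task::A, Task::B]);
        // Task::C remains queued because its tick lies after `deadline`.
        assert_eq!(timer.pop_task_before(deadline), None);
        assert!(timer.next_timeout().is_some());
    }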
#[test]
fn test_global_timer() {
let handle = super::GLOBAL_TIMER_HANDLE.clone();
let delay =
handle.delay(::std::time::Instant::now() + std::time::Duration::from_millis(100));
let timer = Instant::now();
block_on(delay.compat()).unwrap();
assert!(timer.elapsed() >= Duration::from_millis(100));
}
#[test]
fn test_global_steady_timer() {
let t = SteadyTimer::default();
let timer = t.clock.now();
let delay = t.delay(Duration::from_millis(100));
block_on(delay.compat()).unwrap();
assert!(timer.elapsed() >= Duration::from_millis(100));
}
}
| 28.523438 | 94 | 0.607779 |
d587344d6549b5d6b668e02c92aeb403613c5663 | 32,201 | use crate::{CoreStage, Plugin, PluginGroup, PluginGroupBuilder, StartupSchedule, StartupStage};
pub use bevy_derive::AppLabel;
use bevy_ecs::{
event::{Event, Events},
prelude::{FromWorld, IntoExclusiveSystem},
schedule::{
IntoSystemDescriptor, Schedule, ShouldRun, Stage, StageLabel, State, StateData, SystemSet,
SystemStage,
},
system::Resource,
world::World,
};
use bevy_tasks::{AsyncComputeTaskPool, ComputeTaskPool, IoTaskPool};
use bevy_utils::{tracing::debug, HashMap};
use std::fmt::Debug;
#[cfg(feature = "trace")]
use bevy_utils::tracing::info_span;
bevy_utils::define_label!(AppLabel);
#[allow(clippy::needless_doctest_main)]
/// A container of app logic and data.
///
/// Bundles together the necessary elements like [`World`] and [`Schedule`] to create
/// an ECS-based application. It also stores a pointer to a [runner function](Self::set_runner).
/// The runner is responsible for managing the application's event loop and applying the
/// [`Schedule`] to the [`World`] to drive application logic.
///
/// # Examples
///
/// Here is a simple "Hello World" Bevy app:
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// fn main() {
/// App::new()
/// .add_system(hello_world_system)
/// .run();
/// }
///
/// fn hello_world_system() {
/// println!("hello world");
/// }
/// ```
pub struct App {
/// The main ECS [`World`] of the [`App`].
/// This stores and provides access to all the main data of the application.
/// The systems of the [`App`] will run using this [`World`].
/// If additional separate [`World`]-[`Schedule`] pairs are needed, you can use [`sub_app`](App::add_sub_app)s.
pub world: World,
/// The [runner function](Self::set_runner) is primarily responsible for managing
/// the application's event loop and advancing the [`Schedule`].
/// Typically, it is not configured manually, but set by one of Bevy's built-in plugins.
/// See `bevy::winit::WinitPlugin` and [`ScheduleRunnerPlugin`](crate::schedule_runner::ScheduleRunnerPlugin).
pub runner: Box<dyn Fn(App)>,
/// A container of [`Stage`]s set to be run in a linear order.
pub schedule: Schedule,
sub_apps: HashMap<Box<dyn AppLabel>, SubApp>,
}
/// Each `SubApp` has its own [`Schedule`] and [`World`], enabling a separation of concerns.
struct SubApp {
app: App,
runner: Box<dyn Fn(&mut World, &mut App)>,
}
impl Default for App {
fn default() -> Self {
let mut app = App::empty();
#[cfg(feature = "bevy_reflect")]
app.init_resource::<bevy_reflect::TypeRegistryArc>();
app.add_default_stages()
.add_event::<AppExit>()
.add_system_to_stage(CoreStage::Last, World::clear_trackers.exclusive_system());
#[cfg(feature = "bevy_ci_testing")]
{
crate::ci_testing::setup_app(&mut app);
}
app
}
}
impl App {
/// Creates a new [`App`] with some default structure to enable core engine features.
/// This is the preferred constructor for most use cases.
pub fn new() -> App {
App::default()
}
/// Creates a new empty [`App`] with minimal default configuration.
///
/// This constructor should be used if you wish to provide a custom schedule, exit handling, cleanup, etc.
pub fn empty() -> App {
Self {
world: Default::default(),
schedule: Default::default(),
runner: Box::new(run_once),
sub_apps: HashMap::default(),
}
}
/// Advances the execution of the [`Schedule`] by one cycle.
///
/// This method also updates sub apps.
///
/// See [`add_sub_app`](Self::add_sub_app) and [`run_once`](Schedule::run_once) for more details.
pub fn update(&mut self) {
#[cfg(feature = "trace")]
let _bevy_frame_update_span = info_span!("frame").entered();
self.schedule.run(&mut self.world);
for sub_app in self.sub_apps.values_mut() {
(sub_app.runner)(&mut self.world, &mut sub_app.app);
}
}
/// Starts the application by calling the app's [runner function](Self::set_runner).
///
/// Finalizes the [`App`] configuration. For general usage, see the example on the item
/// level documentation.
pub fn run(&mut self) {
#[cfg(feature = "trace")]
let _bevy_app_run_span = info_span!("bevy_app").entered();
let mut app = std::mem::replace(self, App::empty());
let runner = std::mem::replace(&mut app.runner, Box::new(run_once));
(runner)(app);
}
/// Adds a [`Stage`] with the given `label` to the last position of the app's
/// [`Schedule`].
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_stage("my_stage", SystemStage::parallel());
/// ```
pub fn add_stage<S: Stage>(&mut self, label: impl StageLabel, stage: S) -> &mut Self {
self.schedule.add_stage(label, stage);
self
}
/// Adds a [`Stage`] with the given `label` to the app's [`Schedule`], located
/// immediately after the stage labeled by `target`.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_stage_after(CoreStage::Update, "my_stage", SystemStage::parallel());
/// ```
pub fn add_stage_after<S: Stage>(
&mut self,
target: impl StageLabel,
label: impl StageLabel,
stage: S,
) -> &mut Self {
self.schedule.add_stage_after(target, label, stage);
self
}
/// Adds a [`Stage`] with the given `label` to the app's [`Schedule`], located
/// immediately before the stage labeled by `target`.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_stage_before(CoreStage::Update, "my_stage", SystemStage::parallel());
/// ```
pub fn add_stage_before<S: Stage>(
&mut self,
target: impl StageLabel,
label: impl StageLabel,
stage: S,
) -> &mut Self {
self.schedule.add_stage_before(target, label, stage);
self
}
/// Adds a [`Stage`] with the given `label` to the last position of the
/// [startup schedule](Self::add_default_stages).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_startup_stage("my_startup_stage", SystemStage::parallel());
/// ```
pub fn add_startup_stage<S: Stage>(&mut self, label: impl StageLabel, stage: S) -> &mut Self {
self.schedule
.stage(StartupSchedule, |schedule: &mut Schedule| {
schedule.add_stage(label, stage)
});
self
}
/// Adds a [startup stage](Self::add_default_stages) with the given `label`, immediately
/// after the stage labeled by `target`.
///
/// The `target` label must refer to a stage inside the startup schedule.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_startup_stage_after(
/// StartupStage::Startup,
/// "my_startup_stage",
/// SystemStage::parallel()
/// );
/// ```
pub fn add_startup_stage_after<S: Stage>(
&mut self,
target: impl StageLabel,
label: impl StageLabel,
stage: S,
) -> &mut Self {
self.schedule
.stage(StartupSchedule, |schedule: &mut Schedule| {
schedule.add_stage_after(target, label, stage)
});
self
}
/// Adds a [startup stage](Self::add_default_stages) with the given `label`, immediately
/// before the stage labeled by `target`.
///
/// The `target` label must refer to a stage inside the startup schedule.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// # let mut app = App::new();
/// #
/// app.add_startup_stage_before(
/// StartupStage::Startup,
/// "my_startup_stage",
/// SystemStage::parallel()
/// );
/// ```
pub fn add_startup_stage_before<S: Stage>(
&mut self,
target: impl StageLabel,
label: impl StageLabel,
stage: S,
) -> &mut Self {
self.schedule
.stage(StartupSchedule, |schedule: &mut Schedule| {
schedule.add_stage_before(target, label, stage)
});
self
}
/// Fetches the [`Stage`] of type `T` marked with `label` from the [`Schedule`], then
/// executes the provided `func` passing the fetched stage to it as an argument.
///
/// The `func` argument should be a function or a closure that accepts a mutable reference
/// to a struct implementing `Stage` and returns the same type. That means that it should
/// also assume that the stage has already been fetched successfully.
///
/// See [`stage`](Schedule::stage) for more details.
///
/// # Examples
///
/// Here the closure is used to add a system to the update stage:
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn my_system() {}
/// #
/// app.stage(CoreStage::Update, |stage: &mut SystemStage| {
/// stage.add_system(my_system)
/// });
/// ```
pub fn stage<T: Stage, F: FnOnce(&mut T) -> &mut T>(
&mut self,
label: impl StageLabel,
func: F,
) -> &mut Self {
self.schedule.stage(label, func);
self
}
/// Adds a system to the [update stage](Self::add_default_stages) of the app's [`Schedule`].
///
/// Refer to the [system module documentation](bevy_ecs::system) to see how a system
/// can be defined.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # fn my_system() {}
/// # let mut app = App::new();
/// #
/// app.add_system(my_system);
/// ```
pub fn add_system<Params>(&mut self, system: impl IntoSystemDescriptor<Params>) -> &mut Self {
self.add_system_to_stage(CoreStage::Update, system)
}
/// Adds a [`SystemSet`] to the [update stage](Self::add_default_stages).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn system_a() {}
/// # fn system_b() {}
/// # fn system_c() {}
/// #
/// app.add_system_set(
/// SystemSet::new()
/// .with_system(system_a)
/// .with_system(system_b)
/// .with_system(system_c),
/// );
/// ```
pub fn add_system_set(&mut self, system_set: SystemSet) -> &mut Self {
self.add_system_set_to_stage(CoreStage::Update, system_set)
}
/// Adds a system to the [`Stage`] identified by `stage_label`.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn my_system() {}
/// #
/// app.add_system_to_stage(CoreStage::PostUpdate, my_system);
/// ```
pub fn add_system_to_stage<Params>(
&mut self,
stage_label: impl StageLabel,
system: impl IntoSystemDescriptor<Params>,
) -> &mut Self {
use std::any::TypeId;
assert!(
stage_label.type_id() != TypeId::of::<StartupStage>(),
"add systems to a startup stage using App::add_startup_system_to_stage"
);
self.schedule.add_system_to_stage(stage_label, system);
self
}
/// Adds a [`SystemSet`] to the [`Stage`] identified by `stage_label`.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn system_a() {}
/// # fn system_b() {}
/// # fn system_c() {}
/// #
/// app.add_system_set_to_stage(
/// CoreStage::PostUpdate,
/// SystemSet::new()
/// .with_system(system_a)
/// .with_system(system_b)
/// .with_system(system_c),
/// );
/// ```
pub fn add_system_set_to_stage(
&mut self,
stage_label: impl StageLabel,
system_set: SystemSet,
) -> &mut Self {
use std::any::TypeId;
assert!(
stage_label.type_id() != TypeId::of::<StartupStage>(),
"add system sets to a startup stage using App::add_startup_system_set_to_stage"
);
self.schedule
.add_system_set_to_stage(stage_label, system_set);
self
}
/// Adds a system to the [startup stage](Self::add_default_stages) of the app's [`Schedule`].
///
/// * For adding a system that runs every frame, see [`add_system`](Self::add_system).
/// * For adding a system to a specific stage, see [`add_system_to_stage`](Self::add_system_to_stage).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// fn my_startup_system(_commands: Commands) {
/// println!("My startup system");
/// }
///
/// App::new()
/// .add_startup_system(my_startup_system);
/// ```
pub fn add_startup_system<Params>(
&mut self,
system: impl IntoSystemDescriptor<Params>,
) -> &mut Self {
self.add_startup_system_to_stage(StartupStage::Startup, system)
}
/// Adds a [`SystemSet`] to the [startup stage](Self::add_default_stages).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn startup_system_a() {}
/// # fn startup_system_b() {}
/// # fn startup_system_c() {}
/// #
/// app.add_startup_system_set(
/// SystemSet::new()
/// .with_system(startup_system_a)
/// .with_system(startup_system_b)
/// .with_system(startup_system_c),
/// );
/// ```
pub fn add_startup_system_set(&mut self, system_set: SystemSet) -> &mut Self {
self.add_startup_system_set_to_stage(StartupStage::Startup, system_set)
}
/// Adds a system to the [startup schedule](Self::add_default_stages), in the stage
/// identified by `stage_label`.
///
/// `stage_label` must refer to a stage inside the startup schedule.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn my_startup_system() {}
/// #
/// app.add_startup_system_to_stage(StartupStage::PreStartup, my_startup_system);
/// ```
pub fn add_startup_system_to_stage<Params>(
&mut self,
stage_label: impl StageLabel,
system: impl IntoSystemDescriptor<Params>,
) -> &mut Self {
self.schedule
.stage(StartupSchedule, |schedule: &mut Schedule| {
schedule.add_system_to_stage(stage_label, system)
});
self
}
/// Adds a [`SystemSet`] to the [startup schedule](Self::add_default_stages), in the stage
/// identified by `stage_label`.
///
/// `stage_label` must refer to a stage inside the startup schedule.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # let mut app = App::new();
/// # fn startup_system_a() {}
/// # fn startup_system_b() {}
/// # fn startup_system_c() {}
/// #
/// app.add_startup_system_set_to_stage(
/// StartupStage::PreStartup,
/// SystemSet::new()
/// .with_system(startup_system_a)
/// .with_system(startup_system_b)
/// .with_system(startup_system_c),
/// );
/// ```
pub fn add_startup_system_set_to_stage(
&mut self,
stage_label: impl StageLabel,
system_set: SystemSet,
) -> &mut Self {
self.schedule
.stage(StartupSchedule, |schedule: &mut Schedule| {
schedule.add_system_set_to_stage(stage_label, system_set)
});
self
}
/// Adds a new [`State`] with the given `initial` value.
/// This inserts a new `State<T>` resource and adds a new "driver" to [`CoreStage::Update`].
/// Each stage that uses `State<T>` for system run criteria needs a driver. If you need to use
/// your state in a different stage, consider using [`Self::add_state_to_stage`] or manually
/// adding [`State::get_driver`] to additional stages you need it in.
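    ///
    /// # Examples
    ///
    /// A minimal sketch using a hypothetical `AppState` type:
    ///
    /// ```
    /// # use bevy_app::prelude::*;
    /// # use bevy_ecs::prelude::*;
    /// #
    /// #[derive(Debug, Clone, Eq, PartialEq, Hash)]
    /// enum AppState {
    ///     Menu,
    ///     InGame,
    /// }
    ///
    /// App::new().add_state(AppState::Menu);
    /// ```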
pub fn add_state<T>(&mut self, initial: T) -> &mut Self
where
T: StateData,
{
self.add_state_to_stage(CoreStage::Update, initial)
}
/// Adds a new [`State`] with the given `initial` value.
/// This inserts a new `State<T>` resource and adds a new "driver" to the given stage.
/// Each stage that uses `State<T>` for system run criteria needs a driver. If you need to use
/// your state in more than one stage, consider manually adding [`State::get_driver`] to the
/// stages you need it in.
pub fn add_state_to_stage<T>(&mut self, stage: impl StageLabel, initial: T) -> &mut Self
where
T: StateData,
{
self.insert_resource(State::new(initial))
.add_system_set_to_stage(stage, State::<T>::get_driver())
}
/// Adds utility stages to the [`Schedule`], giving it a standardized structure.
///
/// Adding those stages is necessary to make some core engine features work, like
/// adding systems without specifying a stage, or registering events. This is however
/// done by default by calling `App::default`, which is in turn called by
/// [`App::new`].
///
/// # The stages
///
/// All the added stages, with the exception of the startup stage, run every time the
/// schedule is invoked. The stages are the following, in order of execution:
///
/// - **First:** Runs at the very start of the schedule execution cycle, even before the
/// startup stage.
/// - **Startup:** This is actually a schedule containing sub-stages. Runs only once
/// when the app starts.
/// - **Pre-startup:** Intended for systems that need to run before other startup systems.
/// - **Startup:** The main startup stage. Startup systems are added here by default.
/// - **Post-startup:** Intended for systems that need to run after other startup systems.
/// - **Pre-update:** Often used by plugins to prepare their internal state before the
/// update stage begins.
/// - **Update:** Intended for user defined logic. Systems are added here by default.
/// - **Post-update:** Often used by plugins to finalize their internal state after the
/// world changes that happened during the update stage.
/// - **Last:** Runs right before the end of the schedule execution cycle.
///
/// The labels for those stages are defined in the [`CoreStage`] and [`StartupStage`] `enum`s.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// let app = App::empty().add_default_stages();
/// ```
pub fn add_default_stages(&mut self) -> &mut Self {
self.add_stage(CoreStage::First, SystemStage::parallel())
.add_stage(
StartupSchedule,
Schedule::default()
.with_run_criteria(ShouldRun::once)
.with_stage(StartupStage::PreStartup, SystemStage::parallel())
.with_stage(StartupStage::Startup, SystemStage::parallel())
.with_stage(StartupStage::PostStartup, SystemStage::parallel()),
)
.add_stage(CoreStage::PreUpdate, SystemStage::parallel())
.add_stage(CoreStage::Update, SystemStage::parallel())
.add_stage(CoreStage::PostUpdate, SystemStage::parallel())
.add_stage(CoreStage::Last, SystemStage::parallel())
}
/// Setup the application to manage events of type `T`.
///
/// This is done by adding a [`Resource`] of type [`Events::<T>`],
/// and inserting an [`update_system`](Events::update_system) into [`CoreStage::First`].
///
/// See [`Events`] for defining events.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// # use bevy_ecs::prelude::*;
/// #
/// # struct MyEvent;
/// # let mut app = App::new();
/// #
/// app.add_event::<MyEvent>();
/// ```
pub fn add_event<T>(&mut self) -> &mut Self
where
T: Event,
{
if !self.world.contains_resource::<Events<T>>() {
self.init_resource::<Events<T>>()
.add_system_to_stage(CoreStage::First, Events::<T>::update_system);
}
self
}
/// Inserts a [`Resource`] to the current [`App`] and overwrites any [`Resource`] previously added of the same type.
///
/// A [`Resource`] in Bevy represents globally unique data. [`Resource`]s must be added to Bevy apps
/// before using them. This happens with [`insert_resource`](Self::insert_resource).
///
/// See [`init_resource`](Self::init_resource) for [`Resource`]s that implement [`Default`] or [`FromWorld`].
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// struct MyCounter {
/// counter: usize,
/// }
///
/// App::new()
/// .insert_resource(MyCounter { counter: 0 });
/// ```
pub fn insert_resource<R: Resource>(&mut self, resource: R) -> &mut Self {
self.world.insert_resource(resource);
self
}
/// Inserts a non-send [`Resource`] to the app.
///
/// You usually want to use [`insert_resource`](Self::insert_resource),
/// but there are some special cases when a [`Resource`] cannot be sent across threads.
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// struct MyCounter {
/// counter: usize,
/// }
///
/// App::new()
/// .insert_non_send_resource(MyCounter { counter: 0 });
/// ```
pub fn insert_non_send_resource<R: 'static>(&mut self, resource: R) -> &mut Self {
self.world.insert_non_send_resource(resource);
self
}
/// Initialize a [`Resource`] with standard starting values by adding it to the [`World`].
///
/// If the [`Resource`] already exists, nothing happens.
///
/// The [`Resource`] must implement the [`FromWorld`] trait.
/// If the [`Default`] trait is implemented, the [`FromWorld`] trait will use
/// the [`Default::default`] method to initialize the [`Resource`].
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// struct MyCounter {
/// counter: usize,
/// }
///
/// impl Default for MyCounter {
/// fn default() -> MyCounter {
/// MyCounter {
/// counter: 100
/// }
/// }
/// }
///
/// App::new()
/// .init_resource::<MyCounter>();
/// ```
pub fn init_resource<R: Resource + FromWorld>(&mut self) -> &mut Self {
self.world.init_resource::<R>();
self
}
/// Initialize a non-send [`Resource`] with standard starting values by adding it to the [`World`].
///
/// The [`Resource`] must implement the [`FromWorld`] trait.
/// If the [`Default`] trait is implemented, the [`FromWorld`] trait will use
/// the [`Default::default`] method to initialize the [`Resource`].
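    ///
    /// # Examples
    ///
    /// A minimal sketch with a hypothetical counter type that is `!Send`
    /// because it holds an `Rc`:
    ///
    /// ```
    /// # use bevy_app::prelude::*;
    /// # use std::rc::Rc;
    /// #
    /// #[derive(Default)]
    /// struct MyNonSendCounter {
    ///     counter: Rc<usize>,
    /// }
    ///
    /// App::new()
    ///     .init_non_send_resource::<MyNonSendCounter>();
    /// ```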
pub fn init_non_send_resource<R: 'static + FromWorld>(&mut self) -> &mut Self {
self.world.init_non_send_resource::<R>();
self
}
/// Sets the function that will be called when the app is run.
///
/// The runner function `run_fn` is called only once by [`App::run`]. If the
/// presence of a main loop in the app is desired, it is the responsibility of the runner
/// function to provide it.
///
/// The runner function is usually not set manually, but by Bevy integrated plugins
/// (e.g. `WinitPlugin`).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// fn my_runner(mut app: App) {
/// loop {
/// println!("In main loop");
/// app.update();
/// }
/// }
///
/// App::new()
/// .set_runner(my_runner);
/// ```
pub fn set_runner(&mut self, run_fn: impl Fn(App) + 'static) -> &mut Self {
self.runner = Box::new(run_fn);
self
}
/// Adds a single [`Plugin`].
///
/// One of Bevy's core principles is modularity. All Bevy engine features are implemented
/// as [`Plugin`]s. This includes internal features like the renderer.
///
/// Bevy also provides a few sets of default [`Plugin`]s. See [`add_plugins`](Self::add_plugins).
///
/// # Examples
///
/// ```
/// # use bevy_app::prelude::*;
/// #
/// App::new().add_plugin(bevy_log::LogPlugin::default());
/// ```
pub fn add_plugin<T>(&mut self, plugin: T) -> &mut Self
where
T: Plugin,
{
debug!("added plugin: {}", plugin.name());
plugin.build(self);
self
}
/// Adds a group of [`Plugin`]s.
///
/// [`Plugin`]s can be grouped into a set by using a [`PluginGroup`].
///
/// There are built-in [`PluginGroup`]s that provide core engine functionality.
/// The [`PluginGroup`]s available by default are `DefaultPlugins` and `MinimalPlugins`.
///
/// To customize the plugins in the group (reorder, disable a plugin, add a new plugin
/// before / after another plugin), see [`add_plugins_with`](Self::add_plugins_with).
///
/// ## Examples
/// ```
/// # use bevy_app::{prelude::*, PluginGroupBuilder};
/// #
    /// # // Dummy created to avoid using bevy_internal, which pulls in too many dependencies.
/// # struct MinimalPlugins;
/// # impl PluginGroup for MinimalPlugins {
/// # fn build(&mut self, group: &mut PluginGroupBuilder){;}
/// # }
/// #
/// App::new()
/// .add_plugins(MinimalPlugins);
/// ```
pub fn add_plugins<T: PluginGroup>(&mut self, mut group: T) -> &mut Self {
let mut plugin_group_builder = PluginGroupBuilder::default();
group.build(&mut plugin_group_builder);
plugin_group_builder.finish(self);
self
}
/// Adds a group of [`Plugin`]s with an initializer method.
///
/// Can be used to add a group of [`Plugin`]s, where the group is modified
/// before insertion into a Bevy application. For example, you can add
/// additional [`Plugin`]s at a specific place in the [`PluginGroup`], or deactivate
/// specific [`Plugin`]s while keeping the rest using a [`PluginGroupBuilder`].
///
/// # Examples
///
/// ```
/// # use bevy_app::{prelude::*, PluginGroupBuilder};
/// #
/// # // Dummies created to avoid using bevy_internal which pulls in too many dependencies.
/// # struct DefaultPlugins;
/// # impl PluginGroup for DefaultPlugins {
/// # fn build(&mut self, group: &mut PluginGroupBuilder){
/// # group.add(bevy_log::LogPlugin::default());
/// # }
/// # }
/// #
/// # struct MyOwnPlugin;
/// # impl Plugin for MyOwnPlugin {
/// # fn build(&self, app: &mut App){;}
/// # }
/// #
/// App::new()
/// .add_plugins_with(DefaultPlugins, |group| {
/// group.add_before::<bevy_log::LogPlugin, _>(MyOwnPlugin)
/// });
/// ```
pub fn add_plugins_with<T, F>(&mut self, mut group: T, func: F) -> &mut Self
where
T: PluginGroup,
F: FnOnce(&mut PluginGroupBuilder) -> &mut PluginGroupBuilder,
{
let mut plugin_group_builder = PluginGroupBuilder::default();
group.build(&mut plugin_group_builder);
func(&mut plugin_group_builder);
plugin_group_builder.finish(self);
self
}
/// Adds the type `T` to the type registry [`Resource`].
#[cfg(feature = "bevy_reflect")]
pub fn register_type<T: bevy_reflect::GetTypeRegistration>(&mut self) -> &mut Self {
{
let registry = self.world.resource_mut::<bevy_reflect::TypeRegistryArc>();
registry.write().register::<T>();
}
self
}
/// Adds an [`App`] as a child of the current one.
///
/// The provided function `f` is called by the [`update`](Self::update) method. The [`World`]
/// parameter represents the main app world, while the [`App`] parameter is just a mutable
/// reference to the `SubApp` itself.
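    ///
    /// # Examples
    ///
    /// A minimal sketch with a hypothetical label type; the extra derives are
    /// what `#[derive(AppLabel)]` relies on:
    ///
    /// ```
    /// # use bevy_app::prelude::*;
    /// # use bevy_app::AppLabel;
    /// #
    /// #[derive(Debug, Clone, PartialEq, Eq, Hash, AppLabel)]
    /// struct MySubApp;
    ///
    /// let mut app = App::new();
    /// app.add_sub_app(MySubApp, App::empty(), |_main_world, _sub_app| {
    ///     // Drive the sub-app's schedule here.
    /// });
    /// ```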
pub fn add_sub_app(
&mut self,
label: impl AppLabel,
mut app: App,
sub_app_runner: impl Fn(&mut World, &mut App) + 'static,
) -> &mut Self {
if let Some(pool) = self.world.get_resource::<ComputeTaskPool>() {
app.world.insert_resource(pool.clone());
}
if let Some(pool) = self.world.get_resource::<AsyncComputeTaskPool>() {
app.world.insert_resource(pool.clone());
}
if let Some(pool) = self.world.get_resource::<IoTaskPool>() {
app.world.insert_resource(pool.clone());
}
self.sub_apps.insert(
Box::new(label),
SubApp {
app,
runner: Box::new(sub_app_runner),
},
);
self
}
/// Retrieves a `SubApp` stored inside this [`App`].
///
/// # Panics
///
/// Panics if the `SubApp` doesn't exist.
pub fn sub_app_mut(&mut self, label: impl AppLabel) -> &mut App {
match self.get_sub_app_mut(label) {
Ok(app) => app,
Err(label) => panic!("Sub-App with label '{:?}' does not exist", label),
}
}
/// Retrieves a `SubApp` inside this [`App`] with the given label, if it exists. Otherwise returns
/// an [`Err`] containing the given label.
pub fn get_sub_app_mut(&mut self, label: impl AppLabel) -> Result<&mut App, impl AppLabel> {
self.sub_apps
.get_mut((&label) as &dyn AppLabel)
.map(|sub_app| &mut sub_app.app)
.ok_or(label)
}
/// Retrieves a `SubApp` stored inside this [`App`].
///
/// # Panics
///
/// Panics if the `SubApp` doesn't exist.
pub fn sub_app(&self, label: impl AppLabel) -> &App {
match self.get_sub_app(label) {
Ok(app) => app,
Err(label) => panic!("Sub-App with label '{:?}' does not exist", label),
}
}
/// Retrieves a `SubApp` inside this [`App`] with the given label, if it exists. Otherwise returns
/// an [`Err`] containing the given label.
pub fn get_sub_app(&self, label: impl AppLabel) -> Result<&App, impl AppLabel> {
self.sub_apps
.get((&label) as &dyn AppLabel)
.map(|sub_app| &sub_app.app)
.ok_or(label)
}
}
fn run_once(mut app: App) {
app.update();
}
/// An event that indicates the [`App`] should exit. This will fully exit the app process at the
/// start of the next tick of the schedule.
///
/// You can also use this event to detect that an exit was requested. In order to receive it, systems
/// subscribing to this event should run after it was emitted and before the schedule of the same
/// frame is over.
#[derive(Debug, Clone, Default)]
pub struct AppExit;
| 34.147402 | 120 | 0.567902 |
e6018ac2c992de4520a6228659a78a25da326782 | 22,824 | extern crate rltk;
use rltk::{ RGB, Rltk, Console, Point, VirtualKeyCode };
extern crate specs;
use specs::prelude::*;
use super::{Pools, gamelog::GameLog, Map, Name, Position, State, InBackpack,
Viewshed, RunState, Equipped, HungerClock, HungerState, rex_assets::RexAssets,
Hidden, camera, Attributes, Attribute, Consumable };
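/// Draws an unfilled box outline using CP437 line-drawing glyphs. Only the
/// border cells are written, so anything already drawn inside is preserved.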
pub fn draw_hollow_box(
console: &mut Rltk,
sx: i32,
sy: i32,
width: i32,
height: i32,
fg: RGB,
bg: RGB,
) {
use rltk::to_cp437;
console.set(sx, sy, fg, bg, to_cp437('┌'));
console.set(sx + width, sy, fg, bg, to_cp437('┐'));
console.set(sx, sy + height, fg, bg, to_cp437('└'));
console.set(sx + width, sy + height, fg, bg, to_cp437('┘'));
for x in sx + 1..sx + width {
console.set(x, sy, fg, bg, to_cp437('─'));
console.set(x, sy + height, fg, bg, to_cp437('─'));
}
for y in sy + 1..sy + height {
console.set(sx, y, fg, bg, to_cp437('│'));
console.set(sx + width, y, fg, bg, to_cp437('│'));
}
}
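/// Prints one attribute row in the side panel: the attribute name, its
/// modified total (color-coded by modifier sign), and its bonus, prefixed
/// with '+' when positive.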
fn draw_attribute(name : &str, attribute : &Attribute, y : i32, ctx: &mut Rltk) {
let black = RGB::named(rltk::BLACK);
let attr_gray : RGB = RGB::from_hex("#CCCCCC").expect("Oops");
ctx.print_color(50, y, attr_gray, black, name);
let color : RGB =
if attribute.modifiers < 0 { RGB::from_f32(1.0, 0.0, 0.0) }
else if attribute.modifiers == 0 { RGB::named(rltk::WHITE) }
else { RGB::from_f32(0.0, 1.0, 0.0) };
ctx.print_color(67, y, color, black, &format!("{}", attribute.base + attribute.modifiers));
ctx.print_color(73, y, color, black, &format!("{}", attribute.bonus));
if attribute.bonus > 0 { ctx.set(72, y, color, black, rltk::to_cp437('+')); }
}
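/// Draws the main in-game interface: the map, log and status boxes, the
/// player's pools and attributes, equipped items, hot-keyed consumables,
/// hunger state, the game log, and any tooltips under the mouse.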
pub fn draw_ui(ecs: &World, ctx : &mut Rltk) {
use rltk::to_cp437;
let box_gray : RGB = RGB::from_hex("#999999").expect("Oops");
let black = RGB::named(rltk::BLACK);
let white = RGB::named(rltk::WHITE);
draw_hollow_box(ctx, 0, 0, 79, 59, box_gray, black); // Overall box
draw_hollow_box(ctx, 0, 0, 49, 45, box_gray, black); // Map box
draw_hollow_box(ctx, 0, 45, 79, 14, box_gray, black); // Log box
draw_hollow_box(ctx, 49, 0, 30, 8, box_gray, black); // Top-right panel
// Draw box connectors
ctx.set(0, 45, box_gray, black, to_cp437('├'));
ctx.set(49, 8, box_gray, black, to_cp437('├'));
ctx.set(49, 0, box_gray, black, to_cp437('┬'));
ctx.set(49, 45, box_gray, black, to_cp437('┴'));
ctx.set(79, 8, box_gray, black, to_cp437('┤'));
ctx.set(79, 45, box_gray, black, to_cp437('┤'));
// Draw the town name
let map = ecs.fetch::<Map>();
let name_length = map.name.len() + 2;
let x_pos = (22 - (name_length / 2)) as i32;
ctx.set(x_pos, 0, box_gray, black, to_cp437('┤'));
ctx.set(x_pos + name_length as i32 - 1, 0, box_gray, black, to_cp437('├'));
ctx.print_color(x_pos+1, 0, white, black, &map.name);
std::mem::drop(map);
// Draw stats
let player_entity = ecs.fetch::<Entity>();
let pools = ecs.read_storage::<Pools>();
let player_pools = pools.get(*player_entity).unwrap();
let health = format!("Health: {}/{}", player_pools.hit_points.current, player_pools.hit_points.max);
let mana = format!("Mana: {}/{}", player_pools.mana.current, player_pools.mana.max);
ctx.print_color(50, 1, white, black, &health);
ctx.print_color(50, 2, white, black, &mana);
ctx.draw_bar_horizontal(64, 1, 14, player_pools.hit_points.current, player_pools.hit_points.max, RGB::named(rltk::RED), RGB::named(rltk::BLACK));
ctx.draw_bar_horizontal(64, 2, 14, player_pools.mana.current, player_pools.mana.max, RGB::named(rltk::BLUE), RGB::named(rltk::BLACK));
// Attributes
let attributes = ecs.read_storage::<Attributes>();
let attr = attributes.get(*player_entity).unwrap();
draw_attribute("Might:", &attr.might, 4, ctx);
draw_attribute("Quickness:", &attr.quickness, 5, ctx);
draw_attribute("Fitness:", &attr.fitness, 6, ctx);
draw_attribute("Intelligence:", &attr.intelligence, 7, ctx);
// Equipped
let mut y = 9;
let equipped = ecs.read_storage::<Equipped>();
let name = ecs.read_storage::<Name>();
for (equipped_by, item_name) in (&equipped, &name).join() {
if equipped_by.owner == *player_entity {
ctx.print_color(50, y, white, black, &item_name.name);
y += 1;
}
}
// Consumables
y += 1;
let green = RGB::from_f32(0.0, 1.0, 0.0);
let yellow = RGB::named(rltk::YELLOW);
let consumables = ecs.read_storage::<Consumable>();
let backpack = ecs.read_storage::<InBackpack>();
let mut index = 1;
for (carried_by, _consumable, item_name) in (&backpack, &consumables, &name).join() {
if carried_by.owner == *player_entity && index < 10 {
ctx.print_color(50, y, yellow, black, &format!("↑{}", index));
ctx.print_color(53, y, green, black, &item_name.name);
y += 1;
index += 1;
}
}
// Status
let hunger = ecs.read_storage::<HungerClock>();
let hc = hunger.get(*player_entity).unwrap();
match hc.state {
HungerState::WellFed => ctx.print_color(50, 44, RGB::named(rltk::GREEN), RGB::named(rltk::BLACK), "Well Fed"),
HungerState::Normal => {}
HungerState::Hungry => ctx.print_color(50, 44, RGB::named(rltk::ORANGE), RGB::named(rltk::BLACK), "Hungry"),
HungerState::Starving => ctx.print_color(50, 44, RGB::named(rltk::RED), RGB::named(rltk::BLACK), "Starving"),
}
// Draw the log
let log = ecs.fetch::<GameLog>();
let mut y = 46;
for s in log.entries.iter() {
if y < 59 { ctx.print(2, y, &s.to_string()); }
y += 1;
}
draw_tooltips(ecs, ctx);
}
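/// A multi-line tooltip box; the first line is rendered as a brighter title.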
struct Tooltip {
lines : Vec<String>
}
impl Tooltip {
fn new() -> Tooltip {
Tooltip { lines : Vec::new() }
}
fn add<S:ToString>(&mut self, line : S) {
self.lines.push(line.to_string());
}
fn width(&self) -> i32 {
let mut max = 0;
for s in self.lines.iter() {
if s.len() > max {
max = s.len();
}
}
max as i32 + 2i32
}
fn height(&self) -> i32 { self.lines.len() as i32 + 2i32 }
fn render(&self, ctx : &mut Rltk, x : i32, y : i32) {
let box_gray : RGB = RGB::from_hex("#999999").expect("Oops");
let light_gray : RGB = RGB::from_hex("#DDDDDD").expect("Oops");
let white = RGB::named(rltk::WHITE);
let black = RGB::named(rltk::BLACK);
ctx.draw_box(x, y, self.width()-1, self.height()-1, white, box_gray);
for (i,s) in self.lines.iter().enumerate() {
let col = if i == 0 { white } else { light_gray };
ctx.print_color(x+1, y+i as i32+1, col, black, &s);
}
}
}
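/// Shows tooltips for any visible, non-hidden entities under the mouse,
/// converting screen coordinates to map coordinates via the camera bounds.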
fn draw_tooltips(ecs: &World, ctx : &mut Rltk) {
use rltk::to_cp437;
let (min_x, _max_x, min_y, _max_y) = camera::get_screen_bounds(ecs, ctx);
let map = ecs.fetch::<Map>();
let names = ecs.read_storage::<Name>();
let positions = ecs.read_storage::<Position>();
let hidden = ecs.read_storage::<Hidden>();
let attributes = ecs.read_storage::<Attributes>();
let pools = ecs.read_storage::<Pools>();
let entities = ecs.entities();
let mouse_pos = ctx.mouse_pos();
let mut mouse_map_pos = mouse_pos;
mouse_map_pos.0 += min_x - 1;
mouse_map_pos.1 += min_y - 1;
if mouse_map_pos.0 >= map.width-1 || mouse_map_pos.1 >= map.height-1 || mouse_map_pos.0 < 1 || mouse_map_pos.1 < 1
{
return;
}
if !map.visible_tiles[map.xy_idx(mouse_map_pos.0, mouse_map_pos.1)] { return; }
let mut tip_boxes : Vec<Tooltip> = Vec::new();
for (entity, name, position, _hidden) in (&entities, &names, &positions, !&hidden).join() {
if position.x == mouse_map_pos.0 && position.y == mouse_map_pos.1 {
let mut tip = Tooltip::new();
tip.add(name.name.to_string());
// Comment on attributes
let attr = attributes.get(entity);
if let Some(attr) = attr {
let mut s = "".to_string();
if attr.might.bonus < 0 { s += "Weak. " };
if attr.might.bonus > 0 { s += "Strong. " };
if attr.quickness.bonus < 0 { s += "Clumsy. " };
if attr.quickness.bonus > 0 { s += "Agile. " };
                if attr.fitness.bonus < 0 { s += "Unhealthy. " };
                if attr.fitness.bonus > 0 { s += "Healthy. " };
if attr.intelligence.bonus < 0 { s += "Unintelligent. "};
if attr.intelligence.bonus > 0 { s += "Smart. "};
if s.is_empty() {
s = "Quite Average".to_string();
}
tip.add(s);
}
// Comment on pools
let stat = pools.get(entity);
if let Some(stat) = stat {
tip.add(format!("Level: {}", stat.level));
}
tip_boxes.push(tip);
}
}
if tip_boxes.is_empty() { return; }
let box_gray : RGB = RGB::from_hex("#999999").expect("Oops");
let white = RGB::named(rltk::WHITE);
let arrow;
let arrow_x;
let arrow_y = mouse_pos.1;
if mouse_pos.0 < 40 {
// Render to the left
arrow = to_cp437('→');
arrow_x = mouse_pos.0 - 1;
} else {
// Render to the right
arrow = to_cp437('←');
arrow_x = mouse_pos.0 + 1;
}
ctx.set(arrow_x, arrow_y, white, box_gray, arrow);
let mut total_height = 0;
for tt in tip_boxes.iter() {
total_height += tt.height();
}
let mut y = mouse_pos.1 - (total_height / 2);
while y + (total_height/2) > 50 {
y -= 1;
}
for tt in tip_boxes.iter() {
let x = if mouse_pos.0 < 40 {
mouse_pos.0 - (1 + tt.width())
} else {
mouse_pos.0 + (1 + tt.width())
};
tt.render(ctx, x, y);
y += tt.height();
}
}
#[derive(PartialEq, Copy, Clone)]
pub enum ItemMenuResult { Cancel, NoResponse, Selected }
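/// Lists the player's backpack contents and lets them pick an item by letter;
/// returns `Selected` with the chosen entity, or `Cancel` on Escape.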
pub fn show_inventory(gs : &mut State, ctx : &mut Rltk) -> (ItemMenuResult, Option<Entity>) {
let player_entity = gs.ecs.fetch::<Entity>();
let names = gs.ecs.read_storage::<Name>();
let backpack = gs.ecs.read_storage::<InBackpack>();
let entities = gs.ecs.entities();
let inventory = (&backpack, &names).join().filter(|item| item.0.owner == *player_entity );
let count = inventory.count();
let mut y = (25 - (count / 2)) as i32;
ctx.draw_box(15, y-2, 31, (count+3) as i32, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK));
ctx.print_color(18, y-2, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Inventory");
ctx.print_color(18, y+count as i32+1, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "ESCAPE to cancel");
let mut equippable : Vec<Entity> = Vec::new();
let mut j = 0;
for (entity, _pack, name) in (&entities, &backpack, &names).join().filter(|item| item.1.owner == *player_entity ) {
ctx.set(17, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437('('));
ctx.set(18, y, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), 97+j as u8);
ctx.set(19, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437(')'));
ctx.print(21, y, &name.name.to_string());
equippable.push(entity);
y += 1;
j += 1;
}
match ctx.key {
None => (ItemMenuResult::NoResponse, None),
Some(key) => {
match key {
VirtualKeyCode::Escape => { (ItemMenuResult::Cancel, None) }
_ => {
let selection = rltk::letter_to_option(key);
if selection > -1 && selection < count as i32 {
return (ItemMenuResult::Selected, Some(equippable[selection as usize]));
}
(ItemMenuResult::NoResponse, None)
}
}
}
}
}
pub fn drop_item_menu(gs : &mut State, ctx : &mut Rltk) -> (ItemMenuResult, Option<Entity>) {
let player_entity = gs.ecs.fetch::<Entity>();
let names = gs.ecs.read_storage::<Name>();
let backpack = gs.ecs.read_storage::<InBackpack>();
let entities = gs.ecs.entities();
let inventory = (&backpack, &names).join().filter(|item| item.0.owner == *player_entity );
let count = inventory.count();
let mut y = (25 - (count / 2)) as i32;
ctx.draw_box(15, y-2, 31, (count+3) as i32, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK));
ctx.print_color(18, y-2, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Drop Which Item?");
ctx.print_color(18, y+count as i32+1, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "ESCAPE to cancel");
let mut equippable : Vec<Entity> = Vec::new();
let mut j = 0;
for (entity, _pack, name) in (&entities, &backpack, &names).join().filter(|item| item.1.owner == *player_entity ) {
ctx.set(17, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437('('));
ctx.set(18, y, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), 97+j as u8);
ctx.set(19, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437(')'));
ctx.print(21, y, &name.name.to_string());
equippable.push(entity);
y += 1;
j += 1;
}
match ctx.key {
None => (ItemMenuResult::NoResponse, None),
Some(key) => {
match key {
VirtualKeyCode::Escape => { (ItemMenuResult::Cancel, None) }
_ => {
let selection = rltk::letter_to_option(key);
if selection > -1 && selection < count as i32 {
return (ItemMenuResult::Selected, Some(equippable[selection as usize]));
}
(ItemMenuResult::NoResponse, None)
}
}
}
}
}
pub fn remove_item_menu(gs : &mut State, ctx : &mut Rltk) -> (ItemMenuResult, Option<Entity>) {
let player_entity = gs.ecs.fetch::<Entity>();
let names = gs.ecs.read_storage::<Name>();
let backpack = gs.ecs.read_storage::<Equipped>();
let entities = gs.ecs.entities();
let inventory = (&backpack, &names).join().filter(|item| item.0.owner == *player_entity );
let count = inventory.count();
let mut y = (25 - (count / 2)) as i32;
ctx.draw_box(15, y-2, 31, (count+3) as i32, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK));
ctx.print_color(18, y-2, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Remove Which Item?");
ctx.print_color(18, y+count as i32+1, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "ESCAPE to cancel");
let mut equippable : Vec<Entity> = Vec::new();
let mut j = 0;
for (entity, _pack, name) in (&entities, &backpack, &names).join().filter(|item| item.1.owner == *player_entity ) {
ctx.set(17, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437('('));
ctx.set(18, y, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), 97+j as u8);
ctx.set(19, y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), rltk::to_cp437(')'));
ctx.print(21, y, &name.name.to_string());
equippable.push(entity);
y += 1;
j += 1;
}
match ctx.key {
None => (ItemMenuResult::NoResponse, None),
Some(key) => {
match key {
VirtualKeyCode::Escape => { (ItemMenuResult::Cancel, None) }
_ => {
let selection = rltk::letter_to_option(key);
if selection > -1 && selection < count as i32 {
return (ItemMenuResult::Selected, Some(equippable[selection as usize]));
}
(ItemMenuResult::NoResponse, None)
}
}
}
}
}
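/// Lets the player choose a target tile within `range`. Tiles that are both
/// visible and in range are highlighted; left-clicking one returns
/// `Selected` with its map position, while clicking elsewhere cancels.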
pub fn ranged_target(gs : &mut State, ctx : &mut Rltk, range : i32) -> (ItemMenuResult, Option<Point>) {
let (min_x, max_x, min_y, max_y) = camera::get_screen_bounds(&gs.ecs, ctx);
let player_entity = gs.ecs.fetch::<Entity>();
let player_pos = gs.ecs.fetch::<Point>();
let viewsheds = gs.ecs.read_storage::<Viewshed>();
ctx.print_color(5, 0, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Select Target:");
// Highlight available target cells
let mut available_cells = Vec::new();
let visible = viewsheds.get(*player_entity);
if let Some(visible) = visible {
// We have a viewshed
for idx in visible.visible_tiles.iter() {
let distance = rltk::DistanceAlg::Pythagoras.distance2d(*player_pos, *idx);
if distance <= range as f32 {
let screen_x = idx.x - min_x;
let screen_y = idx.y - min_y;
if screen_x > 1 && screen_x < (max_x - min_x)-1 && screen_y > 1 && screen_y < (max_y - min_y)-1 {
ctx.set_bg(screen_x, screen_y, RGB::named(rltk::BLUE));
available_cells.push(idx);
}
}
}
} else {
return (ItemMenuResult::Cancel, None);
}
// Draw mouse cursor
let mouse_pos = ctx.mouse_pos();
let mut mouse_map_pos = mouse_pos;
mouse_map_pos.0 += min_x - 1;
mouse_map_pos.1 += min_y - 1;
let mut valid_target = false;
for idx in available_cells.iter() { if idx.x == mouse_map_pos.0 && idx.y == mouse_map_pos.1 { valid_target = true; } }
if valid_target {
ctx.set_bg(mouse_pos.0, mouse_pos.1, RGB::named(rltk::CYAN));
if ctx.left_click {
return (ItemMenuResult::Selected, Some(Point::new(mouse_map_pos.0, mouse_map_pos.1)));
}
} else {
ctx.set_bg(mouse_pos.0, mouse_pos.1, RGB::named(rltk::RED));
if ctx.left_click {
return (ItemMenuResult::Cancel, None);
}
}
(ItemMenuResult::NoResponse, None)
}
#[derive(PartialEq, Copy, Clone)]
pub enum MainMenuSelection { NewGame, LoadGame, Quit }
#[derive(PartialEq, Copy, Clone)]
pub enum MainMenuResult { NoSelection{ selected : MainMenuSelection }, Selected{ selected: MainMenuSelection } }
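/// Renders the main menu over the REX Paint background and handles
/// Up/Down/Enter/Escape input. The "Load Game" entry is skipped when no
/// saved game exists.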
pub fn main_menu(gs : &mut State, ctx : &mut Rltk) -> MainMenuResult {
let save_exists = super::saveload_system::does_save_exist();
let runstate = gs.ecs.fetch::<RunState>();
let assets = gs.ecs.fetch::<RexAssets>();
ctx.render_xp_sprite(&assets.menu, 0, 0);
ctx.draw_box_double(24, 18, 31, 10, RGB::named(rltk::WHEAT), RGB::named(rltk::BLACK));
ctx.print_color_centered(20, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Rust Roguelike Tutorial");
ctx.print_color_centered(21, RGB::named(rltk::CYAN), RGB::named(rltk::BLACK), "by Herbert Wolverson");
ctx.print_color_centered(22, RGB::named(rltk::GRAY), RGB::named(rltk::BLACK), "Use Up/Down Arrows and Enter");
let mut y = 24;
if let RunState::MainMenu{ menu_selection : selection } = *runstate {
if selection == MainMenuSelection::NewGame {
ctx.print_color_centered(y, RGB::named(rltk::MAGENTA), RGB::named(rltk::BLACK), "Begin New Game");
} else {
ctx.print_color_centered(y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), "Begin New Game");
}
y += 1;
if save_exists {
if selection == MainMenuSelection::LoadGame {
ctx.print_color_centered(y, RGB::named(rltk::MAGENTA), RGB::named(rltk::BLACK), "Load Game");
} else {
ctx.print_color_centered(y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), "Load Game");
}
y += 1;
}
if selection == MainMenuSelection::Quit {
ctx.print_color_centered(y, RGB::named(rltk::MAGENTA), RGB::named(rltk::BLACK), "Quit");
} else {
ctx.print_color_centered(y, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), "Quit");
}
match ctx.key {
None => return MainMenuResult::NoSelection{ selected: selection },
Some(key) => {
match key {
VirtualKeyCode::Escape => { return MainMenuResult::NoSelection{ selected: MainMenuSelection::Quit } }
VirtualKeyCode::Up => {
let mut newselection;
match selection {
MainMenuSelection::NewGame => newselection = MainMenuSelection::Quit,
MainMenuSelection::LoadGame => newselection = MainMenuSelection::NewGame,
MainMenuSelection::Quit => newselection = MainMenuSelection::LoadGame
}
if newselection == MainMenuSelection::LoadGame && !save_exists {
newselection = MainMenuSelection::NewGame;
}
return MainMenuResult::NoSelection{ selected: newselection }
}
VirtualKeyCode::Down => {
let mut newselection;
match selection {
MainMenuSelection::NewGame => newselection = MainMenuSelection::LoadGame,
MainMenuSelection::LoadGame => newselection = MainMenuSelection::Quit,
MainMenuSelection::Quit => newselection = MainMenuSelection::NewGame
}
if newselection == MainMenuSelection::LoadGame && !save_exists {
newselection = MainMenuSelection::Quit;
}
return MainMenuResult::NoSelection{ selected: newselection }
}
VirtualKeyCode::Return => return MainMenuResult::Selected{ selected : selection },
_ => return MainMenuResult::NoSelection{ selected: selection }
}
}
}
}
MainMenuResult::NoSelection { selected: MainMenuSelection::NewGame }
}
#[derive(PartialEq, Copy, Clone)]
pub enum GameOverResult { NoSelection, QuitToMenu }
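/// Shows the game-over screen; any key press returns `QuitToMenu`.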
pub fn game_over(ctx : &mut Rltk) -> GameOverResult {
ctx.print_color_centered(15, RGB::named(rltk::YELLOW), RGB::named(rltk::BLACK), "Your journey has ended!");
ctx.print_color_centered(17, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), "One day, we'll tell you all about how you did.");
    ctx.print_color_centered(18, RGB::named(rltk::WHITE), RGB::named(rltk::BLACK), "That day, sadly, is not in this chapter...");
ctx.print_color_centered(20, RGB::named(rltk::MAGENTA), RGB::named(rltk::BLACK), "Press any key to return to the menu.");
match ctx.key {
None => GameOverResult::NoSelection,
Some(_) => GameOverResult::QuitToMenu
}
}
| 40.976661 | 149 | 0.570189 |
cc1008419062697f2e0483f35189d6aaf068c25f | 237 |
use crate::*;
pub struct DebugContainer {
}
impl DebugContainer {
}
impl BuildHandler for DebugContainer {
fn on_build(&mut self, state: &mut State, entity: Entity) {
}
}
impl EventHandler for DebugContainer {
} | 11.285714 | 63 | 0.662447 |
75dfed2ce303e67036740d4fc9cb7945a453d250 | 342 | fn main() {
let mut count = 0u32;
println!("Let's count until infinity!");
loop {
count += 1;
if count == 3 {
println!("three");
continue;
}
println!("{}", count);
if count == 5 {
            println!("OK, that's enough");
break;
}
}
}
| 14.25 | 44 | 0.388889 |
ebcea597552f941bc900c49cc16a09d1cd6257b2 | 793 | //! State storage code for Zebra. 🦓
#![doc(html_favicon_url = "https://www.zfnd.org/images/zebra-favicon-128.png")]
#![doc(html_logo_url = "https://www.zfnd.org/images/zebra-icon.png")]
#![doc(html_root_url = "https://doc.zebra.zfnd.org/zebra_state")]
#![warn(missing_docs)]
#![allow(clippy::try_err)]
mod config;
mod constants;
mod memory_state;
mod request;
mod response;
mod service;
mod sled_state;
mod util;
// TODO: move these to integration tests.
#[cfg(test)]
mod tests;
use memory_state::MemoryState;
use service::QueuedBlock;
use sled_state::SledState;
pub use config::Config;
pub use request::{HashOrHeight, Request};
pub use response::Response;
pub use service::init;
/// A boxed [`std::error::Error`].
pub type BoxError = Box<dyn std::error::Error + Send + Sync + 'static>;
| 24.030303 | 79 | 0.718789 |
acc80c0f3e4e80c3120c727e7511548cb93c8a9f | 2,503 | // WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "android-database-StaleDataException"))]
__jni_bindgen! {
/// public class [StaleDataException](https://developer.android.com/reference/android/database/StaleDataException.html)
///
/// Required feature: android-database-StaleDataException
public class StaleDataException ("android/database/StaleDataException") extends crate::java::lang::RuntimeException {
/// [StaleDataException](https://developer.android.com/reference/android/database/StaleDataException.html#StaleDataException())
pub fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::database::StaleDataException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/database/StaleDataException", java.flags == PUBLIC, .name == "<init>", .descriptor == "()V"
unsafe {
let __jni_args = [];
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/database/StaleDataException\0", "<init>\0", "()V\0");
__jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
/// [StaleDataException](https://developer.android.com/reference/android/database/StaleDataException.html#StaleDataException(java.lang.String))
///
/// Required features: "java-lang-String"
#[cfg(any(feature = "all", all(feature = "java-lang-String")))]
pub fn new_String<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::database::StaleDataException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/database/StaleDataException", java.flags == PUBLIC, .name == "<init>", .descriptor == "(Ljava/lang/String;)V"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/database/StaleDataException\0", "<init>\0", "(Ljava/lang/String;)V\0");
__jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
}
}
| 71.514286 | 355 | 0.671994 |
219ad97b6efb9c37ee9cb6108588ae30b43d5bf5 | 3,793 | use byteorder::{NetworkEndian, ReadBytesExt, WriteBytesExt};
use std::error::Error;
use std::io::prelude::*;
use deserialize::{self, FromSql};
use pg::Pg;
use serialize::{self, IsNull, Output, ToSql};
use sql_types;
#[cfg(feature = "quickcheck")]
mod quickcheck_impls;
#[derive(Debug, Clone, PartialEq, Eq, FromSqlRow, AsExpression)]
#[sql_type = "sql_types::Numeric"]
/// Represents a NUMERIC value, closely mirroring the PG wire protocol
/// representation
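///
/// Illustrative sketch (not part of the original docs): the value `123.45`
/// would be stored as `digits = [123, 4500]` (base-10000 digits), with
/// `weight = 0` (one digit group before the decimal point) and `scale = 2`.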
pub enum PgNumeric {
/// A positive number
Positive {
/// How many digits come before the decimal point?
weight: i16,
/// How many significant digits are there?
scale: u16,
/// The digits in this number, stored in base 10000
digits: Vec<i16>,
},
/// A negative number
Negative {
/// How many digits come before the decimal point?
weight: i16,
/// How many significant digits are there?
scale: u16,
/// The digits in this number, stored in base 10000
digits: Vec<i16>,
},
/// Not a number
NaN,
}
#[derive(Debug, Clone, Copy)]
struct InvalidNumericSign(u16);
impl ::std::fmt::Display for InvalidNumericSign {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "InvalidNumericSign({0:x})", self.0)
}
}
impl Error for InvalidNumericSign {
fn description(&self) -> &str {
"sign for numeric field was not one of 0, 0x4000, 0xC000"
}
}
impl FromSql<sql_types::Numeric, Pg> for PgNumeric {
fn from_sql(bytes: Option<&[u8]>) -> deserialize::Result<Self> {
let mut bytes = not_none!(bytes);
let ndigits = try!(bytes.read_u16::<NetworkEndian>());
let mut digits = Vec::with_capacity(ndigits as usize);
let weight = try!(bytes.read_i16::<NetworkEndian>());
let sign = try!(bytes.read_u16::<NetworkEndian>());
let scale = try!(bytes.read_u16::<NetworkEndian>());
for _ in 0..ndigits {
digits.push(try!(bytes.read_i16::<NetworkEndian>()));
}
match sign {
0 => Ok(PgNumeric::Positive {
weight: weight,
scale: scale,
digits: digits,
}),
0x4000 => Ok(PgNumeric::Negative {
weight: weight,
scale: scale,
digits: digits,
}),
0xC000 => Ok(PgNumeric::NaN),
invalid => Err(Box::new(InvalidNumericSign(invalid))),
}
}
}
impl ToSql<sql_types::Numeric, Pg> for PgNumeric {
fn to_sql<W: Write>(&self, out: &mut Output<W, Pg>) -> serialize::Result {
let sign = match *self {
PgNumeric::Positive { .. } => 0,
PgNumeric::Negative { .. } => 0x4000,
PgNumeric::NaN => 0xC000,
};
let empty_vec = Vec::new();
let digits = match *self {
PgNumeric::Positive { ref digits, .. } | PgNumeric::Negative { ref digits, .. } => {
digits
}
PgNumeric::NaN => &empty_vec,
};
let weight = match *self {
PgNumeric::Positive { weight, .. } | PgNumeric::Negative { weight, .. } => weight,
PgNumeric::NaN => 0,
};
let scale = match *self {
PgNumeric::Positive { scale, .. } | PgNumeric::Negative { scale, .. } => scale,
PgNumeric::NaN => 0,
};
try!(out.write_u16::<NetworkEndian>(digits.len() as u16));
try!(out.write_i16::<NetworkEndian>(weight));
try!(out.write_u16::<NetworkEndian>(sign));
try!(out.write_u16::<NetworkEndian>(scale));
for digit in digits.iter() {
try!(out.write_i16::<NetworkEndian>(*digit));
}
Ok(IsNull::No)
}
}
| 32.418803 | 96 | 0.558397 |
1e226baead897ff39152f9b38319f4f900bb1c18 | 12,878 | //! Implementation of Chalk debug helper functions using TLS.
use std::fmt;
use chalk_ir::{AliasTy, GenericArg, Goal, Goals, Lifetime, ProgramClauseImplication, TypeName};
use itertools::Itertools;
use super::{from_chalk, Interner};
use crate::{db::HirDatabase, CallableDef, TypeCtor};
use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
pub use unsafe_tls::{set_current_program, with_current_program};
pub struct DebugContext<'a>(&'a dyn HirDatabase);
impl DebugContext<'_> {
pub fn debug_struct_id(
&self,
id: super::AdtId,
f: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_ctor: TypeCtor = from_chalk(self.0, TypeName::Adt(id));
match type_ctor {
TypeCtor::Bool => write!(f, "bool")?,
TypeCtor::Char => write!(f, "char")?,
TypeCtor::Int(t) => write!(f, "{}", t)?,
TypeCtor::Float(t) => write!(f, "{}", t)?,
TypeCtor::Str => write!(f, "str")?,
TypeCtor::Slice => write!(f, "slice")?,
TypeCtor::Array => write!(f, "array")?,
TypeCtor::RawPtr(m) => write!(f, "*{}", m.as_keyword_for_ptr())?,
TypeCtor::Ref(m) => write!(f, "&{}", m.as_keyword_for_ref())?,
TypeCtor::Never => write!(f, "!")?,
TypeCtor::Tuple { .. } => {
write!(f, "()")?;
}
TypeCtor::FnPtr { .. } => {
write!(f, "fn")?;
}
TypeCtor::FnDef(def) => {
let name = match def {
CallableDef::FunctionId(ff) => self.0.function_data(ff).name.clone(),
CallableDef::StructId(s) => self.0.struct_data(s).name.clone(),
CallableDef::EnumVariantId(e) => {
let enum_data = self.0.enum_data(e.parent);
enum_data.variants[e.local_id].name.clone()
}
};
match def {
CallableDef::FunctionId(_) => write!(f, "{{fn {}}}", name)?,
CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
write!(f, "{{ctor {}}}", name)?
}
}
}
TypeCtor::Adt(def_id) => {
let name = match def_id {
AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
};
write!(f, "{}", name)?;
}
TypeCtor::AssociatedType(type_alias) => {
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(it) => it,
_ => panic!("not an associated type"),
};
let trait_name = self.0.trait_data(trait_).name.clone();
let name = self.0.type_alias_data(type_alias).name.clone();
write!(f, "{}::{}", trait_name, name)?;
}
TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
write!(f, "{{impl trait {} of {:?}}}", idx, func)?;
}
},
TypeCtor::Closure { def, expr } => {
write!(f, "{{closure {:?} in ", expr.into_raw())?;
match def {
DefWithBodyId::FunctionId(func) => {
write!(f, "fn {}", self.0.function_data(func).name)?
}
DefWithBodyId::StaticId(s) => {
if let Some(name) = self.0.static_data(s).name.as_ref() {
write!(f, "body of static {}", name)?;
} else {
write!(f, "body of unnamed static {:?}", s)?;
}
}
DefWithBodyId::ConstId(c) => {
if let Some(name) = self.0.const_data(c).name.as_ref() {
write!(f, "body of const {}", name)?;
} else {
write!(f, "body of unnamed const {:?}", c)?;
}
}
};
write!(f, "}}")?;
}
}
Ok(())
}
pub fn debug_trait_id(
&self,
id: super::TraitId,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let trait_: hir_def::TraitId = from_chalk(self.0, id);
let trait_data = self.0.trait_data(trait_);
write!(fmt, "{}", trait_data.name)
}
pub fn debug_assoc_type_id(
&self,
id: super::AssocTypeId,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_alias: TypeAliasId = from_chalk(self.0, id);
let type_alias_data = self.0.type_alias_data(type_alias);
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
}
pub fn debug_opaque_ty_id(
&self,
opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
}
pub fn debug_alias(
&self,
alias_ty: &AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
match alias_ty {
AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
}
}
pub fn debug_projection_ty(
&self,
projection_ty: &chalk_ir::ProjectionTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
let type_alias_data = self.0.type_alias_data(type_alias);
let trait_ = match type_alias.lookup(self.0.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
let params = projection_ty.substitution.as_slice(&Interner);
        write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
if params.len() > 1 {
            write!(
                fmt,
                "<{}>",
                &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
            )?;
}
write!(fmt, ">::{}", type_alias_data.name)
}
pub fn debug_opaque_ty(
&self,
opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
}
pub fn debug_ty(
&self,
ty: &chalk_ir::Ty<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", ty.data(&Interner))
}
pub fn debug_lifetime(
&self,
lifetime: &Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", lifetime.data(&Interner))
}
pub fn debug_generic_arg(
&self,
parameter: &GenericArg<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
}
pub fn debug_goal(
&self,
goal: &Goal<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let goal_data = goal.data(&Interner);
write!(fmt, "{:?}", goal_data)
}
pub fn debug_goals(
&self,
goals: &Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", goals.debug(&Interner))
}
pub fn debug_program_clause_implication(
&self,
pci: &ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", pci.debug(&Interner))
}
pub fn debug_application_ty(
&self,
application_ty: &chalk_ir::ApplicationTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", application_ty.debug(&Interner))
}
pub fn debug_substitution(
&self,
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", substitution.debug(&Interner))
}
pub fn debug_separator_trait_ref(
&self,
separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
}
pub fn debug_fn_def_id(
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Result<(), fmt::Error> {
let def: CallableDef = from_chalk(self.0, fn_def_id);
let name = match def {
CallableDef::FunctionId(ff) => self.0.function_data(ff).name.clone(),
CallableDef::StructId(s) => self.0.struct_data(s).name.clone(),
CallableDef::EnumVariantId(e) => {
let enum_data = self.0.enum_data(e.parent);
enum_data.variants[e.local_id].name.clone()
}
};
match def {
CallableDef::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
write!(fmt, "{{ctor {}}}", name)
}
}
}
pub fn debug_const(
&self,
_constant: &chalk_ir::Const<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "const")
}
pub fn debug_variable_kinds(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
}
pub fn debug_variable_kinds_with_angles(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
}
pub fn debug_canonical_var_kinds(
&self,
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
}
pub fn debug_program_clause(
&self,
clause: &chalk_ir::ProgramClause<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clause.data(&Interner))
}
pub fn debug_program_clauses(
&self,
clauses: &chalk_ir::ProgramClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner))
}
pub fn debug_quantified_where_clauses(
&self,
clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner))
}
}
mod unsafe_tls {
use super::DebugContext;
use crate::db::HirDatabase;
use scoped_tls::scoped_thread_local;
scoped_thread_local!(static PROGRAM: DebugContext);
pub fn with_current_program<R>(
op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
) -> R {
if PROGRAM.is_set() {
PROGRAM.with(|prog| op(Some(prog)))
} else {
op(None)
}
}
pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
where
OP: FnOnce() -> R,
{
let ctx = DebugContext(p);
// we're transmuting the lifetime in the DebugContext to static. This is
// fine because we only keep the reference for the lifetime of this
// function, *and* the only way to access the context is through
// `with_current_program`, which hides the lifetime through the `for`
// type.
let static_p: &DebugContext<'static> =
unsafe { std::mem::transmute::<&DebugContext, &DebugContext<'static>>(&ctx) };
PROGRAM.set(static_p, || op())
}
}
| 35.871866 | 95 | 0.514909 |
1d4e38f5512e7b0211550c25f28e8c8b9e2f23bd | 1,347 | use bevy::prelude::*;
fn main() {
App::build()
.add_default_plugins()
.add_startup_system(setup.system())
.add_system(animate_sprite_system.system())
.run();
}
fn animate_sprite_system(
texture_atlases: Res<Assets<TextureAtlas>>,
mut query: Query<(&mut Timer, &mut TextureAtlasSprite, &Handle<TextureAtlas>)>,
) {
for (timer, mut sprite, texture_atlas_handle) in &mut query.iter() {
if timer.finished {
let texture_atlas = texture_atlases.get(texture_atlas_handle).unwrap();
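            // step to the next frame, wrapping back to the first sprite in the atlas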
sprite.index = ((sprite.index as usize + 1) % texture_atlas.textures.len()) as u32;
}
}
}
fn setup(
mut commands: Commands,
asset_server: Res<AssetServer>,
mut texture_atlases: ResMut<Assets<TextureAtlas>>,
) {
let texture_handle = asset_server.load("textures/rpg/chars/gabe/gabe-idle-run.png");
let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(24.0, 24.0), 7, 1);
let texture_atlas_handle = texture_atlases.add(texture_atlas);
commands
.spawn(Camera2dComponents::default())
.spawn(SpriteSheetComponents {
texture_atlas: texture_atlas_handle,
transform: Transform::from_scale(Vec3::splat(6.0)),
..Default::default()
})
.with(Timer::from_seconds(0.1, true));
}
| 33.675 | 95 | 0.651819 |
2980390912292b664bc29f65d9d86604655f790b | 2,941 | use crate::cpu::{read, CPU};
use crate::ppu::PPU;
use crate::NROM;
pub fn abs(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let ll = read(cpu, ppu, &n_rom, mem, cpu.pc) as u16;
let hh = read(cpu, ppu, &n_rom, mem, cpu.pc + 1) as u16;
cpu.pc += 2;
((hh << 8) | ll, 4)
}
pub fn abs_x(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let (address, mut cycle) = abs(cpu, ppu, n_rom, mem);
let address_with_x = address.wrapping_add(cpu.x as u16);
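    // indexing that crosses a 256-byte page boundary costs one extra cycle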
if address_with_x & 0xFF00 != address & 0xFF00 {
cycle += 1;
}
(address_with_x, cycle)
}
pub fn abs_y(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let (address, mut cycle) = abs(cpu, ppu, n_rom, mem);
let address_with_y = address.wrapping_add(cpu.y as u16);
if address_with_y & 0xFF00 != address & 0xFF00 {
cycle += 1;
}
(address_with_y, cycle)
}
pub fn imm_rel(cpu: &mut CPU) -> (u16, usize) {
cpu.pc += 1;
(cpu.pc - 1, 2)
}
pub fn ind(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let ll = read(cpu, ppu, &n_rom, mem, cpu.pc) as u16;
let hh = read(cpu, ppu, &n_rom, mem, cpu.pc + 1) as u16;
cpu.pc += 2;
let low_byte = read(cpu, ppu, &n_rom, mem, (hh << 8) | ll) as u16;
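    // the high byte is fetched from the start of the same page, reproducing the
    // 6502 indirect-JMP page-wrap quirk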
let high_byte = read(cpu, ppu, &n_rom, mem, (hh << 8) | ((ll + 1) & 0xFF)) as u16;
((high_byte << 8) | low_byte, 5)
}
pub fn ind_y(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let zp_ll = read(cpu, ppu, n_rom, mem, cpu.pc);
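    // the pointer's second byte wraps around within the zero page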
let zp_hh = zp_ll.wrapping_add(1);
cpu.pc += 1;
let ll = read(cpu, ppu, n_rom, mem, zp_ll as u16) as u16;
let hh = read(cpu, ppu, n_rom, mem, zp_hh as u16) as u16;
let address = ((hh) << 8) | ll;
let address_with_y = address.wrapping_add(cpu.y as u16);
if address & 0xFF00 != address_with_y & 0xFF00 {
return (address_with_y, 6);
}
(address_with_y, 5)
}
pub fn x_ind(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
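    // the zero-page pointer is indexed by X (wrapping inside page zero) and both
    // pointer bytes are read from the zero page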
let ll_with_x = read(cpu, ppu, &n_rom, mem, cpu.pc).wrapping_add(cpu.x) as u16;
cpu.pc += 1;
let low_byte = read(cpu, ppu, &n_rom, mem, ll_with_x) as u16;
let high_byte = read(cpu, ppu, &n_rom, mem, (ll_with_x + 1) & 0xFF) as u16;
((high_byte << 8) | low_byte, 6)
}
pub fn zpg(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let address = read(cpu, ppu, n_rom, mem, cpu.pc) as u16;
cpu.pc += 1;
(address, 3)
}
pub fn zpg_x(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let (address, cycles) = zpg(cpu, ppu, n_rom, mem);
((address + (cpu.x as u16)) & 0xFF, cycles + 1)
}
pub fn zpg_y(cpu: &mut CPU, ppu: &mut PPU, n_rom: &NROM, mem: &[u8]) -> (u16, usize) {
let (address, cycles) = zpg(cpu, ppu, n_rom, mem);
((address + (cpu.y as u16)) & 0xFF, cycles + 1)
}
| 36.308642 | 86 | 0.571234 |
8a6fe16868269197ca7c68e895ad782825309e36 | 3,064 | mod engine;
mod synchronization;
use rand::random;
use protocol::types::{
Address, Block, Bytes, Hash, Hasher, Header, Hex, MerkleRoot, Proof, Validator,
};
use crate::status::CurrentStatus;
const _HEIGHT_TEN: u64 = 10;
fn _mock_block_from_status(status: &CurrentStatus) -> Block {
let block_header = Header {
chain_id: 0,
number: status.last_number + 1,
prev_hash: status.prev_hash,
timestamp: random::<u64>(),
transactions_root: _mock_hash(),
signed_txs_hash: _mock_hash(),
state_root: status.state_root,
receipts_root: status.receipts_root,
gas_used: status.gas_used,
gas_limit: status.gas_limit,
proposer: _mock_address().0,
proof: _mock_proof(status.last_number),
log_bloom: Default::default(),
difficulty: Default::default(),
extra_data: Default::default(),
mixed_hash: Default::default(),
nonce: Default::default(),
base_fee_per_gas: Default::default(),
last_checkpoint_block_hash: Default::default(),
};
Block {
header: block_header,
tx_hashes: vec![],
}
}
fn _mock_current_status() -> CurrentStatus {
CurrentStatus {
gas_used: random::<u64>().into(),
gas_limit: random::<u64>().into(),
log_bloom: Default::default(),
base_fee_per_gas: Default::default(),
last_number: _HEIGHT_TEN,
prev_hash: _mock_hash(),
state_root: _mock_hash(),
receipts_root: _mock_hash(),
proof: _mock_proof(_HEIGHT_TEN),
}
}
fn _mock_proof(proof_number: u64) -> Proof {
Proof {
number: proof_number,
round: random::<u64>(),
signature: _get_random_bytes(64),
bitmap: _get_random_bytes(20),
block_hash: _mock_hash(),
}
}
fn _mock_roots(len: u64) -> Vec<MerkleRoot> {
(0..len).map(|_| _mock_hash()).collect::<Vec<_>>()
}
fn _mock_hash() -> Hash {
Hasher::digest(_get_random_bytes(10))
}
fn _mock_address() -> Address {
let hash = _mock_hash();
Address::from_hash(hash)
}
fn _get_random_bytes(len: usize) -> Bytes {
let vec: Vec<u8> = (0..len).map(|_| random::<u8>()).collect();
Bytes::from(vec)
}
fn _mock_pub_key() -> Hex {
Hex::from_string(
"0x026c184a9016f6f71a234c86b141621f38b68c78602ab06768db4d83682c616004".to_owned(),
)
.unwrap()
}
fn _mock_validators(len: usize) -> Vec<Validator> {
(0..len).map(|_| _mock_validator()).collect::<Vec<_>>()
}
fn _mock_validator() -> Validator {
Validator {
pub_key: _mock_pub_key().as_bytes(),
propose_weight: random::<u32>(),
vote_weight: random::<u32>(),
}
}
| 29.747573 | 90 | 0.546345 |
d94c0af634f67a66eea948e5065cad9edea4163c | 7,909 | //! Displays spheres with physically based materials.
use amethyst::{
assets::AssetLoaderSystemData,
core::{
ecs::{Builder, ReadExpect, Resources, SystemData},
Transform, TransformBundle,
},
renderer::{
camera::Camera,
light::{Light, PointLight},
mtl::{Material, MaterialDefaults},
palette::{LinSrgba, Srgb},
pass::DrawPbrDesc,
rendy::{
factory::Factory,
graph::{
render::{RenderGroupDesc, SubpassBuilder},
GraphBuilder,
},
hal::{format::Format, image},
mesh::{Normal, Position, Tangent, TexCoord},
texture::palette::load_from_linear_rgba,
},
shape::Shape,
types::{DefaultBackend, Texture},
GraphCreator, Mesh, RenderingSystem,
},
utils::application_root_dir,
window::{ScreenDimensions, Window, WindowBundle},
Application, GameData, GameDataBuilder, SimpleState, StateData,
};
struct Example;
impl SimpleState for Example {
fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
let StateData { world, .. } = data;
let mat_defaults = world.read_resource::<MaterialDefaults>().0.clone();
println!("Load mesh");
let (mesh, albedo) = {
let mesh = world.exec(|loader: AssetLoaderSystemData<'_, Mesh>| {
loader.load_from_data(
Shape::Sphere(32, 32)
.generate::<(Vec<Position>, Vec<Normal>, Vec<Tangent>, Vec<TexCoord>)>(None)
.into(),
(),
)
});
let albedo = world.exec(|loader: AssetLoaderSystemData<'_, Texture>| {
loader.load_from_data(
load_from_linear_rgba(LinSrgba::new(1.0, 1.0, 1.0, 0.5)).into(),
(),
)
});
(mesh, albedo)
};
println!("Create spheres");
for i in 0..5 {
for j in 0..5 {
let roughness = 1.0f32 * (i as f32 / 4.0f32);
let metallic = 1.0f32 * (j as f32 / 4.0f32);
let mut pos = Transform::default();
pos.set_translation_xyz(2.0f32 * (i - 2) as f32, 2.0f32 * (j - 2) as f32, 0.0);
let mtl = world.exec(
|(mtl_loader, tex_loader): (
AssetLoaderSystemData<'_, Material>,
AssetLoaderSystemData<'_, Texture>,
)| {
let metallic_roughness = tex_loader.load_from_data(
load_from_linear_rgba(LinSrgba::new(0.0, roughness, metallic, 0.0))
.into(),
(),
);
mtl_loader.load_from_data(
Material {
albedo: albedo.clone(),
metallic_roughness,
..mat_defaults.clone()
},
(),
)
},
);
world
.create_entity()
.with(pos)
.with(mesh.clone())
.with(mtl)
.build();
}
}
println!("Create lights");
let light1: Light = PointLight {
intensity: 6.0,
color: Srgb::new(0.8, 0.0, 0.0),
..PointLight::default()
}
.into();
let mut light1_transform = Transform::default();
light1_transform.set_translation_xyz(6.0, 6.0, -6.0);
let light2: Light = PointLight {
intensity: 5.0,
color: Srgb::new(0.0, 0.3, 0.7),
..PointLight::default()
}
.into();
let mut light2_transform = Transform::default();
light2_transform.set_translation_xyz(6.0, -6.0, -6.0);
world
.create_entity()
.with(light1)
.with(light1_transform)
.build();
world
.create_entity()
.with(light2)
.with(light2_transform)
.build();
println!("Put camera");
let mut transform = Transform::default();
transform.set_translation_xyz(0.0, 0.0, -12.0);
transform.prepend_rotation_y_axis(std::f32::consts::PI);
let (width, height) = {
let dim = world.read_resource::<ScreenDimensions>();
(dim.width(), dim.height())
};
world
.create_entity()
.with(Camera::standard_3d(width, height))
.with(transform)
.build();
}
}
fn main() -> amethyst::Result<()> {
amethyst::start_logger(Default::default());
let app_root = application_root_dir()?;
let display_config_path = app_root.join("examples/material/config/display.ron");
let assets_directory = app_root.join("examples/assets/");
let game_data = GameDataBuilder::default()
.with_bundle(WindowBundle::from_config_path(display_config_path))?
.with_bundle(TransformBundle::new())?
.with_thread_local(RenderingSystem::<DefaultBackend, _>::new(
ExampleGraph::default(),
));
let mut game = Application::new(&assets_directory, Example, game_data)?;
game.run();
Ok(())
}
#[derive(Default)]
struct ExampleGraph {
dimensions: Option<ScreenDimensions>,
surface_format: Option<Format>,
dirty: bool,
}
#[allow(clippy::map_clone)]
impl GraphCreator<DefaultBackend> for ExampleGraph {
fn rebuild(&mut self, res: &Resources) -> bool {
        // Rebuild when dimensions change, but wait until at least two frames have the same dimensions.
let new_dimensions = res.try_fetch::<ScreenDimensions>();
use std::ops::Deref;
if self.dimensions.as_ref() != new_dimensions.as_ref().map(|d| d.deref()) {
self.dirty = true;
self.dimensions = new_dimensions.map(|d| d.clone());
return false;
}
self.dirty
}
fn builder(
&mut self,
factory: &mut Factory<DefaultBackend>,
res: &Resources,
) -> GraphBuilder<DefaultBackend, Resources> {
use amethyst::renderer::rendy::{
graph::present::PresentNode,
hal::command::{ClearDepthStencil, ClearValue},
};
self.dirty = false;
let window = <ReadExpect<'_, Window>>::fetch(res);
let surface = factory.create_surface(&window);
// cache surface format to speed things up
let surface_format = *self
.surface_format
.get_or_insert_with(|| factory.get_surface_format(&surface));
let dimensions = self.dimensions.as_ref().unwrap();
let window_kind =
image::Kind::D2(dimensions.width() as u32, dimensions.height() as u32, 1, 1);
let mut graph_builder = GraphBuilder::new();
let color = graph_builder.create_image(
window_kind,
1,
surface_format,
Some(ClearValue::Color([0.34, 0.36, 0.52, 1.0].into())),
);
let depth = graph_builder.create_image(
window_kind,
1,
Format::D32Sfloat,
Some(ClearValue::DepthStencil(ClearDepthStencil(1.0, 0))),
);
let pbr_pass = graph_builder.add_node(
SubpassBuilder::new()
.with_group(DrawPbrDesc::default().builder())
.with_color(color)
.with_depth_stencil(depth)
.into_pass(),
);
let _present = graph_builder
.add_node(PresentNode::builder(factory, surface, color).with_dependency(pbr_pass));
graph_builder
}
}
| 32.547325 | 100 | 0.516753 |
386987eb181ea9c411c213b5a802f3b058122ded | 9,431 | use crate::utils::sugg::Sugg;
use crate::utils::{
differing_macro_contexts, eq_expr_value, is_type_diagnostic_item, snippet_with_applicability, span_lint_and_then,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Block, Expr, ExprKind, PatKind, QPath, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
declare_clippy_lint! {
/// **What it does:** Checks for manual swapping.
///
/// **Why is this bad?** The `std::mem::swap` function exposes the intent better
/// without deinitializing or copying either variable.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let mut a = 42;
/// let mut b = 1337;
///
/// let t = b;
/// b = a;
/// a = t;
/// ```
/// Use std::mem::swap():
/// ```rust
/// let mut a = 1;
/// let mut b = 2;
/// std::mem::swap(&mut a, &mut b);
/// ```
pub MANUAL_SWAP,
complexity,
"manual swap of two variables"
}
declare_clippy_lint! {
/// **What it does:** Checks for `foo = bar; bar = foo` sequences.
///
/// **Why is this bad?** This looks like a failed attempt to swap.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let mut a = 1;
/// # let mut b = 2;
/// a = b;
/// b = a;
/// ```
/// If swapping is intended, use `swap()` instead:
/// ```rust
/// # let mut a = 1;
/// # let mut b = 2;
/// std::mem::swap(&mut a, &mut b);
/// ```
pub ALMOST_SWAPPED,
correctness,
"`foo = bar; bar = foo` sequence"
}
declare_lint_pass!(Swap => [MANUAL_SWAP, ALMOST_SWAPPED]);
impl<'tcx> LateLintPass<'tcx> for Swap {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
check_manual_swap(cx, block);
check_suspicious_swap(cx, block);
}
}
/// Implementation of the `MANUAL_SWAP` lint.
fn check_manual_swap(cx: &LateContext<'_>, block: &Block<'_>) {
for w in block.stmts.windows(3) {
if_chain! {
// let t = foo();
if let StmtKind::Local(ref tmp) = w[0].kind;
if let Some(ref tmp_init) = tmp.init;
if let PatKind::Binding(.., ident, None) = tmp.pat.kind;
// foo() = bar();
if let StmtKind::Semi(ref first) = w[1].kind;
if let ExprKind::Assign(ref lhs1, ref rhs1, _) = first.kind;
// bar() = t;
if let StmtKind::Semi(ref second) = w[2].kind;
if let ExprKind::Assign(ref lhs2, ref rhs2, _) = second.kind;
if let ExprKind::Path(QPath::Resolved(None, ref rhs2)) = rhs2.kind;
if rhs2.segments.len() == 1;
if ident.as_str() == rhs2.segments[0].ident.as_str();
if eq_expr_value(cx, tmp_init, lhs1);
if eq_expr_value(cx, rhs1, lhs2);
then {
if let ExprKind::Field(ref lhs1, _) = lhs1.kind {
if let ExprKind::Field(ref lhs2, _) = lhs2.kind {
if lhs1.hir_id.owner == lhs2.hir_id.owner {
return;
}
}
}
let mut applicability = Applicability::MachineApplicable;
let slice = check_for_slice(cx, lhs1, lhs2);
let (replace, what, sugg) = if let Slice::NotSwappable = slice {
return;
} else if let Slice::Swappable(slice, idx1, idx2) = slice {
if let Some(slice) = Sugg::hir_opt(cx, slice) {
(
false,
format!(" elements of `{}`", slice),
format!(
"{}.swap({}, {})",
slice.maybe_par(),
snippet_with_applicability(cx, idx1.span, "..", &mut applicability),
snippet_with_applicability(cx, idx2.span, "..", &mut applicability),
),
)
} else {
(false, String::new(), String::new())
}
} else if let (Some(first), Some(second)) = (Sugg::hir_opt(cx, lhs1), Sugg::hir_opt(cx, rhs1)) {
(
true,
format!(" `{}` and `{}`", first, second),
format!("std::mem::swap({}, {})", first.mut_addr(), second.mut_addr()),
)
} else {
(true, String::new(), String::new())
};
let span = w[0].span.to(second.span);
span_lint_and_then(
cx,
MANUAL_SWAP,
span,
&format!("this looks like you are swapping{} manually", what),
|diag| {
if !sugg.is_empty() {
diag.span_suggestion(
span,
"try",
sugg,
applicability,
);
if replace {
diag.note("or maybe you should use `std::mem::replace`?");
}
}
}
);
}
}
}
}
enum Slice<'a> {
/// `slice.swap(idx1, idx2)` can be used
///
/// ## Example
///
/// ```rust
/// # let mut a = vec![0, 1];
/// let t = a[1];
/// a[1] = a[0];
/// a[0] = t;
/// // can be written as
/// a.swap(0, 1);
/// ```
Swappable(&'a Expr<'a>, &'a Expr<'a>, &'a Expr<'a>),
/// The `swap` function cannot be used.
///
/// ## Example
///
/// ```rust
/// # let mut a = [vec![1, 2], vec![3, 4]];
/// let t = a[0][1];
/// a[0][1] = a[1][0];
/// a[1][0] = t;
/// ```
NotSwappable,
/// Not a slice
None,
}
/// Checks if both expressions are index operations into "slice-like" types.
fn check_for_slice<'a>(cx: &LateContext<'_>, lhs1: &'a Expr<'_>, lhs2: &'a Expr<'_>) -> Slice<'a> {
if let ExprKind::Index(ref lhs1, ref idx1) = lhs1.kind {
if let ExprKind::Index(ref lhs2, ref idx2) = lhs2.kind {
if eq_expr_value(cx, lhs1, lhs2) {
let ty = cx.typeck_results().expr_ty(lhs1).peel_refs();
if matches!(ty.kind(), ty::Slice(_))
|| matches!(ty.kind(), ty::Array(_, _))
|| is_type_diagnostic_item(cx, ty, sym::vec_type)
|| is_type_diagnostic_item(cx, ty, sym!(vecdeque_type))
{
return Slice::Swappable(lhs1, idx1, idx2);
}
} else {
return Slice::NotSwappable;
}
}
}
Slice::None
}
/// Implementation of the `ALMOST_SWAPPED` lint.
fn check_suspicious_swap(cx: &LateContext<'_>, block: &Block<'_>) {
for w in block.stmts.windows(2) {
if_chain! {
if let StmtKind::Semi(ref first) = w[0].kind;
if let StmtKind::Semi(ref second) = w[1].kind;
if !differing_macro_contexts(first.span, second.span);
if let ExprKind::Assign(ref lhs0, ref rhs0, _) = first.kind;
if let ExprKind::Assign(ref lhs1, ref rhs1, _) = second.kind;
if eq_expr_value(cx, lhs0, rhs1);
if eq_expr_value(cx, lhs1, rhs0);
then {
let lhs0 = Sugg::hir_opt(cx, lhs0);
let rhs0 = Sugg::hir_opt(cx, rhs0);
let (what, lhs, rhs) = if let (Some(first), Some(second)) = (lhs0, rhs0) {
(
format!(" `{}` and `{}`", first, second),
first.mut_addr().to_string(),
second.mut_addr().to_string(),
)
} else {
(String::new(), String::new(), String::new())
};
let span = first.span.to(second.span);
span_lint_and_then(cx,
ALMOST_SWAPPED,
span,
&format!("this looks like you are trying to swap{}", what),
|diag| {
if !what.is_empty() {
diag.span_suggestion(
span,
"try",
format!(
"std::mem::swap({}, {})",
lhs,
rhs,
),
Applicability::MaybeIncorrect,
);
diag.note("or maybe you should use `std::mem::replace`?");
}
});
}
}
}
}
| 35.723485 | 117 | 0.421482 |
0adca8dcde701ccdc6380fe3229a83c7523243e6 | 17,379 | // Copyright 2017 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Data structures representing (multiple) selections and cursors.
use std::cmp::{min, max};
use std::ops::Deref;
use std::ops::Bound;
use std::ops::RangeBounds;
use index_set::remove_n_at;
use xi_rope::delta::{Delta, Transformer};
use xi_rope::rope::RopeInfo;
/// A type representing horizontal measurements. This is currently in units
/// that are not very well defined except that ASCII characters count as
/// 1 each. It will change.
pub type HorizPos = usize;
/// A set of zero or more selection regions, representing a selection state.
#[derive(Default, Debug, Clone)]
pub struct Selection {
// An invariant: regions[i].max() <= regions[i+1].min()
// and < if either is_caret()
regions: Vec<SelRegion>,
}
impl Selection {
/// Creates a new empty selection.
pub fn new() -> Selection {
Selection::default()
}
/// Creates a selection with a single region.
pub fn new_simple(region: SelRegion) -> Selection {
Selection {
regions: vec![region]
}
}
/// Clear the selection.
pub fn clear(&mut self) {
self.regions.clear();
}
/// Collapse all selections into a single caret.
pub fn collapse(&mut self) {
self.regions.truncate(1);
self.regions[0].start = self.regions[0].end;
}
// The smallest index so that offset > region.max() for all preceding
// regions.
pub fn search(&self, offset: usize) -> usize {
if self.regions.is_empty() || offset > self.regions.last().unwrap().max() {
return self.regions.len();
}
match self.regions.binary_search_by(|r| r.max().cmp(&offset)) {
Ok(ix) => ix,
Err(ix) => ix,
}
}
/// Add a region to the selection. This method implements merging logic.
///
/// Two non-caret regions merge if their interiors intersect; merely
/// touching at the edges does not cause a merge. A caret merges with
/// a non-caret if it is in the interior or on either edge. Two carets
/// merge if they are the same offset.
///
/// Performance note: should be O(1) if the new region strictly comes
/// after all the others in the selection, otherwise O(n).
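    ///
    /// Illustrative sketch (not part of the original docs): adding `(4, 6)` to a
    /// selection holding `(3, 5)` merges them into the single region `(3, 6)`,
    /// while adding the caret `(5, 5)` merges into the existing `(3, 5)`.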
pub fn add_region(&mut self, region: SelRegion) {
let mut ix = self.search(region.min());
if ix == self.regions.len() {
self.regions.push(region);
return;
}
let mut region = region;
let mut end_ix = ix;
if self.regions[ix].min() <= region.min() {
if self.regions[ix].should_merge(region) {
region = self.regions[ix].merge_with(region);
} else {
ix += 1;
}
end_ix += 1;
}
while end_ix < self.regions.len() && region.should_merge(self.regions[end_ix]) {
region = region.merge_with(self.regions[end_ix]);
end_ix += 1;
}
if ix == end_ix {
self.regions.insert(ix, region);
} else {
self.regions[ix] = region;
remove_n_at(&mut self.regions, ix + 1, end_ix - ix - 1);
}
}
/// Gets a slice of regions that intersect the given range. Regions that
/// merely touch the range at the edges are also included, so it is the
/// caller's responsibility to further trim them, in particular to only
/// display one caret in the upstream/downstream cases.
///
/// Performance note: O(log n).
pub fn regions_in_range(&self, start: usize, end: usize) -> &[SelRegion] {
let first = self.search(start);
let mut last = self.search(end);
if last < self.regions.len() && self.regions[last].min() <= end {
last += 1;
}
&self.regions[first..last]
}
/// Deletes all the regions that intersect or (if delete_adjacent = true) touch the given range.
pub fn delete_range(&mut self, start: usize, end: usize, delete_adjacent: bool) {
let mut first = self.search(start);
let mut last = self.search(end);
if first >= self.regions.len() {
return;
}
if !delete_adjacent && self.regions[first].max() == start {
first += 1;
}
if last < self.regions.len() && ((delete_adjacent && self.regions[last].min() <= end)
|| (!delete_adjacent && self.regions[last].min() < end)) {
last += 1;
}
remove_n_at(&mut self.regions, first, last - first);
}
/// Add a region to the selection. This method does not merge regions and does not allow
/// ambiguous regions (regions that overlap).
///
/// On ambiguous regions, the region with the lower start position wins. That is, in such a
/// case, the new region is either not added at all, because there is an ambiguous region with
/// a lower start position, or existing regions that intersect with the new region but do
/// not start before the new region, are deleted.
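    ///
    /// Illustrative sketch (not part of the original docs): with an existing region
    /// `(3, 5)`, adding `(4, 6)` is rejected and `(3, 5)` is returned, whereas
    /// adding `(2, 4)` removes `(3, 5)` and returns `(2, 4)`.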
pub fn add_range_distinct(&mut self, region: SelRegion) -> (usize, usize) {
let mut ix = self.search(region.min());
if ix < self.regions.len() && self.regions[ix].max() == region.min() {
ix += 1;
}
if ix < self.regions.len() {
// in case of ambiguous regions the region closer to the left wins
let occ = &self.regions[ix];
let is_eq = occ.min() == region.min() && occ.max() == region.max();
let is_intersect_before = region.min() >= occ.min() && occ.max() > region.min();
if is_eq || is_intersect_before {
return (occ.min(), occ.max());
}
}
// delete ambiguous regions to the right
let mut last = self.search(region.max());
if last < self.regions.len() && self.regions[last].min() < region.max() {
last += 1;
}
remove_n_at(&mut self.regions, ix, last - ix);
if ix == self.regions.len() {
self.regions.push(region);
} else {
self.regions.insert(ix, region);
}
(self.regions[ix].min(), self.regions[ix].max())
}
/// Computes a new selection based on applying a delta to the old selection.
///
/// When new text is inserted at a caret, the new caret can be either before
/// or after the inserted text, depending on the `after` parameter.
///
/// Whether or not the preceding selections are restored depends on the keep_selections
/// value (only set to true on transpose).
pub fn apply_delta(&self, delta: &Delta<RopeInfo>, after: bool, keep_selections: bool) -> Selection {
let mut result = Selection::new();
let mut transformer = Transformer::new(delta);
for region in self.iter() {
let preserve_selection = keep_selections && region.start != region.end;
let start_after = {
if preserve_selection {
region.start > region.end
} else {
after
}
};
let end_after = {
if preserve_selection {
region.start < region.end
} else {
after
}
};
let new_region = SelRegion::new(
transformer.transform(region.start, start_after),
transformer.transform(region.end, end_after),
).with_affinity(region.affinity);
result.add_region(new_region);
}
result
}
}
/// Implementing the Deref trait allows callers to easily test `is_empty`, iterate
/// through all ranges, etc.
impl Deref for Selection {
type Target = [SelRegion];
fn deref(&self) -> &[SelRegion] {
&self.regions
}
}
/// The "affinity" of a cursor which is sitting exactly on a line break.
///
/// We say "cursor" here rather than "caret" because (depending on presentation)
/// the front-end may draw a cursor even when the region is not a caret.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Affinity {
/// The cursor should be displayed downstream of the line break. For
/// example, if the buffer is "abcd", and the cursor is on a line break
/// after "ab", it should be displayed on the second line before "cd".
Downstream,
/// The cursor should be displayed upstream of the line break. For
/// example, if the buffer is "abcd", and the cursor is on a line break
/// after "ab", it should be displayed on the previous line after "ab".
Upstream,
}
impl Default for Affinity {
fn default() -> Affinity {
Affinity::Downstream
}
}
/// A type representing a single contiguous region of a selection. We use the
/// term "caret" (sometimes also "cursor", more loosely) to refer to a selection
/// region with an empty interior. A "non-caret region" is one with a non-empty
/// interior (i.e. `start != end`).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct SelRegion {
/// The inactive edge of a selection, as a byte offset. When
/// equal to end, the selection range acts as a caret.
pub start: usize,
/// The active edge of a selection, as a byte offset.
pub end: usize,
/// A saved horizontal position (used primarily for line up/down movement).
pub horiz: Option<HorizPos>,
/// The affinity of the cursor.
pub affinity: Affinity,
}
impl SelRegion {
/// Returns a new region.
pub fn new(start: usize, end: usize) -> Self {
Self {
start,
end,
horiz: None,
affinity: Affinity::default(),
}
}
/// Returns a new caret region (`start == end`).
pub fn caret(pos: usize) -> Self {
Self {
start: pos,
end: pos,
horiz: None,
affinity: Affinity::default(),
}
}
/// Returns a region with the given horizontal position.
pub fn with_horiz(self, horiz: Option<HorizPos>) -> Self {
Self {
horiz,
..self
}
}
/// Returns a region with the given affinity.
pub fn with_affinity(self, affinity: Affinity) -> Self {
Self {
affinity,
..self
}
}
/// Gets the earliest offset within the region, ie the minimum of both edges.
pub fn min(self) -> usize {
min(self.start, self.end)
}
/// Gets the latest offset within the region, ie the maximum of both edges.
pub fn max(self) -> usize {
max(self.start, self.end)
}
/// Determines whether the region is a caret (ie has an empty interior).
pub fn is_caret(self) -> bool {
self.start == self.end
}
/// Determines whether the region's affinity is upstream.
pub fn is_upstream(self) -> bool {
self.affinity == Affinity::Upstream
}
// Indicate whether this region should merge with the next.
// Assumption: regions are sorted (self.min() <= other.min())
fn should_merge(self, other: SelRegion) -> bool {
other.min() < self.max() ||
((self.is_caret() || other.is_caret()) && other.min() == self.max())
}
fn merge_with(self, other: SelRegion) -> SelRegion {
let is_forward = self.end > self.start || other.end > other.start;
let new_min = min(self.min(), other.min());
let new_max = max(self.max(), other.max());
let (start, end) = if is_forward {
(new_min, new_max)
} else {
(new_max, new_min)
};
// Could try to preserve horiz/affinity from one of the
// sources, but very likely not worth it.
SelRegion::new(start, end)
}
}
// Returns `[min..max)`
impl<'a> RangeBounds<usize> for &'a SelRegion {
fn start_bound(&self) -> Bound<&usize> {
Bound::Included(min(&self.start, &self.end))
}
fn end_bound(&self) -> Bound<&usize> {
Bound::Excluded(max(&self.start, &self.end))
}
}
impl From<SelRegion> for Selection {
fn from(region: SelRegion) -> Self {
Self::new_simple(region)
}
}
#[cfg(test)]
mod tests {
use super::{Selection, SelRegion};
use std::ops::Deref;
fn r(start: usize, end: usize) -> SelRegion {
SelRegion::new(start, end)
}
#[test]
fn empty() {
let s = Selection::new();
assert!(s.is_empty());
assert_eq!(s.deref(), &[]);
}
#[test]
fn simple_region() {
let s = Selection::new_simple(r(3, 5));
assert!(!s.is_empty());
assert_eq!(s.deref(), &[r(3, 5)]);
}
#[test]
fn from_selregion() {
let s: Selection = r(3, 5).into();
assert!(!s.is_empty());
assert_eq!(s.deref(), &[r(3, 5)]);
}
#[test]
fn delete_range() {
let mut s = Selection::new_simple(r(3, 5));
s.delete_range(1, 2, true);
assert_eq!(s.deref(), &[r(3, 5)]);
s.delete_range(1, 3, false);
assert_eq!(s.deref(), &[r(3, 5)]);
s.delete_range(1, 3, true);
assert_eq!(s.deref(), &[]);
let mut s = Selection::new_simple(r(3, 5));
s.delete_range(5, 6, false);
assert_eq!(s.deref(), &[r(3, 5)]);
s.delete_range(5, 6, true);
assert_eq!(s.deref(), &[]);
let mut s = Selection::new_simple(r(3, 5));
s.delete_range(2, 4, false);
assert_eq!(s.deref(), &[]);
assert_eq!(s.deref(), &[]);
let mut s = Selection::new();
s.add_region(r(3, 5));
s.add_region(r(7, 8));
s.delete_range(2, 10, false);
assert_eq!(s.deref(), &[]);
}
#[test]
fn simple_regions_in_range() {
let s = Selection::new_simple(r(3, 5));
assert_eq!(s.regions_in_range(0, 1), &[]);
assert_eq!(s.regions_in_range(0, 2), &[]);
assert_eq!(s.regions_in_range(0, 3), &[r(3, 5)]);
assert_eq!(s.regions_in_range(0, 4), &[r(3, 5)]);
assert_eq!(s.regions_in_range(5, 6), &[r(3, 5)]);
assert_eq!(s.regions_in_range(6, 7), &[]);
}
#[test]
fn caret_regions_in_range() {
let s = Selection::new_simple(r(4, 4));
assert_eq!(s.regions_in_range(0, 1), &[]);
assert_eq!(s.regions_in_range(0, 2), &[]);
assert_eq!(s.regions_in_range(0, 3), &[]);
assert_eq!(s.regions_in_range(0, 4), &[r(4, 4)]);
assert_eq!(s.regions_in_range(4, 4), &[r(4, 4)]);
assert_eq!(s.regions_in_range(4, 5), &[r(4, 4)]);
assert_eq!(s.regions_in_range(5, 6), &[]);
}
#[test]
fn merge_regions() {
let mut s = Selection::new();
s.add_region(r(3, 5));
assert_eq!(s.deref(), &[r(3, 5)]);
s.add_region(r(7, 9));
assert_eq!(s.deref(), &[r(3, 5), r(7, 9)]);
s.add_region(r(1, 3));
assert_eq!(s.deref(), &[r(1, 3), r(3, 5), r(7, 9)]);
s.add_region(r(4, 6));
assert_eq!(s.deref(), &[r(1, 3), r(3, 6), r(7, 9)]);
s.add_region(r(2, 8));
assert_eq!(s.deref(), &[r(1, 9)]);
s.clear();
assert_eq!(s.deref(), &[]);
s.add_region(r(1, 4));
s.add_region(r(4, 5));
s.add_region(r(5, 6));
s.add_region(r(6, 9));
assert_eq!(s.deref(), &[r(1, 4), r(4, 5), r(5, 6), r(6, 9)]);
s.add_region(r(2, 8));
assert_eq!(s.deref(), &[r(1, 9)]);
}
#[test]
fn merge_carets() {
let mut s = Selection::new();
s.add_region(r(1, 1));
assert_eq!(s.deref(), &[r(1, 1)]);
s.add_region(r(3, 3));
assert_eq!(s.deref(), &[r(1, 1), r(3, 3)]);
s.add_region(r(2, 2));
assert_eq!(s.deref(), &[r(1, 1), r(2, 2), r(3, 3)]);
s.add_region(r(1, 1));
assert_eq!(s.deref(), &[r(1, 1), r(2, 2), r(3, 3)]);
}
#[test]
fn merge_region_caret() {
let mut s = Selection::new();
s.add_region(r(3, 5));
assert_eq!(s.deref(), &[r(3, 5)]);
s.add_region(r(3, 3));
assert_eq!(s.deref(), &[r(3, 5)]);
s.add_region(r(4, 4));
assert_eq!(s.deref(), &[r(3, 5)]);
s.add_region(r(5, 5));
assert_eq!(s.deref(), &[r(3, 5)]);
s.add_region(r(6, 6));
assert_eq!(s.deref(), &[r(3, 5), r(6, 6)]);
}
#[test]
fn merge_reverse() {
let mut s = Selection::new();
s.add_region(r(5, 3));
assert_eq!(s.deref(), &[r(5, 3)]);
s.add_region(r(9, 7));
assert_eq!(s.deref(), &[r(5, 3), r(9, 7)]);
s.add_region(r(3, 1));
assert_eq!(s.deref(), &[r(3, 1), r(5, 3), r(9, 7)]);
s.add_region(r(6, 4));
assert_eq!(s.deref(), &[r(3, 1), r(6, 3), r(9, 7)]);
s.add_region(r(8, 2));
assert_eq!(s.deref(), &[r(9, 1)]);
}
}
| 33.485549 | 105 | 0.558202 |
cceb373d552a86ba682c5f63121bf6496979f2fa | 9,736 | use crate::iter::LoopState;
use crate::ops::Try;
/// An iterator able to yield elements from both ends.
///
/// Something that implements `DoubleEndedIterator` has one extra capability
/// over something that implements [`Iterator`]: the ability to also take
/// `Item`s from the back, as well as the front.
///
/// It is important to note that both back and forth work on the same range,
/// and do not cross: iteration is over when they meet in the middle.
///
/// In a similar fashion to the [`Iterator`] protocol, once a
/// `DoubleEndedIterator` returns `None` from a `next_back()`, calling it again
/// may or may not ever return `Some` again. `next()` and `next_back()` are
/// interchangeable for this purpose.
///
/// [`Iterator`]: trait.Iterator.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let numbers = vec![1, 2, 3, 4, 5, 6];
///
/// let mut iter = numbers.iter();
///
/// assert_eq!(Some(&1), iter.next());
/// assert_eq!(Some(&6), iter.next_back());
/// assert_eq!(Some(&5), iter.next_back());
/// assert_eq!(Some(&2), iter.next());
/// assert_eq!(Some(&3), iter.next());
/// assert_eq!(Some(&4), iter.next());
/// assert_eq!(None, iter.next());
/// assert_eq!(None, iter.next_back());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait DoubleEndedIterator: Iterator {
/// Removes and returns an element from the end of the iterator.
///
/// Returns `None` when there are no more elements.
///
/// The [trait-level] docs contain more details.
///
/// [trait-level]: trait.DoubleEndedIterator.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let numbers = vec![1, 2, 3, 4, 5, 6];
///
/// let mut iter = numbers.iter();
///
/// assert_eq!(Some(&1), iter.next());
/// assert_eq!(Some(&6), iter.next_back());
/// assert_eq!(Some(&5), iter.next_back());
/// assert_eq!(Some(&2), iter.next());
/// assert_eq!(Some(&3), iter.next());
/// assert_eq!(Some(&4), iter.next());
/// assert_eq!(None, iter.next());
/// assert_eq!(None, iter.next_back());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn next_back(&mut self) -> Option<Self::Item>;
/// Returns the `n`th element from the end of the iterator.
///
/// This is essentially the reversed version of [`nth`]. Although like most indexing
/// operations, the count starts from zero, so `nth_back(0)` returns the first value from
/// the end, `nth_back(1)` the second, and so on.
///
/// Note that all elements between the end and the returned element will be
/// consumed, including the returned element. This also means that calling
/// `nth_back(0)` multiple times on the same iterator will return different
/// elements.
///
/// `nth_back()` will return [`None`] if `n` is greater than or equal to the length of the
/// iterator.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [`nth`]: ../../std/iter/trait.Iterator.html#method.nth
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = [1, 2, 3];
/// assert_eq!(a.iter().nth_back(2), Some(&1));
/// ```
///
/// Calling `nth_back()` multiple times doesn't rewind the iterator:
///
/// ```
/// let a = [1, 2, 3];
///
/// let mut iter = a.iter();
///
/// assert_eq!(iter.nth_back(1), Some(&2));
/// assert_eq!(iter.nth_back(1), None);
/// ```
///
/// Returning `None` if there are less than `n + 1` elements:
///
/// ```
/// let a = [1, 2, 3];
/// assert_eq!(a.iter().nth_back(10), None);
/// ```
#[inline]
#[stable(feature = "iter_nth_back", since = "1.37.0")]
fn nth_back(&mut self, mut n: usize) -> Option<Self::Item> {
for x in self.rev() {
if n == 0 {
return Some(x);
}
n -= 1;
}
None
}
/// This is the reverse version of [`try_fold()`]: it takes elements
/// starting from the back of the iterator.
///
/// [`try_fold()`]: trait.Iterator.html#method.try_fold
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = ["1", "2", "3"];
/// let sum = a.iter()
/// .map(|&s| s.parse::<i32>())
/// .try_rfold(0, |acc, x| x.and_then(|y| Ok(acc + y)));
/// assert_eq!(sum, Ok(6));
/// ```
///
/// Short-circuiting:
///
/// ```
/// let a = ["1", "rust", "3"];
/// let mut it = a.iter();
/// let sum = it
/// .by_ref()
/// .map(|&s| s.parse::<i32>())
/// .try_rfold(0, |acc, x| x.and_then(|y| Ok(acc + y)));
/// assert!(sum.is_err());
///
/// // Because it short-circuited, the remaining elements are still
/// // available through the iterator.
/// assert_eq!(it.next_back(), Some(&"1"));
/// ```
#[inline]
#[stable(feature = "iterator_try_fold", since = "1.27.0")]
fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
where
Self: Sized,
F: FnMut(B, Self::Item) -> R,
R: Try<Ok = B>,
{
let mut accum = init;
while let Some(x) = self.next_back() {
accum = f(accum, x)?;
}
Try::from_ok(accum)
}
/// An iterator method that reduces the iterator's elements to a single,
/// final value, starting from the back.
///
/// This is the reverse version of [`fold()`]: it takes elements starting from
/// the back of the iterator.
///
/// `rfold()` takes two arguments: an initial value, and a closure with two
/// arguments: an 'accumulator', and an element. The closure returns the value that
/// the accumulator should have for the next iteration.
///
/// The initial value is the value the accumulator will have on the first
/// call.
///
/// After applying this closure to every element of the iterator, `rfold()`
/// returns the accumulator.
///
/// This operation is sometimes called 'reduce' or 'inject'.
///
/// Folding is useful whenever you have a collection of something, and want
/// to produce a single value from it.
///
/// [`fold()`]: trait.Iterator.html#method.fold
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = [1, 2, 3];
///
/// // the sum of all of the elements of a
/// let sum = a.iter()
/// .rfold(0, |acc, &x| acc + x);
///
/// assert_eq!(sum, 6);
/// ```
///
/// This example builds a string, starting with an initial value
/// and continuing with each element from the back until the front:
///
/// ```
/// let numbers = [1, 2, 3, 4, 5];
///
/// let zero = "0".to_string();
///
/// let result = numbers.iter().rfold(zero, |acc, &x| {
/// format!("({} + {})", x, acc)
/// });
///
/// assert_eq!(result, "(1 + (2 + (3 + (4 + (5 + 0)))))");
/// ```
#[inline]
#[stable(feature = "iter_rfold", since = "1.27.0")]
fn rfold<B, F>(mut self, init: B, mut f: F) -> B
where
Self: Sized,
F: FnMut(B, Self::Item) -> B,
{
let mut accum = init;
while let Some(x) = self.next_back() {
accum = f(accum, x);
}
accum
}
/// Searches for an element of an iterator from the back that satisfies a predicate.
///
/// `rfind()` takes a closure that returns `true` or `false`. It applies
/// this closure to each element of the iterator, starting at the end, and if any
/// of them return `true`, then `rfind()` returns [`Some(element)`]. If they all return
/// `false`, it returns [`None`].
///
/// `rfind()` is short-circuiting; in other words, it will stop processing
/// as soon as the closure returns `true`.
///
/// Because `rfind()` takes a reference, and many iterators iterate over
/// references, this leads to a possibly confusing situation where the
/// argument is a double reference. You can see this effect in the
/// examples below, with `&&x`.
///
/// [`Some(element)`]: ../../std/option/enum.Option.html#variant.Some
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = [1, 2, 3];
///
/// assert_eq!(a.iter().rfind(|&&x| x == 2), Some(&2));
///
/// assert_eq!(a.iter().rfind(|&&x| x == 5), None);
/// ```
///
/// Stopping at the first `true`:
///
/// ```
/// let a = [1, 2, 3];
///
/// let mut iter = a.iter();
///
/// assert_eq!(iter.rfind(|&&x| x == 2), Some(&2));
///
/// // we can still use `iter`, as there are more elements.
/// assert_eq!(iter.next_back(), Some(&1));
/// ```
#[inline]
#[stable(feature = "iter_rfind", since = "1.27.0")]
fn rfind<P>(&mut self, predicate: P) -> Option<Self::Item>
where
Self: Sized,
P: FnMut(&Self::Item) -> bool,
{
#[inline]
fn check<T>(
mut predicate: impl FnMut(&T) -> bool,
) -> impl FnMut((), T) -> LoopState<(), T> {
move |(), x| {
if predicate(&x) { LoopState::Break(x) } else { LoopState::Continue(()) }
}
}
self.try_rfold((), check(predicate)).break_value()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for &'a mut I {
fn next_back(&mut self) -> Option<I::Item> {
(**self).next_back()
}
fn nth_back(&mut self, n: usize) -> Option<I::Item> {
(**self).nth_back(n)
}
}
| 31.713355 | 94 | 0.526191 |
165039241229613ae43b3a630794f0bdd0c51317 | 12,738 | #![cfg(not(feature="safe"))]
#[cfg(feature="std")]
use std::{panic,thread, io};
#[cfg(feature="std")]
use std::io::Write;
use core;
use brotli_decompressor::ffi::alloc_util;
use brotli_decompressor::ffi::alloc_util::SubclassableAllocator;
use brotli_decompressor::ffi::interface::{
brotli_alloc_func,
brotli_free_func,
CAllocator,
c_void,
};
use brotli_decompressor::ffi::{
slice_from_raw_parts_or_nil,
slice_from_raw_parts_or_nil_mut,
};
use ::enc::encode::BrotliEncoderStateStruct;
use super::alloc_util::BrotliSubclassableAllocator;
#[repr(C)]
pub enum BrotliEncoderOperation {
BROTLI_OPERATION_PROCESS = 0,
BROTLI_OPERATION_FLUSH = 1,
BROTLI_OPERATION_FINISH = 2,
BROTLI_OPERATION_EMIT_METADATA = 3,
}
#[repr(C)]
pub enum BrotliEncoderMode {
BROTLI_MODE_GENERIC = 0,
BROTLI_MODE_TEXT = 1,
BROTLI_MODE_FONT = 2,
BROTLI_MODE_FORCE_LSB_PRIOR = 3,
BROTLI_MODE_FORCE_MSB_PRIOR = 4,
BROTLI_MODE_FORCE_UTF8_PRIOR = 5,
BROTLI_MODE_FORCE_SIGNED_PRIOR = 6,
}
#[repr(C)]
pub struct BrotliEncoderState {
pub custom_allocator: CAllocator,
pub compressor: BrotliEncoderStateStruct<BrotliSubclassableAllocator>,
}
#[cfg(not(feature="std"))]
fn brotli_new_compressor_without_custom_alloc(_to_box: BrotliEncoderState) -> *mut BrotliEncoderState{
panic!("Must supply allocators if calling divans when compiled without features=std");
}
#[cfg(feature="std")]
fn brotli_new_compressor_without_custom_alloc(to_box: BrotliEncoderState) -> *mut BrotliEncoderState{
alloc_util::Box::<BrotliEncoderState>::into_raw(
alloc_util::Box::<BrotliEncoderState>::new(to_box))
}
#[cfg(feature="std")]
unsafe fn free_compressor_no_custom_alloc(state_ptr: *mut BrotliEncoderState) {
let _state = alloc_util::Box::from_raw(state_ptr);
}
#[cfg(not(feature="std"))]
unsafe fn free_compressor_no_custom_alloc(_state_ptr: *mut BrotliEncoderState) {
unreachable!();
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderCreateInstance(
alloc_func: brotli_alloc_func,
free_func: brotli_free_func,
opaque: *mut c_void,
) -> *mut BrotliEncoderState {
match catch_panic_cstate(|| {
let allocators = CAllocator {
alloc_func:alloc_func,
free_func:free_func,
opaque:opaque,
};
let to_box = BrotliEncoderState {
custom_allocator: allocators.clone(),
compressor: ::enc::encode::BrotliEncoderCreateInstance(
BrotliSubclassableAllocator::new(
SubclassableAllocator::new(allocators.clone())),
),
};
if let Some(alloc) = alloc_func {
if free_func.is_none() {
panic!("either both alloc and free must exist or neither");
}
let ptr = alloc(allocators.opaque, core::mem::size_of::<BrotliEncoderState>());
let brotli_decoder_state_ptr = core::mem::transmute::<*mut c_void, *mut BrotliEncoderState>(ptr);
core::ptr::write(brotli_decoder_state_ptr, to_box);
brotli_decoder_state_ptr
} else {
brotli_new_compressor_without_custom_alloc(to_box)
}
}) {
Ok(ret) => ret,
Err(err) => {
error_print(err);
core::ptr::null_mut()
}
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderSetParameter(
state_ptr: *mut BrotliEncoderState,
param: ::enc::encode::BrotliEncoderParameter,
value: u32,
) -> i32 {
::enc::encode::BrotliEncoderSetParameter(&mut (*state_ptr).compressor, param, value)
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderDestroyInstance(state_ptr: *mut BrotliEncoderState) {
::enc::encode::BrotliEncoderDestroyInstance(&mut (*state_ptr).compressor);
if let Some(_) = (*state_ptr).custom_allocator.alloc_func {
if let Some(free_fn) = (*state_ptr).custom_allocator.free_func {
let _to_free = core::ptr::read(state_ptr);
let ptr = core::mem::transmute::<*mut BrotliEncoderState, *mut c_void>(state_ptr);
free_fn((*state_ptr).custom_allocator.opaque, ptr);
}
} else {
free_compressor_no_custom_alloc(state_ptr);
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderIsFinished(
state_ptr: *mut BrotliEncoderState,
) -> i32 {
::enc::encode::BrotliEncoderIsFinished(&mut (*state_ptr).compressor)
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderHasMoreOutput(
state_ptr: *mut BrotliEncoderState,
) -> i32 {
::enc::encode::BrotliEncoderHasMoreOutput(&mut (*state_ptr).compressor)
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderSetCustomDictionary(
state_ptr: *mut BrotliEncoderState,
size: usize,
dict: *const u8,
) {
if let Err(panic_err) = catch_panic(|| {
let dict_slice = slice_from_raw_parts_or_nil(dict, size);
::enc::encode::BrotliEncoderSetCustomDictionary(&mut (*state_ptr).compressor, size, dict_slice);
0
}) {
error_print(panic_err);
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderTakeOutput(
state_ptr: *mut BrotliEncoderState,
size: *mut usize,
) -> *const u8 {
::enc::encode::BrotliEncoderTakeOutput(&mut (*state_ptr).compressor, &mut *size).as_ptr()
}
#[no_mangle]
pub extern fn BrotliEncoderVersion() -> u32 {
::enc::encode::BrotliEncoderVersion()
}
#[no_mangle]
pub extern fn BrotliEncoderMaxCompressedSize(input_size: usize) -> usize {
::enc::encode::BrotliEncoderMaxCompressedSize(input_size)
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderCompress(
quality: i32,
lgwin: i32,
mode: BrotliEncoderMode,
input_size: usize,
input_buffer: *const u8,
encoded_size: *mut usize,
encoded_buffer: *mut u8) -> i32 {
match catch_panic(|| {
let input_buf = slice_from_raw_parts_or_nil(input_buffer, input_size);
let encoded_buf = slice_from_raw_parts_or_nil_mut(encoded_buffer, *encoded_size);
let allocators = CAllocator {
alloc_func:None,
free_func:None,
opaque:core::ptr::null_mut(),
};
let translated_mode = match mode {
BrotliEncoderMode::BROTLI_MODE_GENERIC =>
::enc::backward_references::BrotliEncoderMode::BROTLI_MODE_GENERIC,
BrotliEncoderMode::BROTLI_MODE_TEXT =>
::enc::backward_references::BrotliEncoderMode::BROTLI_MODE_TEXT,
BrotliEncoderMode::BROTLI_MODE_FONT =>
::enc::backward_references::BrotliEncoderMode::BROTLI_MODE_FONT,
BrotliEncoderMode::BROTLI_MODE_FORCE_LSB_PRIOR =>
::enc::backward_references::BrotliEncoderMode::BROTLI_FORCE_LSB_PRIOR,
BrotliEncoderMode::BROTLI_MODE_FORCE_MSB_PRIOR =>
::enc::backward_references::BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
BrotliEncoderMode::BROTLI_MODE_FORCE_UTF8_PRIOR =>
::enc::backward_references::BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
BrotliEncoderMode::BROTLI_MODE_FORCE_SIGNED_PRIOR =>
::enc::backward_references::BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
};
let mut m8 = BrotliSubclassableAllocator::new(
SubclassableAllocator::new(allocators.clone()));
let empty_m8 = BrotliSubclassableAllocator::new(
SubclassableAllocator::new(allocators.clone()));
::enc::encode::BrotliEncoderCompress(
empty_m8,
&mut m8,
quality,
lgwin,
translated_mode,
input_size,
input_buf,
&mut *encoded_size,
encoded_buf,
&mut |_a,_b,_c,_d|(),
)
}) {
Ok(ret) => ret,
Err(panic_err) => {
error_print(panic_err);
0
},
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderCompressStreaming(
state_ptr: *mut BrotliEncoderState,
op: BrotliEncoderOperation,
available_in: *mut usize,
mut input_buf: *const u8,
available_out: *mut usize,
mut output_buf: *mut u8,
) -> i32 {
BrotliEncoderCompressStream(state_ptr,
op,
available_in,
&mut input_buf,
available_out,
&mut output_buf,
core::ptr::null_mut())
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderCompressStream(
state_ptr: *mut BrotliEncoderState,
op: BrotliEncoderOperation,
available_in: *mut usize,
input_buf_ptr: *mut*const u8,
available_out: *mut usize,
output_buf_ptr: *mut*mut u8,
total_out: *mut usize) -> i32 {
match catch_panic(|| {
let mut input_offset = 0usize;
let mut output_offset = 0usize;
let result;
let translated_op = match op {
BrotliEncoderOperation::BROTLI_OPERATION_PROCESS =>
::enc::encode::BrotliEncoderOperation::BROTLI_OPERATION_PROCESS,
BrotliEncoderOperation::BROTLI_OPERATION_FLUSH =>
::enc::encode::BrotliEncoderOperation::BROTLI_OPERATION_FLUSH,
BrotliEncoderOperation::BROTLI_OPERATION_FINISH =>
::enc::encode::BrotliEncoderOperation::BROTLI_OPERATION_FINISH,
BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA =>
::enc::encode::BrotliEncoderOperation::BROTLI_OPERATION_EMIT_METADATA,
};
{
let (input_buf, input_any):(&[u8],bool) = if *available_in != 0 {
(slice_from_raw_parts_or_nil(*input_buf_ptr, *available_in), true)
} else {
(&[], false)
};
let (output_buf, output_any):(&mut[u8],bool) = if *available_out != 0 {
(slice_from_raw_parts_or_nil_mut(*output_buf_ptr, *available_out), true)
} else {
(&mut [], false)
};
let mut to = Some(0usize);
result = ::enc::encode::BrotliEncoderCompressStream(
&mut (*state_ptr).compressor,
translated_op,
&mut *available_in,
input_buf,
&mut input_offset,
&mut *available_out,
output_buf,
&mut output_offset,
&mut to,
&mut |_a,_b,_c,_d|(),
);
if !total_out.is_null() {
*total_out = to.unwrap_or(0);
}
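      // Advance the caller's input and output pointers past the bytes consumed
      // and produced during this call, so repeated calls can continue where the
      // previous one left off.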
if input_any {
*input_buf_ptr = (*input_buf_ptr).offset(input_offset as isize);
}
if output_any {
*output_buf_ptr = (*output_buf_ptr).offset(output_offset as isize);
}
}
result
}) {
Ok(ret) => ret,
Err(panic_err) => {
error_print(panic_err);
0
},
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderMallocU8(state_ptr: *mut BrotliEncoderState, size: usize) -> *mut u8 {
if let Some(alloc_fn) = (*state_ptr).custom_allocator.alloc_func {
return core::mem::transmute::<*mut c_void, *mut u8>(alloc_fn((*state_ptr).custom_allocator.opaque, size));
} else {
return alloc_util::alloc_stdlib(size);
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderFreeU8(state_ptr: *mut BrotliEncoderState, data: *mut u8, size: usize) {
if let Some(free_fn) = (*state_ptr).custom_allocator.free_func {
free_fn((*state_ptr).custom_allocator.opaque, core::mem::transmute::<*mut u8, *mut c_void>(data));
} else {
alloc_util::free_stdlib(data, size);
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderMallocUsize(state_ptr: *mut BrotliEncoderState, size: usize) -> *mut usize {
if let Some(alloc_fn) = (*state_ptr).custom_allocator.alloc_func {
return core::mem::transmute::<*mut c_void, *mut usize>(alloc_fn((*state_ptr).custom_allocator.opaque,
size * core::mem::size_of::<usize>()));
} else {
return alloc_util::alloc_stdlib(size);
}
}
#[no_mangle]
pub unsafe extern fn BrotliEncoderFreeUsize(state_ptr: *mut BrotliEncoderState, data: *mut usize, size: usize) {
if let Some(free_fn) = (*state_ptr).custom_allocator.free_func {
free_fn((*state_ptr).custom_allocator.opaque, core::mem::transmute::<*mut usize, *mut c_void>(data));
} else {
alloc_util::free_stdlib(data, size);
}
}
#[cfg(all(feature="std", not(feature="pass-through-ffi-panics")))]
pub fn catch_panic<F:FnOnce()->i32+panic::UnwindSafe>(f: F) -> thread::Result<i32> {
panic::catch_unwind(f)
}
#[cfg(all(feature="std", not(feature="pass-through-ffi-panics")))]
fn catch_panic_cstate<F:FnOnce()->*mut BrotliEncoderState+panic::UnwindSafe>(f: F) -> thread::Result<*mut BrotliEncoderState> {
panic::catch_unwind(f)
}
#[cfg(all(feature="std", not(feature="pass-through-ffi-panics")))]
fn error_print<Err:core::fmt::Debug>(err: Err) {
let _ign = writeln!(&mut io::stderr(), "Internal Error {:?}", err);
}
// Can't catch panics in a reliable way without std; configure with panic=abort. These shouldn't happen.
#[cfg(any(not(feature="std"), feature="pass-through-ffi-panics"))]
pub fn catch_panic<F:FnOnce()->i32>(f: F) -> Result<i32, ()> {
Ok(f())
}
#[cfg(any(not(feature="std"), feature="pass-through-ffi-panics"))]
fn catch_panic_cstate<F:FnOnce()->*mut BrotliEncoderState>(f: F) -> Result<*mut BrotliEncoderState, ()> {
Ok(f())
}
#[cfg(any(not(feature="std"), feature="pass-through-ffi-panics"))]
fn error_print<Err>(_err: Err) {
}
| 32.578005 | 127 | 0.680562 |
14ccc12e3653be42607ca9c3d389563d6a4fbcf8 | 1,622 | use std::ops::{Index, IndexMut};
use std::sync::atomic::{AtomicU8, Ordering};
use flume::Sender;
use crate::app::message::Message;
use crate::app::sort::Sortable;
#[derive(Debug)]
pub struct ArrayController {
array: Vec<u8>,
sender: Sender<Message>,
last_mut: (usize, AtomicU8),
}
impl ArrayController {
pub fn new(array: Vec<u8>, sender: Sender<Message>) -> Self {
let last_mut = (0, AtomicU8::new(array[0]));
Self {
array,
sender,
last_mut,
}
}
fn send_set_if_last_mut_changed(&self) {
let last_mut_value = self.array[self.last_mut.0];
if last_mut_value != self.last_mut.1.swap(last_mut_value, Ordering::Relaxed) {
self.sender
.send(Message::set(self.last_mut.0, last_mut_value))
.unwrap();
}
}
}
impl Index<usize> for ArrayController {
type Output = u8;
fn index(&self, index: usize) -> &Self::Output {
self.send_set_if_last_mut_changed();
self.sender.send(Message::get(index)).unwrap();
&self.array[index]
}
}
impl IndexMut<usize> for ArrayController {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
self.send_set_if_last_mut_changed();
self.last_mut.0 = index;
self.last_mut.1.store(self.array[index], Ordering::Relaxed);
&mut self.array[index]
}
}
impl Sortable for ArrayController {
fn len(&self) -> usize {
self.array.len()
}
}
impl Drop for ArrayController {
fn drop(&mut self) {
self.send_set_if_last_mut_changed();
}
}
| 23.171429 | 86 | 0.602959 |
e82ef8b3b52bfae74c8ba2f3d7fe1028918c9b3e | 20,508 | //! metrics tracked by kitsune_p2p spaces
use std::collections::HashMap;
use std::collections::VecDeque;
use std::sync::Arc;
use tokio::time::Instant;
use crate::types::event::*;
use crate::types::*;
use kitsune_p2p_timestamp::Timestamp;
use kitsune_p2p_types::agent_info::AgentInfoSigned;
use num_traits::*;
/// how long historical metric records should be kept
/// (currently set to 1 week)
const HISTORICAL_RECORD_EXPIRE_DURATION_MICROS: i64 = 1000 * 1000 * 60 * 60 * 24 * 7;
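// Quick check on the arithmetic above: 1000 * 1000 microseconds per second, times the
// 60 * 60 * 24 * 7 = 604_800 seconds in a week, gives 604_800_000_000 microseconds.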
/// Running average that prioritizes memory and cpu efficiency
/// over strict accuracy.
/// For metrics where we can't afford the memory of tracking samples
/// for every remote we might talk to, this running average is
/// accurate enough and uses only 5 bytes of memory.
#[derive(Debug, Clone, Copy)]
pub struct RunAvg(f32, u8);
impl Default for RunAvg {
fn default() -> Self {
Self(0.0, 0)
}
}
impl RunAvg {
/// Push a new data point onto the running average
pub fn push<V: AsPrimitive<f32>>(&mut self, v: V) {
self.push_n(v, 1);
}
/// Push multiple entries (up to 255) of the same value onto the average
pub fn push_n<V: AsPrimitive<f32>>(&mut self, v: V, count: u8) {
self.1 = self.1.saturating_add(count);
self.0 = (self.0 * (self.1 - count) as f32 + (v.as_() * count as f32)) / self.1 as f32;
}
}
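// Worked example of the recurrence in `push_n` above (it matches the first case in the
// unit tests at the bottom of this file): pushing 100 and then three 1s gives
//   n=1: (0.0*0 + 100*1)/1 = 100.0
//   n=2: (100.0*1 + 1*1)/2 = 50.5
//   n=3: (50.5*2 + 1*1)/3 = 34.0
//   n=4: (34.0*3 + 1*1)/4 = 25.75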
macro_rules! mk_from {
($($t:ty,)*) => {$(
impl From<$t> for RunAvg {
fn from(o: $t) -> Self {
Self(o as f32, 1)
}
}
)*};
}
mk_from! {
i8,
u8,
i16,
u16,
i32,
u32,
i64,
u64,
f32,
f64,
}
impl std::ops::Deref for RunAvg {
type Target = f32;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl AsRef<f32> for RunAvg {
fn as_ref(&self) -> &f32 {
&self.0
}
}
impl std::borrow::Borrow<f32> for RunAvg {
fn borrow(&self) -> &f32 {
&self.0
}
}
impl std::fmt::Display for RunAvg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
/// The maximum number of different nodes that will be
/// gossiped with if gossip is triggered.
const MAX_TRIGGERS: u8 = 2;
/// Maximum amount of history we will track
/// per remote node.
const MAX_HISTORY: usize = 10;
#[derive(Debug, Clone, Default)]
/// Information about a remote node.
struct NodeInfo {
    /// Successful and unsuccessful messages from the remote
/// can be combined to estimate a "reachability quotient"
/// between 1 (or 0 if empty) and 100. Errors are weighted
/// heavier because we retry less frequently.
reachability_quotient: RunAvg,
/// Running average for latency microseconds for any direct
/// request/response calls to remote agent.
latency_micros: RunAvg,
/// Times we recorded errors for this node.
errors: VecDeque<Instant>,
/// Times we recorded initiates to this node.
initiates: VecDeque<Instant>,
/// Times we recorded remote rounds from this node.
remote_rounds: VecDeque<Instant>,
/// Times we recorded complete rounds for this node.
complete_rounds: VecDeque<Instant>,
/// Is this node currently in an active round?
current_round: bool,
}
#[derive(Debug, Default)]
/// Metrics tracking for remote nodes to help
/// choose which remote node to initiate the next round with.
pub struct Metrics {
/// Map of remote agents.
map: HashMap<Arc<KitsuneAgent>, NodeInfo>,
/// Aggregate Extrapolated Dht Coverage
agg_extrap_cov: RunAvg,
// Number of times we need to force initiate
// the next round.
force_initiates: u8,
}
/// Outcome of a gossip round.
#[derive(PartialOrd, Ord, PartialEq, Eq)]
pub enum RoundOutcome {
/// Success outcome
Success(Instant),
/// Error outcome
Error(Instant),
}
/// Accept differing key types
pub enum AgentLike<'lt> {
/// An agent info
Info(&'lt AgentInfoSigned),
/// A raw agent pubkey
PubKey(&'lt Arc<KitsuneAgent>),
}
impl<'lt> From<&'lt AgentInfoSigned> for AgentLike<'lt> {
fn from(i: &'lt AgentInfoSigned) -> Self {
Self::Info(i)
}
}
impl<'lt> From<&'lt Arc<KitsuneAgent>> for AgentLike<'lt> {
fn from(pk: &'lt Arc<KitsuneAgent>) -> Self {
Self::PubKey(pk)
}
}
impl<'lt> AgentLike<'lt> {
/// Get a raw agent pubkey from any variant type
pub fn agent(&self) -> &Arc<KitsuneAgent> {
match self {
Self::Info(i) => &i.agent,
Self::PubKey(pk) => pk,
}
}
}
impl Metrics {
/// Dump historical metrics for recording to db.
pub fn dump_historical(&self) -> Vec<MetricRecord> {
let now = Timestamp::now();
let expires_at =
Timestamp::from_micros(now.as_micros() + HISTORICAL_RECORD_EXPIRE_DURATION_MICROS);
let mut out = Vec::new();
for (agent, node) in self.map.iter() {
out.push(MetricRecord {
kind: MetricRecordKind::ReachabilityQuotient,
agent: Some(agent.clone()),
recorded_at_utc: now,
expires_at_utc: expires_at,
data: serde_json::json!(*node.reachability_quotient),
});
out.push(MetricRecord {
kind: MetricRecordKind::LatencyMicros,
agent: Some(agent.clone()),
recorded_at_utc: now,
expires_at_utc: expires_at,
data: serde_json::json!(*node.latency_micros),
});
}
out.push(MetricRecord {
kind: MetricRecordKind::AggExtrapCov,
agent: None,
recorded_at_utc: now,
expires_at_utc: expires_at,
data: serde_json::json!(*self.agg_extrap_cov),
});
out
}
/// Dump json encoded metrics
pub fn dump(&self) -> serde_json::Value {
let agents: serde_json::Value = self
.map
.iter()
.map(|(a, i)| {
(
a.to_string(),
serde_json::json!({
"reachability_quotient": *i.reachability_quotient,
"latency_micros": *i.latency_micros,
}),
)
})
.collect::<serde_json::map::Map<String, serde_json::Value>>()
.into();
serde_json::json!({
"aggExtrapCov": *self.agg_extrap_cov,
"agents": agents,
})
}
/// Record an individual extrapolated coverage event
/// (either from us or a remote)
/// and add it to our running aggregate extrapolated coverage metric.
pub fn record_extrap_cov_event(&mut self, extrap_cov: f32) {
self.agg_extrap_cov.push(extrap_cov);
}
    /// Successful and unsuccessful messages from the remote
/// can be combined to estimate a "reachability quotient"
/// between 1 (or 0 if empty) and 100. Errors are weighted
/// heavier because we retry less frequently.
/// Call this to register a reachability event.
/// Note, `record_success` and `record_error` below invoke this
/// function internally, you don't need to call it again.
pub fn record_reachability_event<'a, T, I>(&mut self, success: bool, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
if success {
info.reachability_quotient.push(100);
} else {
info.reachability_quotient.push_n(1, 5);
}
}
}
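    // Illustrative numbers for the weighting above: starting from an empty RunAvg,
    // one success (a single sample of 100) followed by one error (five samples of 1)
    // averages out to (100*1 + 1*5) / 6 = 17.5.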
/// Running average for latency microseconds for any direct
/// request/response calls to remote agent.
pub fn record_latency_micros<'a, T, I, V>(&mut self, micros: V, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
V: AsPrimitive<f32>,
{
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
info.latency_micros.push(micros);
}
}
/// Record a gossip round has been initiated by us.
pub fn record_initiate<'a, T, I>(&mut self, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
record_instant(&mut info.initiates);
info.current_round = true;
}
}
/// Record a remote gossip round has started.
pub fn record_remote_round<'a, T, I>(&mut self, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
record_instant(&mut info.remote_rounds);
info.current_round = true;
}
}
/// Record a gossip round has completed successfully.
pub fn record_success<'a, T, I>(&mut self, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
let mut should_dec_force_initiates = false;
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
info.reachability_quotient.push(100);
record_instant(&mut info.complete_rounds);
info.current_round = false;
if info.is_initiate_round() {
should_dec_force_initiates = true;
}
}
if should_dec_force_initiates {
self.force_initiates = self.force_initiates.saturating_sub(1);
}
}
/// Record a gossip round has finished with an error.
pub fn record_error<'a, T, I>(&mut self, remote_agent_list: I)
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
for agent_info in remote_agent_list {
let info = self
.map
.entry(agent_info.into().agent().clone())
.or_default();
info.reachability_quotient.push_n(1, 5);
record_instant(&mut info.errors);
info.current_round = false;
}
}
/// Record that we should force initiate the next few rounds.
pub fn record_force_initiate(&mut self) {
self.force_initiates = MAX_TRIGGERS;
}
/// Get the last successful round time.
pub fn last_success<'a, T, I>(&self, remote_agent_list: I) -> Option<&Instant>
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
remote_agent_list
.into_iter()
.filter_map(|agent_info| self.map.get(agent_info.into().agent()))
.filter_map(|info| info.complete_rounds.back())
.min()
}
/// Is this node currently in an active round?
pub fn is_current_round<'a, T, I>(&self, remote_agent_list: I) -> bool
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
remote_agent_list
.into_iter()
.filter_map(|agent_info| self.map.get(agent_info.into().agent()))
.map(|info| info.current_round)
.any(|x| x)
}
/// What was the last outcome for this node's gossip round?
pub fn last_outcome<'a, T, I>(&self, remote_agent_list: I) -> Option<RoundOutcome>
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
#[allow(clippy::map_flatten)]
remote_agent_list
.into_iter()
.filter_map(|agent_info| self.map.get(agent_info.into().agent()))
.map(|info| {
[
info.errors.back().map(|x| RoundOutcome::Error(*x)),
info.complete_rounds
.back()
.map(|x| RoundOutcome::Success(*x)),
]
})
.flatten()
.flatten()
.max()
}
/// Should we force initiate the next round?
pub fn forced_initiate(&self) -> bool {
self.force_initiates > 0
}
/// Return the average (mean) reachability quotient for the
/// supplied remote agents.
pub fn reachability_quotient<'a, T, I>(&self, remote_agent_list: I) -> f32
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
let (sum, cnt) = remote_agent_list
.into_iter()
.filter_map(|agent_info| self.map.get(agent_info.into().agent()))
.map(|info| *info.reachability_quotient)
.fold((0.0, 0.0), |acc, x| (acc.0 + x, acc.1 + 1.0));
if cnt <= 0.0 {
0.0
} else {
sum / cnt
}
}
/// Return the average (mean) latency microseconds for the
/// supplied remote agents.
pub fn latency_micros<'a, T, I>(&self, remote_agent_list: I) -> f32
where
T: Into<AgentLike<'a>>,
I: IntoIterator<Item = T>,
{
let (sum, cnt) = remote_agent_list
.into_iter()
.filter_map(|agent_info| self.map.get(agent_info.into().agent()))
.map(|info| *info.latency_micros)
.fold((0.0, 0.0), |acc, x| (acc.0 + x, acc.1 + 1.0));
if cnt <= 0.0 {
0.0
} else {
sum / cnt
}
}
}
impl NodeInfo {
/// Was the last round for this node initiated by us?
fn is_initiate_round(&self) -> bool {
match (self.remote_rounds.back(), self.initiates.back()) {
(None, None) | (Some(_), None) => false,
(None, Some(_)) => true,
(Some(remote), Some(initiate)) => initiate > remote,
}
}
}
fn record_instant(buffer: &mut VecDeque<Instant>) {
if buffer.len() > MAX_HISTORY {
buffer.pop_front();
}
buffer.push_back(Instant::now());
}
impl std::fmt::Display for Metrics {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
static TRACE: once_cell::sync::Lazy<bool> = once_cell::sync::Lazy::new(|| {
std::env::var("GOSSIP_METRICS").map_or(false, |s| s == "trace")
});
let trace = *TRACE;
write!(f, "Metrics:")?;
let mut average_last_completion = std::time::Duration::default();
let mut max_last_completion = std::time::Duration::default();
let mut average_completion_frequency = std::time::Duration::default();
let mut complete_rounds = 0;
let mut min_complete_rounds = usize::MAX;
for (key, info) in &self.map {
let completion_frequency: std::time::Duration =
info.complete_rounds.iter().map(|i| i.elapsed()).sum();
let completion_frequency = completion_frequency
.checked_div(info.complete_rounds.len() as u32)
.unwrap_or_default();
let last_completion = info
.complete_rounds
.back()
.map(|i| i.elapsed())
.unwrap_or_default();
average_last_completion += last_completion;
max_last_completion = max_last_completion.max(last_completion);
average_completion_frequency += completion_frequency;
if !info.complete_rounds.is_empty() {
complete_rounds += 1;
}
min_complete_rounds = min_complete_rounds.min(info.complete_rounds.len());
if trace {
write!(f, "\n\t{:?}:", key)?;
write!(
f,
"\n\t\tErrors: {}, Last: {:?}",
info.errors.len(),
info.errors.back().map(|i| i.elapsed()).unwrap_or_default()
)?;
write!(
f,
"\n\t\tInitiates: {}, Last: {:?}",
info.initiates.len(),
info.initiates
.back()
.map(|i| i.elapsed())
.unwrap_or_default()
)?;
write!(
f,
"\n\t\tRemote Rounds: {}, Last: {:?}",
info.remote_rounds.len(),
info.remote_rounds
.back()
.map(|i| i.elapsed())
.unwrap_or_default()
)?;
write!(
f,
"\n\t\tComplete Rounds: {}, Last: {:?}, Average completion Frequency: {:?}",
info.complete_rounds.len(),
last_completion,
completion_frequency
)?;
write!(f, "\n\t\tCurrent Round: {}", info.current_round)?;
}
}
write!(
f,
"\n\tNumber of remote nodes complete {} out of {}. Min per node: {}.",
complete_rounds,
self.map.len(),
min_complete_rounds
)?;
write!(
f,
"\n\tAverage time since last completion: {:?}",
average_last_completion
.checked_div(self.map.len() as u32)
.unwrap_or_default()
)?;
write!(
f,
"\n\tMax time since last completion: {:?}",
max_last_completion
)?;
write!(
f,
"\n\tAverage completion frequency: {:?}",
average_completion_frequency
.checked_div(self.map.len() as u32)
.unwrap_or_default()
)?;
write!(f, "\n\tForce Initiate: {}", self.force_initiates)?;
Ok(())
}
}
/// Synchronization primitive around the Metrics struct.
#[derive(Clone)]
pub struct MetricsSync(Arc<parking_lot::RwLock<Metrics>>);
impl Default for MetricsSync {
fn default() -> Self {
Self(Arc::new(parking_lot::RwLock::new(Metrics::default())))
}
}
impl std::fmt::Debug for MetricsSync {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.read().fmt(f)
}
}
impl std::fmt::Display for MetricsSync {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.read().fmt(f)
}
}
impl MetricsSync {
/// Get a read lock for the metrics store.
pub fn read(&self) -> parking_lot::RwLockReadGuard<Metrics> {
match self.0.try_read_for(std::time::Duration::from_millis(100)) {
Some(g) => g,
// This won't block if a writer is waiting.
            // NOTE: This is a bit of a hack to work around a lock somewhere that is errantly
// held over another call to lock. Really we should fix that error,
// potentially by using a closure pattern here to ensure the lock cannot
// be held beyond the access logic.
None => self.0.read_recursive(),
}
}
/// Get a write lock for the metrics store.
pub fn write(&self) -> parking_lot::RwLockWriteGuard<Metrics> {
match self.0.try_write_for(std::time::Duration::from_secs(100)) {
Some(g) => g,
None => {
eprintln!("Metrics lock likely deadlocked");
self.0.write()
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test(flavor = "multi_thread")]
async fn test_run_avg() {
let mut a1 = RunAvg::default();
a1.push(100);
a1.push(1);
a1.push(1);
a1.push(1);
assert_eq!(25.75, *a1);
let mut a2 = RunAvg::default();
a2.push_n(100, 1);
a2.push_n(1, 3);
assert_eq!(25.75, *a2);
let mut a3 = RunAvg::default();
a3.push_n(100, 255);
a3.push(1);
assert_eq!(99.61176, *a3);
let mut a4 = RunAvg::default();
a4.push_n(100, 255);
a4.push_n(1, 128);
assert_eq!(50.30588, *a4);
let mut a5 = RunAvg::default();
a5.push_n(100, 255);
a5.push_n(1, 255);
assert_eq!(1.0, *a5);
}
}
| 30.978852 | 96 | 0.543056 |
eb7da19b3f50ef224f29101ff234bf4e293091c1 | 13,014 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CALCTRL {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `UPSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum UPSELR {
#[doc = "Select HFXO as up-counter."]
HFXO,
#[doc = "Select LFXO as up-counter."]
LFXO,
#[doc = "Select HFRCO as up-counter."]
HFRCO,
#[doc = "Select LFRCO as up-counter."]
LFRCO,
#[doc = "Select AUXHFRCO as up-counter."]
AUXHFRCO,
#[doc = "Select USHFRCO as up-counter."]
USHFRCO,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl UPSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
UPSELR::HFXO => 0,
UPSELR::LFXO => 1,
UPSELR::HFRCO => 2,
UPSELR::LFRCO => 3,
UPSELR::AUXHFRCO => 4,
UPSELR::USHFRCO => 5,
UPSELR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> UPSELR {
match value {
0 => UPSELR::HFXO,
1 => UPSELR::LFXO,
2 => UPSELR::HFRCO,
3 => UPSELR::LFRCO,
4 => UPSELR::AUXHFRCO,
5 => UPSELR::USHFRCO,
i => UPSELR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `HFXO`"]
#[inline]
pub fn is_hfxo(&self) -> bool {
*self == UPSELR::HFXO
}
#[doc = "Checks if the value of the field is `LFXO`"]
#[inline]
pub fn is_lfxo(&self) -> bool {
*self == UPSELR::LFXO
}
#[doc = "Checks if the value of the field is `HFRCO`"]
#[inline]
pub fn is_hfrco(&self) -> bool {
*self == UPSELR::HFRCO
}
#[doc = "Checks if the value of the field is `LFRCO`"]
#[inline]
pub fn is_lfrco(&self) -> bool {
*self == UPSELR::LFRCO
}
#[doc = "Checks if the value of the field is `AUXHFRCO`"]
#[inline]
pub fn is_auxhfrco(&self) -> bool {
*self == UPSELR::AUXHFRCO
}
#[doc = "Checks if the value of the field is `USHFRCO`"]
#[inline]
pub fn is_ushfrco(&self) -> bool {
*self == UPSELR::USHFRCO
}
}
#[doc = "Possible values of the field `DOWNSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DOWNSELR {
#[doc = "Select HFCLK for down-counter."]
HFCLK,
#[doc = "Select HFXO for down-counter."]
HFXO,
#[doc = "Select LFXO for down-counter."]
LFXO,
#[doc = "Select HFRCO for down-counter."]
HFRCO,
#[doc = "Select LFRCO for down-counter."]
LFRCO,
#[doc = "Select AUXHFRCO for down-counter."]
AUXHFRCO,
#[doc = "Select USHFRCO for down-counter."]
USHFRCO,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl DOWNSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
DOWNSELR::HFCLK => 0,
DOWNSELR::HFXO => 1,
DOWNSELR::LFXO => 2,
DOWNSELR::HFRCO => 3,
DOWNSELR::LFRCO => 4,
DOWNSELR::AUXHFRCO => 5,
DOWNSELR::USHFRCO => 6,
DOWNSELR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> DOWNSELR {
match value {
0 => DOWNSELR::HFCLK,
1 => DOWNSELR::HFXO,
2 => DOWNSELR::LFXO,
3 => DOWNSELR::HFRCO,
4 => DOWNSELR::LFRCO,
5 => DOWNSELR::AUXHFRCO,
6 => DOWNSELR::USHFRCO,
i => DOWNSELR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `HFCLK`"]
#[inline]
pub fn is_hfclk(&self) -> bool {
*self == DOWNSELR::HFCLK
}
#[doc = "Checks if the value of the field is `HFXO`"]
#[inline]
pub fn is_hfxo(&self) -> bool {
*self == DOWNSELR::HFXO
}
#[doc = "Checks if the value of the field is `LFXO`"]
#[inline]
pub fn is_lfxo(&self) -> bool {
*self == DOWNSELR::LFXO
}
#[doc = "Checks if the value of the field is `HFRCO`"]
#[inline]
pub fn is_hfrco(&self) -> bool {
*self == DOWNSELR::HFRCO
}
#[doc = "Checks if the value of the field is `LFRCO`"]
#[inline]
pub fn is_lfrco(&self) -> bool {
*self == DOWNSELR::LFRCO
}
#[doc = "Checks if the value of the field is `AUXHFRCO`"]
#[inline]
pub fn is_auxhfrco(&self) -> bool {
*self == DOWNSELR::AUXHFRCO
}
#[doc = "Checks if the value of the field is `USHFRCO`"]
#[inline]
pub fn is_ushfrco(&self) -> bool {
*self == DOWNSELR::USHFRCO
}
}
#[doc = r" Value of the field"]
pub struct CONTR {
bits: bool,
}
impl CONTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = "Values that can be written to the field `UPSEL`"]
pub enum UPSELW {
#[doc = "Select HFXO as up-counter."]
HFXO,
#[doc = "Select LFXO as up-counter."]
LFXO,
#[doc = "Select HFRCO as up-counter."]
HFRCO,
#[doc = "Select LFRCO as up-counter."]
LFRCO,
#[doc = "Select AUXHFRCO as up-counter."]
AUXHFRCO,
#[doc = "Select USHFRCO as up-counter."]
USHFRCO,
}
impl UPSELW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
UPSELW::HFXO => 0,
UPSELW::LFXO => 1,
UPSELW::HFRCO => 2,
UPSELW::LFRCO => 3,
UPSELW::AUXHFRCO => 4,
UPSELW::USHFRCO => 5,
}
}
}
#[doc = r" Proxy"]
pub struct _UPSELW<'a> {
w: &'a mut W,
}
impl<'a> _UPSELW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: UPSELW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "Select HFXO as up-counter."]
#[inline]
pub fn hfxo(self) -> &'a mut W {
self.variant(UPSELW::HFXO)
}
#[doc = "Select LFXO as up-counter."]
#[inline]
pub fn lfxo(self) -> &'a mut W {
self.variant(UPSELW::LFXO)
}
#[doc = "Select HFRCO as up-counter."]
#[inline]
pub fn hfrco(self) -> &'a mut W {
self.variant(UPSELW::HFRCO)
}
#[doc = "Select LFRCO as up-counter."]
#[inline]
pub fn lfrco(self) -> &'a mut W {
self.variant(UPSELW::LFRCO)
}
#[doc = "Select AUXHFRCO as up-counter."]
#[inline]
pub fn auxhfrco(self) -> &'a mut W {
self.variant(UPSELW::AUXHFRCO)
}
#[doc = "Select USHFRCO as up-counter."]
#[inline]
pub fn ushfrco(self) -> &'a mut W {
self.variant(UPSELW::USHFRCO)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `DOWNSEL`"]
pub enum DOWNSELW {
#[doc = "Select HFCLK for down-counter."]
HFCLK,
#[doc = "Select HFXO for down-counter."]
HFXO,
#[doc = "Select LFXO for down-counter."]
LFXO,
#[doc = "Select HFRCO for down-counter."]
HFRCO,
#[doc = "Select LFRCO for down-counter."]
LFRCO,
#[doc = "Select AUXHFRCO for down-counter."]
AUXHFRCO,
#[doc = "Select USHFRCO for down-counter."]
USHFRCO,
}
impl DOWNSELW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
DOWNSELW::HFCLK => 0,
DOWNSELW::HFXO => 1,
DOWNSELW::LFXO => 2,
DOWNSELW::HFRCO => 3,
DOWNSELW::LFRCO => 4,
DOWNSELW::AUXHFRCO => 5,
DOWNSELW::USHFRCO => 6,
}
}
}
#[doc = r" Proxy"]
pub struct _DOWNSELW<'a> {
w: &'a mut W,
}
impl<'a> _DOWNSELW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: DOWNSELW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "Select HFCLK for down-counter."]
#[inline]
pub fn hfclk(self) -> &'a mut W {
self.variant(DOWNSELW::HFCLK)
}
#[doc = "Select HFXO for down-counter."]
#[inline]
pub fn hfxo(self) -> &'a mut W {
self.variant(DOWNSELW::HFXO)
}
#[doc = "Select LFXO for down-counter."]
#[inline]
pub fn lfxo(self) -> &'a mut W {
self.variant(DOWNSELW::LFXO)
}
#[doc = "Select HFRCO for down-counter."]
#[inline]
pub fn hfrco(self) -> &'a mut W {
self.variant(DOWNSELW::HFRCO)
}
#[doc = "Select LFRCO for down-counter."]
#[inline]
pub fn lfrco(self) -> &'a mut W {
self.variant(DOWNSELW::LFRCO)
}
#[doc = "Select AUXHFRCO for down-counter."]
#[inline]
pub fn auxhfrco(self) -> &'a mut W {
self.variant(DOWNSELW::AUXHFRCO)
}
#[doc = "Select USHFRCO for down-counter."]
#[inline]
pub fn ushfrco(self) -> &'a mut W {
self.variant(DOWNSELW::USHFRCO)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CONTW<'a> {
w: &'a mut W,
}
impl<'a> _CONTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:2 - Calibration Up-counter Select"]
#[inline]
pub fn upsel(&self) -> UPSELR {
UPSELR::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 3:5 - Calibration Down-counter Select"]
#[inline]
pub fn downsel(&self) -> DOWNSELR {
DOWNSELR::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bit 6 - Continuous Calibration"]
#[inline]
pub fn cont(&self) -> CONTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CONTR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:2 - Calibration Up-counter Select"]
#[inline]
pub fn upsel(&mut self) -> _UPSELW {
_UPSELW { w: self }
}
#[doc = "Bits 3:5 - Calibration Down-counter Select"]
#[inline]
pub fn downsel(&mut self) -> _DOWNSELW {
_DOWNSELW { w: self }
}
#[doc = "Bit 6 - Continuous Calibration"]
#[inline]
pub fn cont(&mut self) -> _CONTW {
_CONTW { w: self }
}
}
| 26.944099 | 61 | 0.512602 |
8f3439ed7fe5f9f2f0e96ab31a7c7de3594ccfb1 | 47 |
pub enum Theme {
Default,
ClassicDMG
} | 9.4 | 16 | 0.617021 |
8f69a0d111f495af6fd704a52b3125e5c73b1d92 | 262 | //! Converter traits for things that can be converted into tokens.
use super::tokens::Tokens;
/// Helper trait to convert something into tokens.
pub trait IntoTokens<'el, C> {
/// Convert the type into tokens.
fn into_tokens(self) -> Tokens<'el, C>;
}
| 26.2 | 66 | 0.694656 |
ccf23332a3df446d45c083808c640810278b1632 | 4,019 | // Generated from definition io.k8s.api.apps.v1beta2.DaemonSetUpdateStrategy
/// DaemonSetUpdateStrategy is a struct used to control the update strategy for a DaemonSet.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DaemonSetUpdateStrategy {
/// Rolling update config params. Present only if type = "RollingUpdate".
pub rolling_update: Option<crate::v1_15::api::apps::v1beta2::RollingUpdateDaemonSet>,
/// Type of daemon set update. Can be "RollingUpdate" or "OnDelete". Default is RollingUpdate.
pub type_: Option<String>,
}
impl<'de> serde::Deserialize<'de> for DaemonSetUpdateStrategy {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_rolling_update,
Key_type_,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"rollingUpdate" => Field::Key_rolling_update,
"type" => Field::Key_type_,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = DaemonSetUpdateStrategy;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct DaemonSetUpdateStrategy")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_rolling_update: Option<crate::v1_15::api::apps::v1beta2::RollingUpdateDaemonSet> = None;
let mut value_type_: Option<String> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_rolling_update => value_rolling_update = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_type_ => value_type_ = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(DaemonSetUpdateStrategy {
rolling_update: value_rolling_update,
type_: value_type_,
})
}
}
deserializer.deserialize_struct(
"DaemonSetUpdateStrategy",
&[
"rollingUpdate",
"type",
],
Visitor,
)
}
}
impl serde::Serialize for DaemonSetUpdateStrategy {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"DaemonSetUpdateStrategy",
self.rolling_update.as_ref().map_or(0, |_| 1) +
self.type_.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.rolling_update {
serde::ser::SerializeStruct::serialize_field(&mut state, "rollingUpdate", value)?;
}
if let Some(value) = &self.type_ {
serde::ser::SerializeStruct::serialize_field(&mut state, "type", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
| 39.792079 | 120 | 0.540931 |
500e52c7afa926b836373299539888b18ecdbaf1 | 1,359 | #pragma version(1)
#pragma rs java_package_name(layers)
rs_allocation In_Blob;
rs_allocation Out_Blob;
float num;
float __attribute__((kernel)) compute(float4 in, uint32_t x)
{
float sum = 0;
for(int i=0;i<100;i++){
float4 s = (in + i) * in;
sum += s.x + s.y + s.z + s.w;
//sum += dot(in+i, in);
}
return sum;
}
short __attribute__((kernel)) compute_short(short4 in, uint32_t x)
{
short sum = 0;
for(short i=0;i<100;i++){
short4 s = (in + i) * in;
sum += s.x + s.y + s.z + s.w;
}
return sum;
}
int __attribute__((kernel)) compute_int(int4 in, uint32_t x)
{
int sum = 0;
for(int i=0;i<100;i++){
int4 s = (in + i) * in;
sum += s.x + s.y + s.z + s.w;
}
return sum;
}
void __attribute__((kernel)) compute_test(uint32_t x)
{
float x1 = rsGetElementAt_float(In_Blob, x * 2);
float y1 = rsGetElementAt_float(In_Blob, x * 2 + 1);
rsSetElementAt_float(Out_Blob, x1, x * 2);
rsSetElementAt_float(Out_Blob, y1, x * 2 + 1);
}
void __attribute__((kernel)) compute_test_index(uint32_t x, uint32_t y)
{
float3 input = rsGetElementAt_float3(In_Blob, x, y);
if(x==0 && y==1){
float sum = dot(input, input);
rsSetElementAt_float(Out_Blob, sum, 0, 0);
}
}
float __attribute__((kernel)) reshape(float in)
{
return in;
} | 23.033898 | 71 | 0.590876 |
2f16712bceb981379f3a0930d384c69a8b3ec2aa | 3,860 | use crate::parse_instruction::{ParsableProgram, ParseInstructionError, ParsedInstructionEnum};
use bincode::deserialize;
use serde_json::json;
use solana_sdk::{
instruction::CompiledInstruction, loader_instruction::LoaderInstruction, pubkey::Pubkey,
};
pub fn parse_bpf_loader(
instruction: &CompiledInstruction,
account_keys: &[Pubkey],
) -> Result<ParsedInstructionEnum, ParseInstructionError> {
let bpf_loader_instruction: LoaderInstruction = deserialize(&instruction.data)
.map_err(|_| ParseInstructionError::InstructionNotParsable(ParsableProgram::BpfLoader))?;
if instruction.accounts.is_empty() || instruction.accounts[0] as usize >= account_keys.len() {
return Err(ParseInstructionError::InstructionKeyMismatch(
ParsableProgram::BpfLoader,
));
}
match bpf_loader_instruction {
LoaderInstruction::Write { offset, bytes } => Ok(ParsedInstructionEnum {
instruction_type: "write".to_string(),
info: json!({
"offset": offset,
"bytes": base64::encode(bytes),
"account": account_keys[instruction.accounts[0] as usize].to_string(),
}),
}),
LoaderInstruction::Finalize => Ok(ParsedInstructionEnum {
instruction_type: "finalize".to_string(),
info: json!({
"account": account_keys[instruction.accounts[0] as usize].to_string(),
}),
}),
}
}
#[cfg(test)]
mod test {
use super::*;
use solana_sdk::{message::Message, pubkey};
#[test]
fn test_parse_bpf_loader_instructions() {
let account_pubkey = pubkey::new_rand();
let program_id = pubkey::new_rand();
let offset = 4242;
let bytes = vec![8; 99];
let fee_payer = pubkey::new_rand();
let account_keys = vec![fee_payer, account_pubkey];
let missing_account_keys = vec![account_pubkey];
let instruction = solana_sdk::loader_instruction::write(
&account_pubkey,
&program_id,
offset,
bytes.clone(),
);
let message = Message::new(&[instruction], Some(&fee_payer));
assert_eq!(
parse_bpf_loader(&message.instructions[0], &account_keys).unwrap(),
ParsedInstructionEnum {
instruction_type: "write".to_string(),
info: json!({
"offset": offset,
"bytes": base64::encode(&bytes),
"account": account_pubkey.to_string(),
}),
}
);
assert!(parse_bpf_loader(&message.instructions[0], &missing_account_keys).is_err());
let instruction = solana_sdk::loader_instruction::finalize(&account_pubkey, &program_id);
let message = Message::new(&[instruction], Some(&fee_payer));
assert_eq!(
parse_bpf_loader(&message.instructions[0], &account_keys).unwrap(),
ParsedInstructionEnum {
instruction_type: "finalize".to_string(),
info: json!({
"account": account_pubkey.to_string(),
}),
}
);
assert!(parse_bpf_loader(&message.instructions[0], &missing_account_keys).is_err());
let bad_compiled_instruction = CompiledInstruction {
program_id_index: 3,
accounts: vec![1, 2],
data: vec![2, 0, 0, 0], // LoaderInstruction enum only has 2 variants
};
assert!(parse_bpf_loader(&bad_compiled_instruction, &account_keys).is_err());
let bad_compiled_instruction = CompiledInstruction {
program_id_index: 3,
accounts: vec![],
data: vec![1, 0, 0, 0],
};
assert!(parse_bpf_loader(&bad_compiled_instruction, &account_keys).is_err());
}
}
| 38.6 | 98 | 0.601295 |
0a50e71ea00242efd3ecb082137a28f6cf14c896 | 268 | use super::{Consumer, ConsumerError, Context};
use crate::stat::Alias;
use clibri::client;
pub async fn handler<E: client::Error>(
error: Option<ConsumerError<E>>,
context: &mut Context,
consumer: Consumer<E>,
) {
context.inc_stat(Alias::Shutdown);
}
| 22.333333 | 46 | 0.686567 |
0a6265371a0e3c53a4e1f00b1955dad597c2a035 | 9,748 | extern crate sample;
use defs;
use shared::{
event::EngineEvent,
parameter::{
BaseliskPluginParameters,
ParameterId,
},
};
use engine::{
buffer::ResizableFrameBuffer,
filter::{
BiquadCoefficients,
BiquadSampleHistory,
get_lowpass_second_order_biquad_consts,
get_highpass_second_order_biquad_consts,
process_biquad,
},
traits,
};
use sample::ring_buffer;
use std::slice::Iter;
pub struct DelayChannel {
delay_buffer: ring_buffer::Fixed<Vec<defs::Sample>>,
highpass_history: BiquadSampleHistory,
lowpass_history: BiquadSampleHistory,
wet_buffer: ResizableFrameBuffer<defs::MonoFrame>,
}
impl DelayChannel {
pub fn new() -> Self {
// We'll throw this temporary ringbuffer away as soon as we know the
// real size (proportional to sample rate, which we don't know yet).
let mut delay_buffer_vec = Vec::with_capacity(1);
delay_buffer_vec.push(0.0);
Self {
delay_buffer: ring_buffer::Fixed::from(delay_buffer_vec),
highpass_history: BiquadSampleHistory::new(),
lowpass_history: BiquadSampleHistory::new(),
wet_buffer: ResizableFrameBuffer::new(),
}
}
pub fn process_between_keyframes(&mut self,
this_keyframe: usize,
next_keyframe: usize,
delay_time: defs::Sample,
feedback: defs::Sample,
wet_gain: defs::Sample,
highpass_coeffs: &BiquadCoefficients,
lowpass_coeffs: &BiquadCoefficients,
buffer: &mut defs::MonoFrameBufferSlice)
{
let wet_buffer = self.wet_buffer.get_sized_mut(buffer.len());
let buffer_tap_position_float = self.delay_buffer.len() as defs::Sample * (
1.0 - delay_time);
let buffer_tap_a_index = buffer_tap_position_float as usize;
let buffer_tap_b_index = buffer_tap_a_index + 1;
let delayed_sample_b_weight = buffer_tap_position_float.fract();
let delayed_sample_a_weight = 1.0 - delayed_sample_b_weight;
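        // The computed read position generally falls between two stored samples, so the
        // loop below linearly interpolates between the samples at tap index a and tap
        // index b, using the fractional part of the position as the blend weight.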
for frame_num in this_keyframe..next_keyframe {
let delayed_sample_a = self.delay_buffer[buffer_tap_a_index];
let delayed_sample_b = self.delay_buffer[buffer_tap_b_index];
let mut delayed_sample = feedback * (
delayed_sample_a * delayed_sample_a_weight
+ delayed_sample_b * delayed_sample_b_weight);
// Apply highpass to left delayed sample
delayed_sample = process_biquad(
&mut self.highpass_history,
highpass_coeffs,
delayed_sample);
// Apply lowpass to left delayed sample
delayed_sample = process_biquad(
&mut self.lowpass_history,
lowpass_coeffs,
delayed_sample);
wet_buffer[frame_num] = [delayed_sample];
// Mix in the dry signal and push back to the delay buffer
let dry_sample = buffer[frame_num][0];
self.delay_buffer.push(
dry_sample + delayed_sample);
// Mix the wet signal into the output buffer with the dry signal.
buffer[frame_num][0] += wet_gain * wet_buffer[frame_num][0];
} // end borrow of buffer
}
}
impl traits::Processor for DelayChannel {
fn set_sample_rate(&mut self, sample_rate: defs::Sample) {
let capacity = sample_rate as usize; // 1 second of buffer time
let mut delay_buffer_vec = Vec::with_capacity(capacity);
delay_buffer_vec.resize(capacity, 0.0);
self.delay_buffer = ring_buffer::Fixed::from(delay_buffer_vec);
}
fn panic(&mut self) {
// Silence the delay output and reset the coefficient history
for sample in self.delay_buffer.iter_mut() {
*sample = 0.0;
}
self.highpass_history.reset();
self.lowpass_history.reset();
}
}
pub struct Delay {
highpass_coeffs: BiquadCoefficients,
lowpass_coeffs: BiquadCoefficients,
channels: [DelayChannel; 2],
}
impl Delay {
pub fn new() -> Self {
Self {
highpass_coeffs: BiquadCoefficients::new(),
lowpass_coeffs: BiquadCoefficients::new(),
channels: [DelayChannel::new(), DelayChannel::new()],
}
}
pub fn process_buffer(&mut self,
left_buffer: &mut defs::MonoFrameBufferSlice,
right_buffer: &mut defs::MonoFrameBufferSlice,
mut engine_event_iter: Iter<(usize, EngineEvent)>,
sample_rate: defs::Sample,
params: &BaseliskPluginParameters)
{
let buffer_len = left_buffer.len(); // right_buffer must be same length
self.channels[0].wet_buffer.get_sized_mut(buffer_len);
self.channels[1].wet_buffer.get_sized_mut(buffer_len);
// Calculate the output values per-frame
let mut this_keyframe: usize = 0;
let mut next_keyframe: usize;
loop {
// Get next selected note, if there is one.
let next_event = engine_event_iter.next();
if let Some((frame_num, engine_event)) = next_event {
match engine_event {
EngineEvent::ModulateParameter { param_id, .. } => match *param_id {
// All delay events will trigger keyframes
ParameterId::DelayTimeLeft |
ParameterId::DelayTimeRight |
ParameterId::DelayFeedback |
ParameterId::DelayHighPassFilterFrequency |
ParameterId::DelayLowPassFilterFrequency |
ParameterId::DelayWetGain => (),
_ => continue,
},
_ => continue,
};
next_keyframe = *frame_num;
} else {
// No more note change events, so we'll process to the end of the buffer.
next_keyframe = buffer_len;
};
// Apply the old parameters up until next_keyframe.
let feedback = params.get_real_value(ParameterId::DelayFeedback);
let wet_gain = params.get_real_value(ParameterId::DelayWetGain);
let lowpass_frequency_hz = params.get_real_value(
ParameterId::DelayLowPassFilterFrequency);
let lowpass_quality = 0.707;
// Lowpass filter coefficients
get_lowpass_second_order_biquad_consts(
lowpass_frequency_hz,
lowpass_quality,
sample_rate,
&mut self.lowpass_coeffs);
let highpass_frequency_hz = params.get_real_value(
ParameterId::DelayHighPassFilterFrequency);
let highpass_quality = 0.707;
// Highpass filter coefficients
get_highpass_second_order_biquad_consts(
highpass_frequency_hz,
highpass_quality,
sample_rate,
&mut self.highpass_coeffs);
// Left first...
self.channels[0].process_between_keyframes(
this_keyframe,
next_keyframe,
params.get_real_value(ParameterId::DelayTimeLeft),
feedback,
wet_gain,
&self.highpass_coeffs,
&self.lowpass_coeffs,
left_buffer);
// ... Then right
self.channels[1].process_between_keyframes(
this_keyframe,
next_keyframe,
params.get_real_value(ParameterId::DelayTimeRight),
feedback,
wet_gain,
&self.highpass_coeffs,
&self.lowpass_coeffs,
right_buffer);
// We've reached the next_keyframe.
this_keyframe = next_keyframe;
// What we do now depends on whether we reached the end of the buffer.
if this_keyframe == buffer_len {
// Loop exit condition: reached the end of the buffer.
break
} else {
// Before the next iteration, use the event at this keyframe
// to update the current state.
let (_, event) = next_event.unwrap();
if let EngineEvent::ModulateParameter { param_id, value } = event {
match *param_id {
ParameterId::DelayTimeLeft |
ParameterId::DelayTimeRight |
ParameterId::DelayFeedback |
ParameterId::DelayHighPassFilterFrequency |
ParameterId::DelayLowPassFilterFrequency |
ParameterId::DelayWetGain => {
params.set_parameter(*param_id, *value);
}
_ => (),
}
};
}
}
}
}
impl traits::Processor for Delay {
fn set_sample_rate(&mut self, sample_rate: defs::Sample) {
for channel in self.channels.iter_mut() {
channel.set_sample_rate(sample_rate);
}
}
fn panic(&mut self) {
for channel in self.channels.iter_mut() {
channel.panic();
}
}
}
| 37.206107 | 89 | 0.555088 |
9b4f8788895593f783dd4e518842a0453fe5ab7b | 33,158 | use std::convert::TryInto;
use std::ffi::c_void;
use std::ptr::null;
use std::slice;
use std::sync::Arc;
use libc::size_t;
use rustls::server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, ClientCertVerifier,
ClientHello, NoClientAuth, ResolvesServerCert, ServerConfig, ServerConnection,
StoresServerSessions,
};
use rustls::sign::CertifiedKey;
use rustls::{
ProtocolVersion, SignatureScheme, SupportedCipherSuite, WantsVerifier, ALL_CIPHER_SUITES,
};
use crate::cipher::{
rustls_certified_key, rustls_client_cert_verifier, rustls_client_cert_verifier_optional,
rustls_supported_ciphersuite,
};
use crate::connection::{rustls_connection, Connection};
use crate::error::rustls_result::{InvalidParameter, NullParameter};
use crate::error::{map_error, rustls_result};
use crate::rslice::{rustls_slice_bytes, rustls_slice_slice_bytes, rustls_slice_u16, rustls_str};
use crate::session::{
rustls_session_store_get_callback, rustls_session_store_put_callback, SessionStoreBroker,
SessionStoreGetCallback, SessionStorePutCallback,
};
use crate::{
ffi_panic_boundary, try_arc_from_ptr, try_box_from_ptr, try_mut_from_ptr, try_mut_slice,
try_ref_from_ptr, try_slice, userdata_get, ArcCastPtr, BoxCastPtr, CastConstPtr, CastPtr,
};
/// A server config being constructed. A builder can be modified by,
/// e.g. rustls_server_config_builder_load_native_roots. Once you're
/// done configuring settings, call rustls_server_config_builder_build
/// to turn it into a *const rustls_server_config. This object is not safe
/// for concurrent mutation.
/// <https://docs.rs/rustls/0.20.0/rustls/struct.ConfigBuilder.html>
pub struct rustls_server_config_builder {
// We use the opaque struct pattern to tell C about our types without
// telling them what's inside.
// https://doc.rust-lang.org/nomicon/ffi.html#representing-opaque-structs
_private: [u8; 0],
}
pub(crate) struct ServerConfigBuilder {
base: rustls::ConfigBuilder<ServerConfig, WantsVerifier>,
verifier: Arc<dyn ClientCertVerifier>,
cert_resolver: Option<Arc<dyn ResolvesServerCert>>,
session_storage: Option<Arc<dyn StoresServerSessions + Send + Sync>>,
alpn_protocols: Vec<Vec<u8>>,
ignore_client_order: Option<bool>,
}
impl CastPtr for rustls_server_config_builder {
type RustType = ServerConfigBuilder;
}
impl BoxCastPtr for rustls_server_config_builder {}
/// A server config that is done being constructed and is now read-only.
/// Under the hood, this object corresponds to an Arc<ServerConfig>.
/// <https://docs.rs/rustls/0.20.0/rustls/struct.ServerConfig.html>
pub struct rustls_server_config {
// We use the opaque struct pattern to tell C about our types without
// telling them what's inside.
// https://doc.rust-lang.org/nomicon/ffi.html#representing-opaque-structs
_private: [u8; 0],
}
impl CastConstPtr for rustls_server_config {
type RustType = ServerConfig;
}
impl ArcCastPtr for rustls_server_config {}
impl rustls_server_config_builder {
/// Create a rustls_server_config_builder. Caller owns the memory and must
/// eventually call rustls_server_config_builder_build, then free the
/// resulting rustls_server_config. This uses rustls safe default values
/// for the cipher suites, key exchange groups and protocol versions.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_new() -> *mut rustls_server_config_builder {
ffi_panic_boundary! {
let builder = ServerConfigBuilder {
base: rustls::ServerConfig::builder().with_safe_defaults(),
verifier: NoClientAuth::new(),
cert_resolver: None,
session_storage: None,
alpn_protocols: vec![],
ignore_client_order: None,
};
BoxCastPtr::to_mut_ptr(builder)
}
}
/// Create a rustls_server_config_builder. Caller owns the memory and must
/// eventually call rustls_server_config_builder_build, then free the
/// resulting rustls_server_config. Specify cipher suites in preference
/// order; the `cipher_suites` parameter must point to an array containing
    /// `cipher_suites_len` pointers to `rustls_supported_ciphersuite` previously obtained
/// from `rustls_all_ciphersuites_get_entry()`. Set the TLS protocol
/// versions to use when negotiating a TLS session.
///
    /// Each entry in `tls_versions` is a TLS protocol version code point, as defined
    /// in RFC 8446, ch. 4.2.1 and end of ch. 5.1. Some values are defined in
    /// `rustls_tls_version` for convenience.
    ///
    /// `tls_versions` will only be used during the call and the application retains
    /// ownership. `tls_versions_len` is the number of consecutive `uint16_t` pointed
    /// to by `tls_versions`.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_new_custom(
cipher_suites: *const *const rustls_supported_ciphersuite,
cipher_suites_len: size_t,
tls_versions: *const u16,
tls_versions_len: size_t,
builder_out: *mut *mut rustls_server_config_builder,
) -> rustls_result {
ffi_panic_boundary! {
let cipher_suites: &[*const rustls_supported_ciphersuite] = try_slice!(cipher_suites, cipher_suites_len);
let mut cs_vec: Vec<SupportedCipherSuite> = Vec::new();
for &cs in cipher_suites.iter() {
let cs = try_ref_from_ptr!(cs);
match ALL_CIPHER_SUITES.iter().find(|&acs| cs.eq(acs)) {
Some(scs) => cs_vec.push(*scs),
None => return InvalidParameter,
}
}
let tls_versions: &[u16] = try_slice!(tls_versions, tls_versions_len);
let mut versions = vec![];
for version_number in tls_versions {
let proto = ProtocolVersion::from(*version_number);
if proto == rustls::version::TLS12.version {
versions.push(&rustls::version::TLS12);
} else if proto == rustls::version::TLS13.version {
versions.push(&rustls::version::TLS13);
}
}
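            // Note: only the two version code points known to rustls are kept here
            // (0x0303 = TLS 1.2, 0x0304 = TLS 1.3, per RFC 8446); any other value in
            // `tls_versions` is skipped. If no supported version remains, the call
            // below fails and this function returns RUSTLS_RESULT_INVALID_PARAMETER.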
let result = rustls::ServerConfig::builder().with_cipher_suites(&cs_vec).with_safe_default_kx_groups().with_protocol_versions(&versions);
let base = match result {
Ok(new) => new,
Err(_) => return rustls_result::InvalidParameter,
};
let builder = ServerConfigBuilder {
base,
verifier: NoClientAuth::new(),
cert_resolver: None,
session_storage: None,
alpn_protocols: vec![],
ignore_client_order: None,
};
BoxCastPtr::set_mut_ptr(builder_out, builder);
rustls_result::Ok
}
}
    /// Configure a rustls_server_config_builder for TLS sessions that require
    /// valid client certificates. The passed rustls_client_cert_verifier may
    /// be used in several builders.
    /// If either input is NULL, this function does nothing.
    /// For memory lifetime, see rustls_server_config_builder_new.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_client_verifier(
config_builder: *mut rustls_server_config_builder,
verifier: *const rustls_client_cert_verifier,
) {
ffi_panic_boundary! {
            let config_builder: &mut ServerConfigBuilder = try_mut_from_ptr!(config_builder);
let verifier: Arc<AllowAnyAuthenticatedClient> = try_arc_from_ptr!(verifier);
config_builder.verifier = verifier;
}
}
    /// Configure a rustls_server_config_builder for TLS sessions that accept
    /// valid client certificates, but do not require them. The passed
    /// rustls_client_cert_verifier_optional may be used in several builders.
    /// If either input is NULL, this function does nothing.
    /// For memory lifetime, see rustls_server_config_builder_new.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_client_verifier_optional(
config_builder: *mut rustls_server_config_builder,
verifier: *const rustls_client_cert_verifier_optional,
) {
ffi_panic_boundary! {
            let config_builder: &mut ServerConfigBuilder = try_mut_from_ptr!(config_builder);
let verifier: Arc<AllowAnyAnonymousOrAuthenticatedClient> = try_arc_from_ptr!(verifier);
config_builder.verifier = verifier;
}
}
/// "Free" a server_config_builder without building it into a rustls_server_config.
/// Normally builders are built into rustls_server_configs via `rustls_server_config_builder_build`
/// and may not be free'd or otherwise used afterwards.
/// Use free only when the building of a config has to be aborted before a config
/// was created.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_free(config: *mut rustls_server_config_builder) {
ffi_panic_boundary! {
BoxCastPtr::to_box(config);
}
}
    /// With `ignore` set to true, the server will ignore the client ordering of cipher
/// suites, aka preference, during handshake and respect its own ordering
/// as configured.
/// <https://docs.rs/rustls/0.20.0/rustls/struct.ServerConfig.html#structfield.ignore_client_order>
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_ignore_client_order(
builder: *mut rustls_server_config_builder,
ignore: bool,
) -> rustls_result {
ffi_panic_boundary! {
let config: &mut ServerConfigBuilder = try_mut_from_ptr!(builder);
config.ignore_client_order = Some(ignore);
rustls_result::Ok
}
}
/// Set the ALPN protocol list to the given protocols. `protocols` must point
/// to a buffer of `rustls_slice_bytes` (built by the caller) with `len`
/// elements. Each element of the buffer must point to a slice of bytes that
/// contains a single ALPN protocol from
/// <https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids>.
///
/// This function makes a copy of the data in `protocols` and does not retain
/// any pointers, so the caller can free the pointed-to memory after calling.
///
/// <https://docs.rs/rustls/0.20.0/rustls/server/struct.ServerConfig.html#structfield.alpn_protocols>
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_alpn_protocols(
builder: *mut rustls_server_config_builder,
protocols: *const rustls_slice_bytes,
len: size_t,
) -> rustls_result {
ffi_panic_boundary! {
let config: &mut ServerConfigBuilder = try_mut_from_ptr!(builder);
let protocols: &[rustls_slice_bytes] = try_slice!(protocols, len);
let mut vv: Vec<Vec<u8>> = Vec::new();
for p in protocols {
let v: &[u8] = try_slice!(p.data, p.len);
vv.push(v.to_vec());
}
config.alpn_protocols = vv;
rustls_result::Ok
}
}
/// Provide the configuration a list of certificates where the connection
/// will select the first one that is compatible with the client's signature
/// verification capabilities. Servers that want to support both ECDSA and
    /// RSA certificates will want the ECDSA to go first in the list.
///
/// The built configuration will keep a reference to all certified keys
/// provided. The client may `rustls_certified_key_free()` afterwards
/// without the configuration losing them. The same certified key may also
/// be used in multiple configs.
///
/// EXPERIMENTAL: installing a client_hello callback will replace any
/// configured certified keys and vice versa.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_certified_keys(
builder: *mut rustls_server_config_builder,
certified_keys: *const *const rustls_certified_key,
certified_keys_len: size_t,
) -> rustls_result {
ffi_panic_boundary! {
let builder: &mut ServerConfigBuilder = try_mut_from_ptr!(builder);
let keys_ptrs: &[*const rustls_certified_key] = try_slice!(certified_keys, certified_keys_len);
let mut keys: Vec<Arc<CertifiedKey>> = Vec::new();
for &key_ptr in keys_ptrs {
let certified_key: Arc<CertifiedKey> = try_arc_from_ptr!(key_ptr);
keys.push(certified_key);
}
builder.cert_resolver = Some(Arc::new(ResolvesServerCertFromChoices::new(&keys)));
rustls_result::Ok
}
}
/// Turn a *rustls_server_config_builder (mutable) into a const *rustls_server_config
/// (read-only).
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_build(
builder: *mut rustls_server_config_builder,
) -> *const rustls_server_config {
ffi_panic_boundary! {
let builder = try_box_from_ptr!(builder);
let base = builder.base.with_client_cert_verifier(builder.verifier);
let mut config = if let Some(r) = builder.cert_resolver {
base.with_cert_resolver(r)
} else {
return null();
};
if let Some(ss) = builder.session_storage {
config.session_storage = ss;
}
config.alpn_protocols = builder.alpn_protocols;
if let Some(ignore_client_order) = builder.ignore_client_order {
config.ignore_client_order = ignore_client_order;
}
ArcCastPtr::to_const_ptr(config)
}
}
}
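// Typical lifecycle of the builder API above, as exercised by the tests at the end of
// this file (sketch only; error checking omitted):
//   1. rustls_server_config_builder_new()                      -> *mut builder
//   2. rustls_server_config_builder_set_certified_keys(builder, keys, keys_len)
//   3. rustls_server_config_builder_build(builder)             -> *const config
//      (the builder is consumed and must not be used or freed afterwards)
//   4. rustls_server_connection_new(config, &mut conn) for each incoming connection
//   5. rustls_connection_free(conn), and finally rustls_server_config_free(config)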
impl rustls_server_config {
/// "Free" a rustls_server_config previously returned from
/// rustls_server_config_builder_build. Since rustls_server_config is actually an
/// atomically reference-counted pointer, extant server connections may still
/// hold an internal reference to the Rust object. However, C code must
/// consider this pointer unusable after "free"ing it.
/// Calling with NULL is fine. Must not be called twice with the same value.
#[no_mangle]
pub extern "C" fn rustls_server_config_free(config: *const rustls_server_config) {
ffi_panic_boundary! {
let config: &ServerConfig = try_ref_from_ptr!(config);
            // To free the rustls_server_config, we reconstruct the Arc and drop the C code's
            // reference. Extant connections may still hold their own references, so the inner
            // ServerConfig is only dropped once the last reference goes away.
unsafe { drop(Arc::from_raw(config)) };
}
}
/// Create a new rustls_connection containing a server connection, and return it
/// in the output parameter `out`. If this returns an error code, the memory
/// pointed to by `conn_out` remains unchanged. If this returns a non-error,
/// the memory pointed to by `conn_out` is modified to point
/// at a valid rustls_connection. The caller now owns the rustls_connection
/// and must call `rustls_connection_free` when done with it.
#[no_mangle]
pub extern "C" fn rustls_server_connection_new(
config: *const rustls_server_config,
conn_out: *mut *mut rustls_connection,
) -> rustls_result {
ffi_panic_boundary! {
let config: Arc<ServerConfig> = try_arc_from_ptr!(config);
let server_connection = match ServerConnection::new(config) {
Ok(sc) => sc,
Err(e) => return map_error(e),
};
// We've succeeded. Put the server on the heap, and transfer ownership
// to the caller. After this point, we must return CRUSTLS_OK so the
// caller knows it is responsible for this memory.
let c = Connection::from_server(server_connection);
BoxCastPtr::set_mut_ptr(conn_out, c);
rustls_result::Ok
}
}
}
/// Copy the SNI hostname to `buf`, which can hold up to `count` bytes, and
/// write the length of that hostname to `out_n`. The string is stored in UTF-8
/// with no terminating NUL byte.
/// Returns RUSTLS_RESULT_INSUFFICIENT_SIZE if the SNI hostname is longer than `count`.
/// Returns Ok with *out_n == 0 if there is no SNI hostname available on this connection
/// because it hasn't been processed yet, or because the client did not send SNI.
/// <https://docs.rs/rustls/0.20.0/rustls/server/struct.ServerConnection.html#method.sni_hostname>
#[no_mangle]
pub extern "C" fn rustls_server_connection_get_sni_hostname(
conn: *const rustls_connection,
buf: *mut u8,
count: size_t,
out_n: *mut size_t,
) -> rustls_result {
ffi_panic_boundary! {
let conn: &Connection = try_ref_from_ptr!(conn);
let write_buf: &mut [u8] = try_mut_slice!(buf, count);
let out_n: &mut size_t = try_mut_from_ptr!(out_n);
let server_connection = match conn.as_server() {
Some(s) => s,
_ => return rustls_result::InvalidParameter,
};
let sni_hostname = match server_connection.sni_hostname() {
Some(sni_hostname) => sni_hostname,
            None => {
                // Match the documented contract: no SNI means *out_n == 0.
                *out_n = 0;
                return rustls_result::Ok
            },
};
let len: usize = sni_hostname.len();
if len > write_buf.len() {
return rustls_result::InsufficientSize;
}
write_buf[..len].copy_from_slice(sni_hostname.as_bytes());
*out_n = len;
rustls_result::Ok
}
}
/// Choose the server certificate to be used for a connection based on certificate
/// type. Will pick the first CertifiedKey available that is suitable for
/// the SignatureSchemes supported by the client.
struct ResolvesServerCertFromChoices {
choices: Vec<Arc<CertifiedKey>>,
}
impl ResolvesServerCertFromChoices {
pub fn new(choices: &[Arc<CertifiedKey>]) -> Self {
ResolvesServerCertFromChoices {
choices: Vec::from(choices),
}
}
}
impl ResolvesServerCert for ResolvesServerCertFromChoices {
fn resolve(&self, client_hello: ClientHello) -> Option<Arc<CertifiedKey>> {
for key in self.choices.iter() {
if key
.key
.choose_scheme(client_hello.signature_schemes())
.is_some()
{
return Some(key.clone());
}
}
None
}
}
/// The TLS Client Hello information provided to a ClientHelloCallback function.
/// `sni_name` is the SNI servername provided by the client. If the client
/// did not provide an SNI, the length of this `rustls_str` will be 0. The
/// signature_schemes carries the values supplied by the client or, should
/// the client not use this TLS extension, the default schemes in the rustls
/// library. See: <https://docs.rs/rustls/0.20.0/rustls/internal/msgs/enums/enum.SignatureScheme.html>.
/// `alpn` carries the list of ALPN protocol names that the client proposed to
/// the server. Again, the length of this list will be 0 if none were supplied.
///
/// All this data, when passed to a callback function, is only accessible during
/// the call and may not be modified. Users of this API must copy any values that
/// they want to access after the callback has returned.
///
/// EXPERIMENTAL: this feature of rustls-ffi is likely to change in the future, as
/// the rustls library is re-evaluating their current approach to client hello handling.
#[repr(C)]
pub struct rustls_client_hello<'a> {
sni_name: rustls_str<'a>,
signature_schemes: rustls_slice_u16<'a>,
alpn: *const rustls_slice_slice_bytes<'a>,
}
impl<'a> CastPtr for rustls_client_hello<'a> {
type RustType = rustls_client_hello<'a>;
}
/// Any context information the callback will receive when invoked.
pub type rustls_client_hello_userdata = *mut c_void;
/// Prototype of a callback that can be installed by the application at the
/// `rustls_server_config`. This callback will be invoked by a `rustls_connection`
/// once the TLS client hello message has been received.
/// `userdata` will be set based on rustls_connection_set_userdata.
/// `hello` gives the value of the available client announcements, as interpreted
/// by rustls. See the definition of `rustls_client_hello` for details.
///
/// NOTE:
/// - the passed in `hello` and all its values are only available during the
/// callback invocations.
/// - the passed callback function must be safe to call multiple times concurrently
/// with the same userdata, unless there is only a single config and connection
/// where it is installed.
///
/// EXPERIMENTAL: this feature of rustls-ffi is likely to change in the future, as
/// the rustls library is re-evaluating their current approach to client hello handling.
pub type rustls_client_hello_callback = Option<
unsafe extern "C" fn(
userdata: rustls_client_hello_userdata,
hello: *const rustls_client_hello,
) -> *const rustls_certified_key,
>;
// This is the same as a rustls_verify_server_cert_callback after unwrapping
// the Option (which is equivalent to checking for null).
type ClientHelloCallback = unsafe extern "C" fn(
userdata: rustls_client_hello_userdata,
hello: *const rustls_client_hello,
) -> *const rustls_certified_key;
struct ClientHelloResolver {
/// Implementation of rustls::ResolvesServerCert that passes values
/// from the supplied ClientHello to the callback function.
pub callback: ClientHelloCallback,
}
impl ClientHelloResolver {
pub fn new(callback: ClientHelloCallback) -> ClientHelloResolver {
ClientHelloResolver { callback }
}
}
impl ResolvesServerCert for ClientHelloResolver {
fn resolve(&self, client_hello: ClientHello) -> Option<Arc<CertifiedKey>> {
let sni_name: &str = {
match client_hello.server_name() {
Some(c) => c,
None => "",
}
};
let sni_name: rustls_str = match sni_name.try_into() {
Ok(r) => r,
Err(_) => return None,
};
let mapped_sigs: Vec<u16> = client_hello
.signature_schemes()
.iter()
.map(|s| s.get_u16())
.collect();
// Unwrap the Option. None becomes an empty slice.
let alpn = match client_hello.alpn() {
Some(iter) => iter.collect(),
None => vec![],
};
let alpn = rustls_slice_slice_bytes { inner: &alpn };
let signature_schemes: rustls_slice_u16 = (&*mapped_sigs).into();
let hello = rustls_client_hello {
sni_name,
signature_schemes,
alpn: &alpn,
};
let cb = self.callback;
let userdata = match userdata_get() {
Ok(u) => u,
Err(_) => return None,
};
let key_ptr: *const rustls_certified_key = unsafe { cb(userdata, &hello) };
let certified_key: &CertifiedKey = try_ref_from_ptr!(key_ptr);
Some(Arc::new(certified_key.clone()))
}
}
/// This struct can be considered thread safe, as long
/// as the registered callbacks are thread safe. This is
/// documented as a requirement in the API.
unsafe impl Sync for ClientHelloResolver {}
unsafe impl Send for ClientHelloResolver {}
impl rustls_server_config_builder {
/// Register a callback to be invoked when a connection created from this config
/// sees a TLS ClientHello message. If `userdata` has been set with
/// rustls_connection_set_userdata, it will be passed to the callback.
/// Otherwise the userdata param passed to the callback will be NULL.
///
/// Any existing `ResolvesServerCert` implementation currently installed in the
/// `rustls_server_config` will be replaced. This also means registering twice
/// will overwrite the first registration. It is not permitted to pass a NULL
/// value for `callback`.
///
/// EXPERIMENTAL: this feature of rustls-ffi is likely to change in the future, as
/// the rustls library is re-evaluating their current approach to client hello handling.
/// Installing a client_hello callback will replace any configured certified keys
/// and vice versa. Same holds true for the set_certified_keys variant.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_hello_callback(
builder: *mut rustls_server_config_builder,
callback: rustls_client_hello_callback,
) -> rustls_result {
ffi_panic_boundary! {
let callback: ClientHelloCallback = match callback {
Some(cb) => cb,
None => return rustls_result::NullParameter,
};
let builder: &mut ServerConfigBuilder = try_mut_from_ptr!(builder);
builder.cert_resolver = Some(Arc::new(ClientHelloResolver::new(
callback
)));
rustls_result::Ok
}
}
}
// Turn a slice of u16's into a vec of SignatureScheme as needed by rustls.
fn sigschemes(input: &[u16]) -> Vec<SignatureScheme> {
use rustls::SignatureScheme::*;
input
.iter()
.map(|n| match n {
// TODO: Once rustls 0.20.0+ is released, we can use `.into()` instead of this match.
0x0201 => RSA_PKCS1_SHA1,
0x0203 => ECDSA_SHA1_Legacy,
0x0401 => RSA_PKCS1_SHA256,
0x0403 => ECDSA_NISTP256_SHA256,
0x0501 => RSA_PKCS1_SHA384,
0x0503 => ECDSA_NISTP384_SHA384,
0x0601 => RSA_PKCS1_SHA512,
0x0603 => ECDSA_NISTP521_SHA512,
0x0804 => RSA_PSS_SHA256,
0x0805 => RSA_PSS_SHA384,
0x0806 => RSA_PSS_SHA512,
0x0807 => ED25519,
0x0808 => ED448,
n => SignatureScheme::Unknown(*n),
})
.collect()
}
/// Select a `rustls_certified_key` from the list that matches the cryptographic
/// parameters of a TLS client hello. Note that this does not do any SNI matching.
/// The input certificates should already have been filtered to ones matching the
/// SNI from the client hello.
///
/// This is intended for servers that are configured with several keys for the
/// same domain name(s), for example ECDSA and RSA types. The presented keys are
/// inspected in the order given and keys first in the list are given preference,
/// all else being equal. However rustls is free to choose whichever it considers
/// to be the best key with its knowledge about security issues and possible future
/// extensions of the protocol.
///
/// Return RUSTLS_RESULT_OK if a key was selected and RUSTLS_RESULT_NOT_FOUND
/// if none was suitable.
#[no_mangle]
pub extern "C" fn rustls_client_hello_select_certified_key(
hello: *const rustls_client_hello,
certified_keys: *const *const rustls_certified_key,
certified_keys_len: size_t,
out_key: *mut *const rustls_certified_key,
) -> rustls_result {
ffi_panic_boundary! {
let hello = try_ref_from_ptr!(hello);
let schemes: Vec<SignatureScheme> = sigschemes(try_slice!(hello.signature_schemes.data, hello.signature_schemes.len));
let out_key: &mut *const rustls_certified_key = unsafe {
match out_key.as_mut() {
Some(out_key) => out_key,
None => return NullParameter,
}
};
let keys_ptrs: &[*const rustls_certified_key] = try_slice!(certified_keys, certified_keys_len);
for &key_ptr in keys_ptrs {
let key_ref: &CertifiedKey = try_ref_from_ptr!(key_ptr);
if key_ref.key.choose_scheme(&schemes).is_some() {
*out_key = key_ptr;
return rustls_result::Ok;
}
}
rustls_result::NotFound
}
}
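// Illustrative sketch (not part of the original API surface): a client hello callback
// that delegates key selection to `rustls_client_hello_select_certified_key`. It
// assumes the application has stored a pointer to a single `rustls_certified_key` in
// the connection userdata via `rustls_connection_set_userdata`; the function name and
// that userdata convention are hypothetical.
#[allow(dead_code)]
unsafe extern "C" fn example_select_key_callback(
    userdata: rustls_client_hello_userdata,
    hello: *const rustls_client_hello,
) -> *const rustls_certified_key {
    // Interpret the userdata as the single certified key configured by the application.
    let key = userdata as *const rustls_certified_key;
    let mut chosen: *const rustls_certified_key = null();
    if matches!(
        rustls_client_hello_select_certified_key(hello, &key, 1, &mut chosen),
        rustls_result::Ok
    ) {
        chosen
    } else {
        // No compatible key was found; a real application would decide how to proceed
        // here. This sketch simply returns NULL.
        null()
    }
}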
impl rustls_server_config_builder {
/// Register callbacks for persistence of TLS session IDs and secrets. Both
/// keys and values are highly sensitive data, containing enough information
/// to break the security of the connections involved.
///
/// If `userdata` has been set with rustls_connection_set_userdata, it
/// will be passed to the callbacks. Otherwise the userdata param passed to
/// the callbacks will be NULL.
#[no_mangle]
pub extern "C" fn rustls_server_config_builder_set_persistence(
builder: *mut rustls_server_config_builder,
get_cb: rustls_session_store_get_callback,
put_cb: rustls_session_store_put_callback,
) -> rustls_result {
ffi_panic_boundary! {
let get_cb: SessionStoreGetCallback = match get_cb {
Some(cb) => cb,
None => return rustls_result::NullParameter,
};
let put_cb: SessionStorePutCallback = match put_cb {
Some(cb) => cb,
None => return rustls_result::NullParameter,
};
let builder: &mut ServerConfigBuilder = try_mut_from_ptr!(builder);
builder.session_storage = Some(Arc::new(SessionStoreBroker::new(
get_cb, put_cb
)));
rustls_result::Ok
}
}
}
#[cfg(test)]
mod tests {
use std::ptr::null_mut;
use super::*;
#[test]
fn test_config_builder() {
let builder: *mut rustls_server_config_builder =
rustls_server_config_builder::rustls_server_config_builder_new();
let h1 = "http/1.1".as_bytes();
let h2 = "h2".as_bytes();
let alpn: Vec<rustls_slice_bytes> = vec![h1.into(), h2.into()];
rustls_server_config_builder::rustls_server_config_builder_set_alpn_protocols(
builder,
alpn.as_ptr(),
alpn.len(),
);
let config = rustls_server_config_builder::rustls_server_config_builder_build(builder);
{
let config2 = try_ref_from_ptr!(config);
assert_eq!(config2.alpn_protocols, vec![h1, h2]);
}
rustls_server_config::rustls_server_config_free(config);
}
// Build a server connection and test the getters and initial values.
#[test]
fn test_server_config_builder_new_empty() {
let builder: *mut rustls_server_config_builder =
rustls_server_config_builder::rustls_server_config_builder_new();
// Building a config with no certificate and key configured results in null.
assert_eq!(
rustls_server_config_builder::rustls_server_config_builder_build(builder),
null()
);
}
#[test]
#[cfg_attr(miri, ignore)]
fn test_server_connection_new() {
let builder: *mut rustls_server_config_builder =
rustls_server_config_builder::rustls_server_config_builder_new();
let cert_pem = include_str!("../localhost/cert.pem").as_bytes();
let key_pem = include_str!("../localhost/key.pem").as_bytes();
let mut certified_key: *const rustls_certified_key = null();
let result = rustls_certified_key::rustls_certified_key_build(
cert_pem.as_ptr(),
cert_pem.len(),
key_pem.as_ptr(),
key_pem.len(),
&mut certified_key,
);
if !matches!(result, rustls_result::Ok) {
panic!(
"expected RUSTLS_RESULT_OK from rustls_certified_key_build, got {:?}",
result
);
}
rustls_server_config_builder::rustls_server_config_builder_set_certified_keys(
builder,
&certified_key,
1,
);
let config = rustls_server_config_builder::rustls_server_config_builder_build(builder);
assert_ne!(config, null());
let mut conn: *mut rustls_connection = null_mut();
let result = rustls_server_config::rustls_server_connection_new(config, &mut conn);
if !matches!(result, rustls_result::Ok) {
panic!("expected RUSTLS_RESULT_OK, got {:?}", result);
}
assert_eq!(rustls_connection::rustls_connection_wants_read(conn), true);
assert_eq!(
rustls_connection::rustls_connection_wants_write(conn),
false
);
assert_eq!(
rustls_connection::rustls_connection_is_handshaking(conn),
true
);
let some_byte = 42u8;
let mut alpn_protocol: *const u8 = &some_byte;
let mut alpn_protocol_len: usize = 1;
rustls_connection::rustls_connection_get_alpn_protocol(
conn,
&mut alpn_protocol,
&mut alpn_protocol_len,
);
assert_eq!(alpn_protocol, null());
assert_eq!(alpn_protocol_len, 0);
assert_eq!(
rustls_connection::rustls_connection_get_negotiated_ciphersuite(conn),
null()
);
assert_eq!(
rustls_connection::rustls_connection_get_peer_certificate(conn, 0),
null()
);
assert_eq!(
rustls_connection::rustls_connection_get_protocol_version(conn),
0
);
rustls_connection::rustls_connection_free(conn);
}
}
| 42.07868 | 149 | 0.659569 |
183a3d6bca83703f35ec11539689b8e1cbdffc7b | 41 | mod macros;
pub mod serial;
pub mod vga;
| 10.25 | 15 | 0.731707 |
f7c34eb745cb86ee77fab0d945ff89e2776f7c7a | 1,725 | use super::action::Action;
/// Describes an edit context for modifying a line.
#[derive(Debug)]
pub struct EditContext {
action: Option<Action>,
content: Option<String>,
}
impl EditContext {
/// Create a new empty instance.
#[must_use]
pub const fn new() -> Self {
Self {
action: None,
content: None,
}
}
/// Set the action.
#[must_use]
pub const fn action(mut self, action: Action) -> Self {
self.action = Some(action);
self
}
/// Set the content.
#[must_use]
pub fn content(mut self, content: &str) -> Self {
self.content = Some(content.to_owned());
self
}
/// Get the action.
#[must_use]
pub const fn get_action(&self) -> &Option<Action> {
&self.action
}
/// Get the content.
#[must_use]
pub const fn get_content(&self) -> &Option<String> {
&self.content
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty() {
let edit_context = EditContext::new();
assert_eq!(edit_context.get_action(), &None);
assert_eq!(edit_context.get_content(), &None);
}
#[test]
fn with_action() {
let edit_context = EditContext::new().action(Action::Break);
assert_eq!(edit_context.get_action(), &Some(Action::Break));
assert_eq!(edit_context.get_content(), &None);
}
#[test]
fn with_content() {
let edit_context = EditContext::new().content("test content");
assert_eq!(edit_context.get_action(), &None);
assert_eq!(edit_context.get_content(), &Some(String::from("test content")));
}
#[test]
fn with_content_and_action() {
let edit_context = EditContext::new().action(Action::Edit).content("test content");
assert_eq!(edit_context.get_action(), &Some(Action::Edit));
assert_eq!(edit_context.get_content(), &Some(String::from("test content")));
}
}
| 21.835443 | 85 | 0.666667 |
891125534197b8774ba1e47ffffaf8936218114a | 6,894 | use std::marker::PhantomData;
use super::methods::*;
use super::protocol::Protocol;
use super::protocol::ProtocolId;
use super::RPCError;
use crate::rpc::protocol::Encoding;
use crate::rpc::protocol::Version;
use crate::rpc::{
codec::{base::BaseOutboundCodec, ssz_snappy::SSZSnappyOutboundCodec, OutboundCodec},
methods::ResponseTermination,
};
use futures::future::BoxFuture;
use futures::prelude::{AsyncRead, AsyncWrite};
use futures::{FutureExt, SinkExt};
use libp2p::core::{OutboundUpgrade, UpgradeInfo};
use std::sync::Arc;
use tokio_util::{
codec::Framed,
compat::{Compat, FuturesAsyncReadCompatExt},
};
use types::{EthSpec, ForkContext};
/* Outbound request */
// Combines all the RPC requests into a single enum to implement `UpgradeInfo` and
// `OutboundUpgrade`
#[derive(Debug, Clone)]
pub struct OutboundRequestContainer<TSpec: EthSpec> {
pub req: OutboundRequest<TSpec>,
pub fork_context: Arc<ForkContext>,
}
#[derive(Debug, Clone, PartialEq)]
pub enum OutboundRequest<TSpec: EthSpec> {
Status(StatusMessage),
Goodbye(GoodbyeReason),
BlocksByRange(BlocksByRangeRequest),
BlocksByRoot(BlocksByRootRequest),
Ping(Ping),
MetaData(PhantomData<TSpec>),
}
impl<TSpec: EthSpec> UpgradeInfo for OutboundRequestContainer<TSpec> {
type Info = ProtocolId;
type InfoIter = Vec<Self::Info>;
// add further protocols as we support more encodings/versions
fn protocol_info(&self) -> Self::InfoIter {
self.req.supported_protocols()
}
}
/// Implements the encoding per supported protocol for `RPCRequest`.
impl<TSpec: EthSpec> OutboundRequest<TSpec> {
pub fn supported_protocols(&self) -> Vec<ProtocolId> {
match self {
// add more protocols when versions/encodings are supported
OutboundRequest::Status(_) => vec![ProtocolId::new(
Protocol::Status,
Version::V1,
Encoding::SSZSnappy,
)],
OutboundRequest::Goodbye(_) => vec![ProtocolId::new(
Protocol::Goodbye,
Version::V1,
Encoding::SSZSnappy,
)],
OutboundRequest::BlocksByRange(_) => vec![
ProtocolId::new(Protocol::BlocksByRange, Version::V2, Encoding::SSZSnappy),
ProtocolId::new(Protocol::BlocksByRange, Version::V1, Encoding::SSZSnappy),
],
OutboundRequest::BlocksByRoot(_) => vec![
ProtocolId::new(Protocol::BlocksByRoot, Version::V2, Encoding::SSZSnappy),
ProtocolId::new(Protocol::BlocksByRoot, Version::V1, Encoding::SSZSnappy),
],
OutboundRequest::Ping(_) => vec![ProtocolId::new(
Protocol::Ping,
Version::V1,
Encoding::SSZSnappy,
)],
OutboundRequest::MetaData(_) => vec![
ProtocolId::new(Protocol::MetaData, Version::V2, Encoding::SSZSnappy),
ProtocolId::new(Protocol::MetaData, Version::V1, Encoding::SSZSnappy),
],
}
}
/* These functions are used in the handler for stream management */
/// Number of responses expected for this request.
pub fn expected_responses(&self) -> u64 {
match self {
OutboundRequest::Status(_) => 1,
OutboundRequest::Goodbye(_) => 0,
OutboundRequest::BlocksByRange(req) => req.count,
OutboundRequest::BlocksByRoot(req) => req.block_roots.len() as u64,
OutboundRequest::Ping(_) => 1,
OutboundRequest::MetaData(_) => 1,
}
}
/// Gives the corresponding `Protocol` to this request.
pub fn protocol(&self) -> Protocol {
match self {
OutboundRequest::Status(_) => Protocol::Status,
OutboundRequest::Goodbye(_) => Protocol::Goodbye,
OutboundRequest::BlocksByRange(_) => Protocol::BlocksByRange,
OutboundRequest::BlocksByRoot(_) => Protocol::BlocksByRoot,
OutboundRequest::Ping(_) => Protocol::Ping,
OutboundRequest::MetaData(_) => Protocol::MetaData,
}
}
/// Returns the `ResponseTermination` type associated with the request if a stream gets
/// terminated.
pub fn stream_termination(&self) -> ResponseTermination {
match self {
// this only gets called after `multiple_responses()` returns true. Therefore, only
// variants that have `multiple_responses()` can have values.
OutboundRequest::BlocksByRange(_) => ResponseTermination::BlocksByRange,
OutboundRequest::BlocksByRoot(_) => ResponseTermination::BlocksByRoot,
OutboundRequest::Status(_) => unreachable!(),
OutboundRequest::Goodbye(_) => unreachable!(),
OutboundRequest::Ping(_) => unreachable!(),
OutboundRequest::MetaData(_) => unreachable!(),
}
}
}
/* RPC Response type - used for outbound upgrades */
/* Outbound upgrades */
pub type OutboundFramed<TSocket, TSpec> = Framed<Compat<TSocket>, OutboundCodec<TSpec>>;
impl<TSocket, TSpec> OutboundUpgrade<TSocket> for OutboundRequestContainer<TSpec>
where
TSpec: EthSpec + Send + 'static,
TSocket: AsyncRead + AsyncWrite + Unpin + Send + 'static,
{
type Output = OutboundFramed<TSocket, TSpec>;
type Error = RPCError;
type Future = BoxFuture<'static, Result<Self::Output, Self::Error>>;
fn upgrade_outbound(self, socket: TSocket, protocol: Self::Info) -> Self::Future {
// convert to a tokio compatible socket
let socket = socket.compat();
let codec = match protocol.encoding {
Encoding::SSZSnappy => {
let ssz_snappy_codec = BaseOutboundCodec::new(SSZSnappyOutboundCodec::new(
protocol,
usize::max_value(),
self.fork_context.clone(),
));
OutboundCodec::SSZSnappy(ssz_snappy_codec)
}
};
let mut socket = Framed::new(socket, codec);
async {
socket.send(self.req).await?;
socket.close().await?;
Ok(socket)
}
.boxed()
}
}
impl<TSpec: EthSpec> std::fmt::Display for OutboundRequest<TSpec> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
OutboundRequest::Status(status) => write!(f, "Status Message: {}", status),
OutboundRequest::Goodbye(reason) => write!(f, "Goodbye: {}", reason),
OutboundRequest::BlocksByRange(req) => write!(f, "Blocks by range: {}", req),
OutboundRequest::BlocksByRoot(req) => write!(f, "Blocks by root: {:?}", req),
OutboundRequest::Ping(ping) => write!(f, "Ping: {}", ping.data),
OutboundRequest::MetaData(_) => write!(f, "MetaData request"),
}
}
}
| 37.672131 | 95 | 0.615753 |
e900e7578fdb3e4705837752b9c4face253822c8 | 1,458 | use std::{collections::HashMap, ops::AddAssign};
use anyhow::Result;
use itertools::Itertools;
fn id(a: &str, i: usize) -> char {
a.chars().nth(i).unwrap()
}
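/// Solves a pair-insertion ("polymer") puzzle in the style of Advent of Code 2021,
/// day 14: a template string is repeatedly expanded according to `XY -> Z` rules, and
/// the answer is the difference between the most and least common characters after
/// 40 steps.
///
/// Instead of materialising the exponentially growing string, the polymer is kept as a
/// map from adjacent character pairs to occurrence counts, so each step only touches
/// the distinct pairs rather than the whole string.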
pub fn day14(path: &str) -> Result<()> {
let file = std::fs::read_to_string(path)?;
dbg!(&file);
let (template, rules) = file.split_once("\n\n").unwrap();
let rules: HashMap<_, _> = rules
.lines()
.map(|line| line.split_once(" -> ").unwrap())
.map(|(a, b)| (id(a, 0), id(a, 1), id(b, 0)))
.map(|(a, b, c)| ((a, b), ((a, c), (c, b))))
.collect();
dbg!(&rules);
let mut counts: HashMap<(char, char), usize> = template.chars().tuple_windows().counts();
dbg!(&counts);
for _ in 0..40 {
let mut ncounts: HashMap<_, _> = HashMap::new();
for (pair, count) in &counts {
ncounts.entry(rules[pair].0).or_insert(0).add_assign(count);
ncounts.entry(rules[pair].1).or_insert(0).add_assign(count);
}
counts = ncounts;
dbg!(&counts);
}
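    // Convert pair counts back to per-character counts: every character in the polymer
    // is the first element of exactly one pair, except the final character of the
    // original template (which never changes), so that one is chained back in with a
    // count of 1 before grouping and summing.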
let final_counts = counts
.iter()
.map(|((a, _), count)| (*a, *count))
.chain(Some((template.chars().last().unwrap(), 1)))
.into_group_map_by(|f| f.0)
.into_iter()
.map(|(c, counts)| (c, counts.iter().map(|p| p.1).sum::<usize>()))
.sorted_by_key(|p| p.1)
.collect_vec();
dbg!(final_counts.last().unwrap().1 - final_counts.first().unwrap().1);
Ok(())
}
| 30.375 | 93 | 0.527435 |
1d78bdadad4a7ab3a267e91864eebfc67b11b0c5 | 21,950 | use crate::conversion::coords::{CoordSeq, CoordType, ToSFCGALGeom};
use crate::{
utils::check_null_geom, GeomType, Point2d, Result, SFCGeometry, ToCoordinates, ToSFCGAL,
};
use failure::Error;
use sfcgal_sys::{
sfcgal_geometry_collection_add_geometry, sfcgal_geometry_collection_create,
sfcgal_geometry_collection_geometry_n, sfcgal_geometry_collection_num_geometries,
sfcgal_geometry_t, sfcgal_linestring_add_point, sfcgal_linestring_create,
sfcgal_linestring_num_points, sfcgal_linestring_point_n, sfcgal_multi_linestring_create,
sfcgal_multi_point_create, sfcgal_multi_polygon_create, sfcgal_point_create_from_xy,
sfcgal_point_x, sfcgal_point_y, sfcgal_polygon_add_interior_ring,
sfcgal_polygon_create_from_exterior_ring, sfcgal_polygon_exterior_ring,
sfcgal_polygon_interior_ring_n, sfcgal_polygon_num_interior_rings, sfcgal_triangle_create,
sfcgal_triangle_set_vertex_from_xy,
};
use std::convert::Into;
use std::iter::FromIterator;
/// Conversion from [`SFCGeometry`] (implemented on [geo-types](https://docs.rs/geo-types/) geometries)
///
/// [`SFCGeometry`]: struct.SFCGeometry.html
pub trait TryInto<T> {
type Err;
fn try_into(self) -> Result<T>;
}
impl CoordType for geo_types::Point<f64> {}
impl CoordType for geo_types::Coordinate<f64> {}
impl ToSFCGALGeom for geo_types::Point<f64> {
fn to_sfcgeometry(&self) -> Result<*mut sfcgal_geometry_t> {
let g = unsafe { sfcgal_point_create_from_xy(self.x(), self.y()) };
check_null_geom(g)?;
Ok(g)
}
}
impl ToSFCGALGeom for geo_types::Coordinate<f64> {
fn to_sfcgeometry(&self) -> Result<*mut sfcgal_geometry_t> {
let g = unsafe { sfcgal_point_create_from_xy(self.x, self.y) };
check_null_geom(g)?;
Ok(g)
}
}
/// Implements conversion from CoordSeq to geo_types::Geometry
/// (prefer using TryInto<geo_types::Geometry> on SFCGeometry if the intent
/// is to convert SFCGAL geometries to geo_types ones)
impl TryInto<geo_types::Geometry<f64>> for CoordSeq<Point2d> {
type Err = Error;
fn try_into(self) -> Result<geo_types::Geometry<f64>> {
match self {
CoordSeq::Point(pt) => Ok(geo_types::Point(pt.into()).into()),
CoordSeq::Multipoint(pts) => Ok(geo_types::MultiPoint::from_iter(pts.into_iter()).into()),
CoordSeq::Linestring(pts) => Ok(geo_types::LineString::from_iter(pts.into_iter()).into()),
CoordSeq::Multilinestring(lines) => {
Ok(geo_types::MultiLineString(
lines.into_iter()
.map(geo_types::LineString::from)
.collect()
).into())
},
CoordSeq::Polygon(rings) => {
let mut it = rings.into_iter();
let exterior = geo_types::LineString::from(it.next().unwrap());
let interiors = it.map(geo_types::LineString::from).collect::<Vec<geo_types::LineString<f64>>>();
Ok(geo_types::Polygon::new(exterior, interiors).into())
},
CoordSeq::Multipolygon(polygons) => {
let polys = polygons.into_iter().map(|p| {
let a: geo_types::Geometry<f64> = CoordSeq::Polygon(p).try_into()?;
if let Some(poly) = a.into_polygon() {
Ok(poly)
} else {
Err(format_err!("Error while building geo_types::MultiPolygon"))
}
}).collect::<Result<Vec<geo_types::Polygon<f64>>>>()?;
Ok(geo_types::MultiPolygon(polys).into())
},
CoordSeq::Geometrycollection(collection) => {
Ok(geo_types::Geometry::GeometryCollection(
geo_types::GeometryCollection(collection
.into_iter()
.map(|g| g.try_into())
.collect::<Result<Vec<geo_types::Geometry<f64>>>>()?
)
))
},
_ => Err(
format_err!(
"Conversion from CoordSeq variants `Solid`, `Multisolid`, `Triangulatedsurface` and `Polyhedralsurface` are not yet implemented!"))
}
}
}
/// Implements fallible conversion from SFCGeometry to geo_types::Geometry.
///
/// This is notably fallible because some types of [`SFCGeometry`], like GeomType::Polyhedralsurface,
/// don't have equivalents in geo_types::Geometry.
/// Please note that geo_types Coordinate and Point primitives are 2d only, so
/// any information about the z coordinate (if present) is lost in the conversion.
impl TryInto<geo_types::Geometry<f64>> for SFCGeometry {
type Err = Error;
fn try_into(self) -> Result<geo_types::Geometry<f64>> {
match self._type()? {
GeomType::Point => {
let c = self.to_coordinates::<Point2d>()?;
let p: geo_types::Point<f64> = match c {
CoordSeq::Point(pt) => pt.into(),
_ => unimplemented!(),
};
Ok(geo_types::Geometry::Point(p))
},
GeomType::Multipoint => {
let c = self.to_coordinates::<Point2d>()?;
let p: geo_types::MultiPoint<f64> = match c {
CoordSeq::Multipoint(pts) => pts.into(),
_ => unimplemented!(),
};
Ok(geo_types::Geometry::MultiPoint(p))
},
GeomType::Linestring => {
Ok(
geo_types::Geometry::LineString(
geo_line_from_sfcgal(unsafe { self.c_geom.as_ref() })?
)
)
},
GeomType::Multilinestring => {
let ngeoms = unsafe {
sfcgal_geometry_collection_num_geometries(self.c_geom.as_ref())
};
let mut lines = Vec::with_capacity(ngeoms);
for i in 0..ngeoms {
let geom = unsafe { sfcgal_geometry_collection_geometry_n(self.c_geom.as_ref(), i) };
lines.push(geo_line_from_sfcgal(geom)?);
}
Ok(
geo_types::Geometry::MultiLineString(
geo_types::MultiLineString(lines)
)
)
},
GeomType::Polygon => {
let nrings = unsafe { sfcgal_polygon_num_interior_rings(self.c_geom.as_ref()) };
let exterior_sfcgal = unsafe { sfcgal_polygon_exterior_ring(self.c_geom.as_ref()) };
let exterior_geo = geo_line_from_sfcgal(exterior_sfcgal)?;
let mut interiors_geo = Vec::with_capacity(nrings);
for i in 0..nrings {
let line_sfcgal = unsafe {
sfcgal_polygon_interior_ring_n(self.c_geom.as_ref(), i)
};
interiors_geo.push(geo_line_from_sfcgal(line_sfcgal)?);
}
Ok(
geo_types::Geometry::Polygon(
geo_types::Polygon::new(exterior_geo, interiors_geo)
)
)
}
GeomType::Multipolygon => {
let ngeoms = unsafe {
sfcgal_geometry_collection_num_geometries(self.c_geom.as_ref())
};
let mut vec_polygons = Vec::with_capacity(ngeoms);
for i in 0..ngeoms {
let _polyg = unsafe { sfcgal_geometry_collection_geometry_n(self.c_geom.as_ref(), i) };
let nrings = unsafe { sfcgal_polygon_num_interior_rings(_polyg) };
let exterior_sfcgal = unsafe { sfcgal_polygon_exterior_ring(_polyg) };
let exterior_geo = geo_line_from_sfcgal(exterior_sfcgal)?;
let mut interiors_geo = Vec::with_capacity(nrings);
for j in 0..nrings {
let line_sfcgal = unsafe {
sfcgal_polygon_interior_ring_n(_polyg, j)
};
interiors_geo.push(geo_line_from_sfcgal(line_sfcgal)?);
}
vec_polygons.push(geo_types::Polygon::new(exterior_geo, interiors_geo));
}
Ok(
geo_types::MultiPolygon(vec_polygons).into()
)
},
GeomType::Geometrycollection => {
let c = self.to_coordinates::<Point2d>()?;
let p = match c {
CoordSeq::Geometrycollection(g) => {
g.into_iter()
.map(|g| g.try_into())
.collect::<Result<Vec<geo_types::Geometry<f64>>>>()?
},
_ => unimplemented!()
};
Ok(geo_types::Geometry::GeometryCollection(geo_types::GeometryCollection(p)))
},
_ => Err(
format_err!(
"Conversion from SFCGeometry of type `Triangle`, `Solid`, `Multisolid`, \
`Triangulatedsurface` and `Polyhedralsurface` \
to geo_types::Geometry are not yet implemented!")
)
}
}
}
fn geo_line_from_sfcgal(
sfcgal_geom: *const sfcgal_geometry_t,
) -> Result<geo_types::LineString<f64>> {
let n_points = unsafe { sfcgal_linestring_num_points(sfcgal_geom) };
let mut v_points = Vec::with_capacity(n_points);
for i in 0..n_points {
let pt_sfcgal = unsafe { sfcgal_linestring_point_n(sfcgal_geom, i) };
check_null_geom(pt_sfcgal)?;
let pt_geom = geo_point_from_sfcgal(pt_sfcgal);
v_points.push(pt_geom);
}
Ok(geo_types::LineString::from(v_points))
}
fn geo_point_from_sfcgal(geom: *const sfcgal_geometry_t) -> geo_types::Point<f64> {
let x = unsafe { sfcgal_point_x(geom) };
let y = unsafe { sfcgal_point_y(geom) };
geo_types::Point::new(x, y)
}
/// Create a `SFCGeometry` from a geo-types Point
impl ToSFCGAL for geo_types::Point<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
unsafe { SFCGeometry::new_from_raw(sfcgal_point_create_from_xy(self.x(), self.y()), true) }
}
}
/// Create a `SFCGeometry` from a geo-types MultiPoint
impl ToSFCGAL for geo_types::MultiPoint<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
make_sfcgal_multi_geom!(
sfcgal_multi_point_create(),
self.0
.iter()
.map(|pt| pt.to_sfcgeometry())
.collect::<Result<Vec<_>>>()?
)
}
}
/// Create a `SFCGeometry` from a geo-types Line
impl ToSFCGAL for geo_types::Line<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
let out_linestring = unsafe { sfcgal_linestring_create() };
check_null_geom(out_linestring)?;
let start = unsafe { sfcgal_point_create_from_xy(self.start.x, self.start.y) };
let end = unsafe { sfcgal_point_create_from_xy(self.end.x, self.end.y) };
check_null_geom(start)?;
check_null_geom(end)?;
unsafe {
sfcgal_linestring_add_point(out_linestring, start);
sfcgal_linestring_add_point(out_linestring, end);
SFCGeometry::new_from_raw(out_linestring, true)
}
}
}
/// Create a `SFCGeometry` from a geo-types LineString
impl ToSFCGAL for geo_types::LineString<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
let line = (&self.0).to_sfcgeometry()?;
unsafe { SFCGeometry::new_from_raw(line, true) }
}
}
/// Create a `SFCGeometry` from a geo-types MultiLineString
impl ToSFCGAL for geo_types::MultiLineString<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
make_sfcgal_multi_geom!(
sfcgal_multi_linestring_create(),
self.0
.iter()
.map(|line| line.0.to_sfcgeometry())
.collect::<Result<Vec<_>>>()?
)
}
}
/// Create a `SFCGeometry` from a geo-types Triangle
impl ToSFCGAL for geo_types::Triangle<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
let out_triangle = unsafe { sfcgal_triangle_create() };
check_null_geom(out_triangle)?;
let &geo_types::Triangle(ref c0, ref c1, ref c2) = self;
unsafe {
sfcgal_triangle_set_vertex_from_xy(out_triangle, 0, c0.x, c0.y);
sfcgal_triangle_set_vertex_from_xy(out_triangle, 1, c1.x, c1.y);
sfcgal_triangle_set_vertex_from_xy(out_triangle, 2, c2.x, c2.y);
SFCGeometry::new_from_raw(out_triangle, true)
}
}
}
fn geo_polygon_to_sfcgal<T>(
exterior: &Vec<T>,
interiors: &[geo_types::LineString<f64>],
) -> Result<*mut sfcgal_geometry_t>
where
T: ToSFCGALGeom + CoordType,
{
let out_polygon =
unsafe { sfcgal_polygon_create_from_exterior_ring(exterior.to_sfcgeometry()?) };
check_null_geom(out_polygon)?;
for ring in interiors.iter() {
unsafe { sfcgal_polygon_add_interior_ring(out_polygon, ring.0.to_sfcgeometry()?) };
}
Ok(out_polygon)
}
/// Create a `SFCGeometry` from a geo-types Polygon
impl ToSFCGAL for geo_types::Polygon<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
// let geo_types::Polygon{exterior, interiors} = self;
let (exterior, interiors) = (self.exterior(), self.interiors());
let out_polygon = geo_polygon_to_sfcgal(&exterior.0, &interiors)?;
unsafe { SFCGeometry::new_from_raw(out_polygon, true) }
}
}
/// Create a `SFCGeometry` from a geo-types MultiPolygon
impl ToSFCGAL for geo_types::MultiPolygon<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
make_sfcgal_multi_geom!(
sfcgal_multi_polygon_create(),
self.0
.iter()
.map(|polygon| {
// let geo_types::Polygon{ref exterior, ref interiors} = polygon;
let (exterior, interiors) = (polygon.exterior(), polygon.interiors());
geo_polygon_to_sfcgal(&exterior.0, &interiors)
})
.collect::<Result<Vec<_>>>()?
)
}
}
/// Create a `SFCGeometry` from a geo-types GeometryCollection
impl ToSFCGAL for geo_types::GeometryCollection<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
make_sfcgal_multi_geom!(
sfcgal_geometry_collection_create(),
self.0
.iter()
.map(|geom| {
let mut _g = geom.to_sfcgal()?;
_g.owned = false;
Ok(_g.c_geom.as_ptr())
})
.collect::<Result<Vec<_>>>()?
)
}
}
/// Create a `SFCGeometry` from any geo-type Geometry
impl ToSFCGAL for geo_types::Geometry<f64> {
fn to_sfcgal(&self) -> Result<SFCGeometry> {
match *self {
geo_types::Geometry::Point(ref c) => c.to_sfcgal(),
geo_types::Geometry::Line(ref c) => c.to_sfcgal(),
geo_types::Geometry::LineString(ref c) => c.to_sfcgal(),
geo_types::Geometry::Polygon(ref c) => c.to_sfcgal(),
geo_types::Geometry::MultiPoint(ref c) => c.to_sfcgal(),
geo_types::Geometry::MultiLineString(ref c) => c.to_sfcgal(),
geo_types::Geometry::MultiPolygon(ref c) => c.to_sfcgal(),
geo_types::Geometry::GeometryCollection(ref c) => c.to_sfcgal(),
}
}
}
#[cfg(test)]
mod tests {
use super::TryInto;
use crate::{GeomType, SFCGeometry, ToSFCGAL};
use geo_types::{
Coordinate, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, Triangle,
};
#[test]
fn point_geo_to_sfcgal_to_geo() {
let pt = Point::new(0.1, 0.9);
let pt_sfcgal = pt.to_sfcgal().unwrap();
assert!(pt_sfcgal.is_valid().unwrap());
let pt: Point<f64> = pt_sfcgal.try_into().unwrap().into_point().unwrap();
assert_eq!(pt.x(), 0.1);
assert_eq!(pt.y(), 0.9);
}
#[test]
fn point_sfcgal_try_into_geo() {
let pt_sfcgal = SFCGeometry::new("POINT(0.1 0.9)").unwrap();
let pt: Point<f64> = pt_sfcgal.try_into().unwrap().into_point().unwrap();
assert_ulps_eq!(pt.x(), 0.1);
assert_ulps_eq!(pt.y(), 0.9);
}
#[test]
fn multipoint_geo_to_sfcgal_to_geo() {
let multipt = MultiPoint::from(vec![Point::new(0., 0.), Point::new(1., 1.)]);
let mpt_sfcgal = multipt.to_sfcgal().unwrap();
assert!(mpt_sfcgal.is_valid().unwrap());
let mpt: MultiPoint<f64> = mpt_sfcgal.try_into().unwrap().into_multi_point().unwrap();
assert_eq!(mpt.0[0].x(), 0.);
assert_eq!(mpt.0[0].y(), 0.);
assert_eq!(mpt.0[1].x(), 1.);
assert_eq!(mpt.0[1].y(), 1.);
}
#[test]
fn linestring_geo_to_sfcgal_to_geo() {
let linestring = LineString::from(vec![Point::new(0., 0.), Point::new(1., 1.)]);
let line_sfcgal = linestring.to_sfcgal().unwrap();
assert!(line_sfcgal.is_valid().unwrap());
let linestring_geo: LineString<f64> =
line_sfcgal.try_into().unwrap().into_line_string().unwrap();
assert_eq!(linestring_geo.0[0].x, 0.);
assert_eq!(linestring_geo.0[0].y, 0.);
assert_eq!(linestring_geo.0[1].x, 1.);
assert_eq!(linestring_geo.0[1].y, 1.);
}
#[test]
fn multilinestring_geo_to_sfcgal_to_geo() {
let multilinestring = MultiLineString::from(LineString::from(vec![
Point::new(0., 0.),
Point::new(1., 1.),
]));
let mls_sfcgal = multilinestring.to_sfcgal().unwrap();
assert!(mls_sfcgal.is_valid().unwrap());
let mls: MultiLineString<f64> = mls_sfcgal
.try_into()
.unwrap()
.into_multi_line_string()
.unwrap();
assert_eq!(mls.0[0].0[0].x, 0.);
assert_eq!(mls.0[0].0[0].y, 0.);
assert_eq!(mls.0[0].0[1].x, 1.);
assert_eq!(mls.0[0].0[1].y, 1.);
}
#[test]
fn triangle_geo_to_sfcgal_to_geo() {
let tri = Triangle(
Coordinate::from((0., 0.)),
Coordinate::from((1., 0.)),
Coordinate::from((0.5, 1.)),
);
let tri_sfcgal = tri.to_sfcgal().unwrap();
assert!(tri_sfcgal.is_valid().unwrap());
assert_eq!(tri_sfcgal._type().unwrap(), GeomType::Triangle);
let coords: Result<geo_types::Geometry<f64>, _> = tri_sfcgal.try_into();
assert_eq!(
coords.err().unwrap().to_string(),
"Conversion from SFCGeometry of type `Triangle`, `Solid`, `Multisolid`, \
`Triangulatedsurface` and `Polyhedralsurface` to geo_types::Geometry are not yet implemented!",
)
}
#[test]
fn polygon_geo_to_sfcgal_to_geo() {
let polygon = Polygon::new(
LineString::from(vec![(0., 0.), (1., 0.), (1., 1.), (0., 1.), (0., 0.)]),
vec![LineString::from(vec![
(0.1, 0.1),
(0.1, 0.9),
(0.9, 0.9),
(0.9, 0.1),
(0.1, 0.1),
])],
);
let poly_sfcgal = polygon.to_sfcgal().unwrap();
let polyg: Polygon<f64> = poly_sfcgal.try_into().unwrap().into_polygon().unwrap();
let interiors = polyg.interiors();
assert_eq!(
polyg.exterior(),
&LineString::from(vec![(0., 0.), (1., 0.), (1., 1.), (0., 1.,), (0., 0.)])
);
assert_eq!(interiors[0].0[0].x, 0.1);
assert_eq!(interiors[0].0[0].y, 0.1);
assert_eq!(interiors[0].0[2].x, 0.9);
assert_eq!(interiors[0].0[2].y, 0.9);
assert_eq!(interiors[0].0[3].x, 0.9);
assert_eq!(interiors[0].0[3].y, 0.1);
}
#[test]
fn multipolygon_geo_to_sfcgal_to_geo() {
let multipolygon = MultiPolygon(vec![Polygon::new(
LineString::from(vec![(0., 0.), (1., 0.), (1., 1.), (0., 1.), (0., 0.)]),
vec![LineString::from(vec![
(0.1, 0.1),
(0.1, 0.9),
(0.9, 0.9),
(0.9, 0.1),
(0.1, 0.1),
])],
)]);
        let multipolygon_sfcgal = multipolygon.to_sfcgal().unwrap();
        let mpg: MultiPolygon<f64> = multipolygon_sfcgal
.try_into()
.unwrap()
.into_multi_polygon()
.unwrap();
assert_eq!(
mpg.0[0].exterior(),
&LineString::from(vec![(0., 0.), (1., 0.), (1., 1.), (0., 1.,), (0., 0.)])
);
assert_eq!(
mpg.0[0].interiors()[0],
LineString::from(vec![
(0.1, 0.1),
(0.1, 0.9,),
(0.9, 0.9),
(0.9, 0.1),
(0.1, 0.1)
])
);
}
#[test]
fn geometrycollection_sfcgal_to_geo_to_sfcgal() {
let input_wkt = "GEOMETRYCOLLECTION(POINT(4.0 6.0),LINESTRING(4.0 6.0,7.0 10.0))";
let gc_sfcgal = SFCGeometry::new(input_wkt).unwrap();
let gc: geo_types::Geometry<f64> = gc_sfcgal.try_into().unwrap();
if let geo_types::Geometry::GeometryCollection(_gc) = &gc {
assert_eq!(Point::new(4., 6.), _gc.0[0].clone().into_point().unwrap(),);
assert_eq!(
LineString::from(vec![(4., 6.), (7., 10.)]),
_gc.0[1].clone().into_line_string().unwrap(),
);
let gc_sfcgal = _gc.to_sfcgal().unwrap();
assert_eq!(input_wkt, gc_sfcgal.to_wkt_decim(1).unwrap());
} else {
panic!("Error while deconstructing geometrycollection");
}
}
}
| 40.127971 | 155 | 0.554579 |
de7d74cf2319fe9461c4006bdf037d666d11c726 | 1,998 | #![cfg_attr(not(feature = "std"), no_std)]
pub use self::aggregator::ExchangePrices;
use ink_lang as ink;
#[ink::contract]
mod aggregator {
#[cfg(not(feature = "ink-as-dependency"))]
use ink_storage::collections::HashMap as StorageHashMap;
#[ink(storage)]
pub struct ExchangePrices {
/// Mapping from rafts to price of rafts.
prices: StorageHashMap<AccountId, u128>,
}
    /// Event emitted when a new price is fed for a rafts token.
#[ink(event)]
pub struct Feed {
#[ink(topic)]
rafts: AccountId,
price: u128,
}
impl ExchangePrices {
#[ink(constructor)]
pub fn new() -> Self {
Self {
prices: StorageHashMap::new(),
}
}
/// Returns the price of rafts.
///
        /// Returns `0` if no price has been fed for the given rafts account.
#[ink(message)]
pub fn get_price(&self, rafts: AccountId) -> u128 {
*self.prices.get(&rafts).unwrap_or(&0)
}
/// Feed the price of rafts.
/// On success a `Feed` event is emitted.
#[ink(message)]
pub fn feed_price(&mut self, rafts: AccountId, price: u128) {
self.prices.insert(rafts, price);
self.env().emit_event(Feed {
rafts,
price,
});
}
}
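    // Note: `feed_price` has no access control, so any caller can overwrite a stored
    // price. A production oracle would typically record a feeder/owner AccountId in
    // the constructor and check `self.env().caller()` before accepting a new feed.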
/// Unit tests.
#[cfg(test)]
mod tests {
use super::*;
use ink_lang as ink;
/// The default constructor does its job.
#[ink::test]
fn new_works() {
// Constructor works.
let contract = ExchangePrices::new();
assert_eq!(contract.get_price(AccountId::from([0x01; 32])), 0);
}
#[ink::test]
fn get_price_works() {
let mut contract = ExchangePrices::new();
contract.feed_price(AccountId::from([0x01; 32]), 100);
assert_eq!(contract.get_price(AccountId::from([0x01; 32])), 100);
}
}
} | 25.615385 | 77 | 0.525526 |
62a64912eceed990e2fcb4f35206c204b43739d4 | 3,833 | extern crate bindgen;
use std::env;
fn main() {
let out_path = env::current_dir().unwrap();
// For native targets, include all types and functions
bindgen::Builder::default()
.header(
out_path
.join("../spirv_cross/src/wrapper.hpp")
.to_str()
.unwrap(),
)
.clang_args(["-x", "c++", "-std=c++14"].iter())
.enable_cxx_namespaces()
.whitelisted_function("sc_internal.*")
.whitelisted_type("spv::.*")
.bitfield_enum(".*(Mask|Flags)")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::Resource")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::MSLVertexAttr")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::MSLResourceBinding")
// TODO: Simplify with glob
.whitelisted_type("ScInternalCompilerBase")
.whitelisted_type("ScInternalCompilerHlsl")
.whitelisted_type("ScInternalCompilerMsl")
.whitelisted_type("ScInternalCompilerGlsl")
.whitelisted_type("ScInternalResult")
.whitelisted_type("ScEntryPoint")
.whitelisted_type("ScCombinedImageSampler")
.whitelisted_type("ScHlslRootConstant")
.whitelisted_type("ScHlslCompilerOptions")
.whitelisted_type("ScMslCompilerOptions")
.whitelisted_type("ScGlslCompilerOptions")
.whitelisted_type("ScResource")
.whitelisted_type("ScResourceArray")
.whitelisted_type("ScShaderResources")
.whitelisted_type("ScSpecializationConstant")
.whitelisted_type("ScType")
.opaque_type("std::.*")
.clang_args(vec![
"-DSPIRV_CROSS_WRAPPER_GLSL",
"-DSPIRV_CROSS_WRAPPER_MSL",
"-DSPIRV_CROSS_WRAPPER_HLSL",
])
.layout_tests(false)
.generate()
.expect("Unable to generate bindings")
.write_to_file(out_path.join("../spirv_cross/src/bindings_native.rs"))
.expect("Couldn't write bindings!");
// For wasm targets, include all types, functions will be implemented manually
bindgen::Builder::default()
.header(
out_path
.join("../spirv_cross/src/wrapper.hpp")
.to_str()
.unwrap(),
)
.clang_args(["-x", "c++", "-std=c++14"].iter())
.enable_cxx_namespaces()
.whitelisted_type("spv::.*")
.bitfield_enum(".*(Mask|Flags)")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::Resource")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::MSLVertexAttr")
.whitelisted_type("SPIRV_CROSS_NAMESPACE::MSLResourceBinding")
// TODO: Simplify with glob
.whitelisted_type("ScInternalCompilerBase")
.whitelisted_type("ScInternalCompilerHlsl")
.whitelisted_type("ScInternalCompilerMsl")
.whitelisted_type("ScInternalCompilerGlsl")
.whitelisted_type("ScInternalResult")
.whitelisted_type("ScEntryPoint")
.whitelisted_type("ScCombinedImageSampler")
.whitelisted_type("ScHlslRootConstant")
.whitelisted_type("ScHlslCompilerOptions")
.whitelisted_type("ScMslCompilerOptions")
.whitelisted_type("ScGlslCompilerOptions")
.whitelisted_type("ScResource")
.whitelisted_type("ScResourceArray")
.whitelisted_type("ScShaderResources")
.whitelisted_type("ScSpecializationConstant")
.whitelisted_type("ScType")
.opaque_type("std::.*")
.clang_args(vec![
"-DSPIRV_CROSS_WRAPPER_GLSL",
"-DSPIRV_CROSS_WRAPPER_MSL",
"-DSPIRV_CROSS_WRAPPER_HLSL",
])
.layout_tests(false)
.generate()
.expect("Unable to generate bindings")
.write_to_file(out_path.join("../spirv_cross/src/bindings_wasm.rs"))
.expect("Couldn't write bindings!");
}
| 40.347368 | 82 | 0.632142 |
115ff5d4bfccf9a523712535171a9f6e36214599 | 1,018 | // structs1.rs
// Address all the TODOs to make the tests pass!
struct ColorClassicStruct {
name: String,
hex: String,
}
struct ColorTupleStruct(String, String);
#[derive(Debug)]
struct UnitStruct;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn classic_c_structs() {
// TODO: Instantiate a classic c struct!
let green = ColorClassicStruct{name: "green".to_owned(), hex: "#00FF00".to_owned()};
assert_eq!(green.name, "green");
assert_eq!(green.hex, "#00FF00");
}
#[test]
fn tuple_structs() {
// TODO: Instantiate a tuple struct!
let green = ColorTupleStruct("green".to_owned(), "#00FF00".to_owned());
assert_eq!(green.0, "green");
assert_eq!(green.1, "#00FF00");
}
#[test]
fn unit_structs() {
// TODO: Instantiate a unit struct!
let unit_struct = UnitStruct;
let message = format!("{:?}s are fun!", unit_struct);
assert_eq!(message, "UnitStructs are fun!");
}
}
| 22.622222 | 92 | 0.594303 |
9c547e87215e656853f37e8c5d603658a50c8205 | 2,743 | use std::{thread, time::Duration};
use naia_server::{Event, Server as NaiaServer, ServerAddrs, ServerConfig};
use naia_tickless_demo_shared::{get_shared_config, Protocol, Text};
use naia_empty_world::{EmptyEntity, EmptyWorldRef};
type Server = NaiaServer<Protocol, EmptyEntity>;
pub struct App {
server: Server,
}
impl App {
pub fn new() -> Self {
info!("Naia Tickless Server Demo started");
let server_addresses = ServerAddrs::new(
"127.0.0.1:14191"
.parse()
.expect("could not parse session address/port"),
// IP Address to listen on for UDP WebRTC data channels
"127.0.0.1:14192"
.parse()
.expect("could not parse WebRTC data address/port"),
// The public WebRTC IP address to advertise
"http://127.0.0.1:14192"
);
let mut server_config = ServerConfig::default();
server_config.require_auth = false;
server_config.disconnection_timeout_duration = Duration::from_secs(30);
let mut server = Server::new(server_config, get_shared_config());
server.listen(server_addresses);
App { server }
}
pub fn update(&mut self) {
for event in self.server.receive() {
match event {
Ok(Event::Connection(user_key)) => {
let user_address = self.server.user(&user_key).address();
info!("Naia Server connected to: {}", user_address);
}
Ok(Event::Disconnection(_, user)) => {
info!("Naia Server disconnected from: {}", user.address);
}
Ok(Event::Message(user_key, Protocol::Text(text))) => {
let client_message = text.value.get();
info!("Server recv <- {}", client_message);
let new_message_contents = format!("Server Message ({})", client_message);
info!("Server echo -> {}", new_message_contents);
let message = Text::new(&new_message_contents);
self.server.send_message(&user_key, &message, true);
// Sleep the thread to keep the demo from being unintelligibly fast
let sleep_time = Duration::from_millis(500);
thread::sleep(sleep_time);
}
Ok(Event::Tick) => {
info!("TICK SHOULD NOT HAPPEN!");
}
Err(error) => {
info!("Naia Server error: {}", error);
}
_ => {}
}
}
self.server.send_all_updates(EmptyWorldRef::new());
}
}
| 34.721519 | 94 | 0.532629 |
db10bfb9f3883b99c4fee56b15c4d9b8108caef0 | 11,288 | use wasm_bindgen::prelude::*;
use std::collections::HashMap;
use super::color_conversion;
use wasm_bindgen::JsValue;
use crate::p5_wasm::P5Wasm;
// color/p5.Color.js
#[wasm_bindgen]
pub struct Color {
array: Vec<f64>,
pub(crate) levels: Vec<f64>,
mode: String,
maxes: HashMap< String, Vec<f64> >,
pub(crate) hsba: Option< Vec<f64> >,
pub(crate) hsla: Option< Vec<f64> >,
}
#[wasm_bindgen]
impl Color {
pub fn new(inst: &P5Wasm, v1: JsValue, v2: JsValue, v3: JsValue, v4: JsValue) -> Color {
if inst.color_mode != "rgb" && inst.color_mode != "hsb" && inst.color_mode != "hsl" {
panic!("{} is an invalid colorMode.", inst.color_mode);
}
let arr = parse_inputs(inst.color_mode.clone(), inst.color_maxes.clone(), v1, v2, v3, v4);
Color {
array: arr.clone(),
levels: calculate_levels(&arr),
mode: inst.color_mode.clone(),
maxes: inst.color_maxes.clone(),
hsba: None,
hsla: None,
}
}
pub fn to_string(&mut self, f: JsValue) -> String {
let format;
let val;
if f.is_undefined() {
format = "rgba";
} else if f.is_string() {
val = f.as_string().unwrap();
format = &val;
} else {
panic!("Invalid value passed to to_string() function");
}
let arr = &self.array;
match format {
"#rrggbb" => {
                let r = match Some(format_radix((arr[0] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
                let g = match Some(format_radix((arr[1] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
                let b = match Some(format_radix((arr[2] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
format!("#{}{}{}", r, g, b)
}
"#rrggbbaa" => {
                let r = match Some(format_radix((arr[0] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
                let g = match Some(format_radix((arr[1] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
                let b = match Some(format_radix((arr[2] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
                let a = match Some(format_radix((arr[3] * 255.0).round() as u32, 16)) {
Some(x) if x.chars().count() < 2 => "0".to_owned() + &x,
Some(x) => x,
None => String::from(""),
};
format!("#{}{}{}{}", r, g, b, a)
}
"#rgb" => {
                let r = format_radix((arr[0] * 15.0).round() as u32, 16);
                let g = format_radix((arr[1] * 15.0).round() as u32, 16);
                let b = format_radix((arr[2] * 15.0).round() as u32, 16);
format!("#{}{}{}", r, g, b)
}
"#rgba" => {
                let r = format_radix((arr[0] * 15.0).round() as u32, 16);
                let g = format_radix((arr[1] * 15.0).round() as u32, 16);
                let b = format_radix((arr[2] * 15.0).round() as u32, 16);
                let a = format_radix((arr[3] * 15.0).round() as u32, 16);
format!("#{}{}{}{}", r, g, b, a)
}
"rgb" => {
format!("rgb({}, {}, {})", arr[0] * 255.0, arr[1] * 255.0, arr[2] * 255.0)
}
"rgb%" => {
let r = (arr[0] * 100.0).to_precision(3);
let g = (arr[1] * 100.0).to_precision(3);
let b = (arr[2] * 100.0).to_precision(3);
format!("rgb({}%, {}%, {}%)", r, g, b)
}
"rgba%" => {
let r = (arr[0] * 100.0).to_precision(3);
let g = (arr[1] * 100.0).to_precision(3);
let b = (arr[2] * 100.0).to_precision(3);
                let a = (arr[3] * 100.0).to_precision(3);
format!("rgba({}%, {}%, {}%, {}%)", r, g, b, a)
}
"hsb" | "hsv" => {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
let maxes = self.maxes.get("hsb").unwrap();
format!("hsb({}, {}, {})", hsba[0] * maxes[0], hsba[1] * maxes[1], hsba[2] * maxes[2])
}
"hsb%" | "hsv%" => {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
let h = (hsba[0] * 100.0).to_precision(3);
let s = (hsba[1] * 100.0).to_precision(3);
let b = (hsba[2] * 100.0).to_precision(3);
format!("hsb({}%, {}%, {}%)", h, s, b)
}
"hsba" | "hsva" => {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
let maxes = self.maxes.get("hsb").unwrap();
format!("hsba({}, {}, {}, {})", hsba[0] * maxes[0], hsba[1] * maxes[1], hsba[2] * maxes[2], hsba[3] * maxes[3])
}
"hsba%" | "hsva%" => {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
let h = (hsba[0] * 100.0).to_precision(3);
let s = (hsba[1] * 100.0).to_precision(3);
let b = (hsba[2] * 100.0).to_precision(3);
                let a = (hsba[3] * 100.0).to_precision(3);
format!("hsba({}%, {}%, {}%, {}%)", h, s, b, a)
}
"hsl" => {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
let maxes = self.maxes.get("hsl").unwrap();
format!("hsl({}, {}, {})", hsla[0] * maxes[0], hsla[1] * maxes[1], hsla[2] * maxes[2])
}
"hsl%" => {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
let h = (hsla[0] * 100.0).to_precision(3);
let s = (hsla[1] * 100.0).to_precision(3);
let l = (hsla[2] * 100.0).to_precision(3);
format!("hsl({}%, {}%, {}%)", h, s, l)
}
"hsla" => {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
let maxes = self.maxes.get("hsl").unwrap();
format!("hsla({}, {}, {}, {})", hsla[0] * maxes[0], hsla[1] * maxes[1], hsla[2] * maxes[2], hsla[3] * maxes[3])
}
"hsla%" => {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
let h = (hsla[0] * 100.0).to_precision(3);
let s = (hsla[1] * 100.0).to_precision(3);
let l = (hsla[2] * 100.0).to_precision(3);
                let a = (hsla[3] * 100.0).to_precision(3);
format!("hsla({}%, {}%, {}%, {}%)", h, s, l, a)
}
"rgba" | _ => {
format!("rgba({}, {}, {}, {})", (arr[0] * 255.0).round(), (arr[1] * 255.0).round(), (arr[2] * 255.0).round(), arr[3])
}
}
}
pub fn set_red(&mut self, new_red: f64) {
self.array[0] = new_red / self.maxes.get("rgb").unwrap()[0];
self.levels = calculate_levels(&self.array);
}
pub fn set_green(&mut self, new_green: f64) {
self.array[1] = new_green / self.maxes.get("rgb").unwrap()[1];
self.levels = calculate_levels(&self.array);
}
pub fn set_blue(&mut self, new_blue: f64) {
self.array[2] = new_blue / self.maxes.get("rgb").unwrap()[2];
self.levels = calculate_levels(&self.array);
}
pub fn set_alpha(&mut self, new_alpha: f64) {
self.array[3] = new_alpha / self.maxes.get("rgb").unwrap()[3];
self.levels = calculate_levels(&self.array);
}
pub fn red(&self) -> f64 {
self.array[0] * self.maxes.get("rgb").unwrap()[0]
}
pub fn green(&self) -> f64 {
self.array[1] * self.maxes.get("rgb").unwrap()[1]
}
pub fn blue(&self) -> f64 {
self.array[2] * self.maxes.get("rgb").unwrap()[2]
}
pub fn alpha(&self) -> f64 {
self.array[3] * self.maxes.get(&self.mode).unwrap()[3]
}
pub fn hue(&mut self) -> f64 {
let arr = &self.array;
if self.mode == "hsb" {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
return hsba[0] * self.maxes.get("hsb").unwrap()[0];
} else {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
return hsla[0] * self.maxes.get("hsb").unwrap()[0];
}
}
pub fn saturation(&mut self) -> f64 {
let arr = &self.array;
if self.mode == "hsb" {
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
return hsba[1] * self.maxes.get("hsb").unwrap()[1];
} else {
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
return hsla[1] * self.maxes.get("hsb").unwrap()[1];
}
}
pub fn brightness(&mut self) -> f64 {
let arr = &self.array;
if self.hsba.is_none() {
self.hsba = Some(color_conversion::rgba_to_hsba(arr.to_vec()));
}
let hsba = self.hsba.as_ref().unwrap();
return hsba[2] * self.maxes.get("hsb").unwrap()[2];
}
pub fn lightness(&mut self) -> f64 {
let arr = &self.array;
if self.hsla.is_none() {
self.hsla = Some(color_conversion::rgba_to_hsla(arr.to_vec()));
}
let hsla = self.hsla.as_ref().unwrap();
return hsla[2] * self.maxes.get("hsl").unwrap()[2];
}
}
fn parse_inputs(mode: String, maxes:HashMap< String, Vec<f64> >, r: JsValue, g: JsValue, b: JsValue, a: JsValue) -> Vec<f64> {
let maxes = maxes.get(&mode).unwrap();
let mut results: Vec<f64> = vec!();
if g.is_undefined() {
        // One argument (grayscale value or color string): not yet handled here
} else if b.is_undefined() {
        // Two arguments (grayscale value and alpha): not yet handled here
} else {
// Three or four arguments
results.push((r.as_f64().unwrap() / maxes[0]).constrain(0.0, 1.0));
results.push((g.as_f64().unwrap() / maxes[1]).constrain(0.0, 1.0));
results.push((b.as_f64().unwrap() / maxes[2]).constrain(0.0, 1.0));
if a.is_undefined() {
results.push(1.0);
} else {
results.push((a.as_f64().unwrap() / maxes[3]).constrain(0.0, 1.0));
}
// Convert from current color mode to RGBA
if mode == "hsl" {
results = color_conversion::hsla_to_rgba(results);
} else if mode == "hsb" {
results = color_conversion::hsba_to_rgba(results);
}
}
results
}
fn calculate_levels(arr: &Vec<f64>) -> Vec<f64> {
arr.iter()
.map(|x| {
(x * 255.0).round()
})
.collect()
}
fn format_radix(mut x: u32, radix: u32) -> String {
let mut result = vec![];
loop {
let m = x % radix;
x = x / radix;
// will panic if you use a bad radix (< 2 or > 36).
result.push(std::char::from_digit(m, radix).unwrap());
if x == 0 {
break;
}
}
result.into_iter().rev().collect()
}
trait ToPrecision {
fn to_precision(&self, n: u32) -> String;
}
impl ToPrecision for f64 {
fn to_precision(&self, n: u32) -> String {
if *self == 0.0 {
return 0.0.to_string();
}
let d = self.abs().log10().ceil();
let power = n - d as u32;
let magnitude = 10_i32.pow(power);
let shifted = (*self * magnitude as f64).round();
let ret = shifted / magnitude as f64;
ret.to_string()
}
}
trait Constrain {
fn constrain(&self, min: f64, max: f64) -> f64;
}
impl Constrain for f64 {
fn constrain(&self, min: f64, max: f64) -> f64 {
let mut result = self;
if self < &min {
result = &min;
} else if self > &max {
result = &max;
}
*result
}
} | 27.46472 | 126 | 0.557938 |
cce0f1023729fe944e72bfe36e5b3664a399ea66 | 4,996 | use super::*;
use crate::test::freeze_timeout;
#[test]
fn with_different_process_sends_message_when_timer_expires() {
TestRunner::new(Config::with_source_file(file!()))
.run(
&(milliseconds(), strategy::process()).prop_flat_map(|(milliseconds, arc_process)| {
(
Just(milliseconds),
Just(arc_process.clone()),
strategy::term(arc_process),
)
}),
|(milliseconds, arc_process, message)| {
let time = arc_process.integer(milliseconds).unwrap();
let destination_arc_process = test::process::child(&arc_process);
let destination = destination_arc_process.pid_term();
let options = options(&arc_process);
let start_time_in_milliseconds = freeze_timeout();
let result = native(arc_process.clone(), time, destination, message, options);
prop_assert!(
result.is_ok(),
"Timer reference not returned. Got {:?}",
result
);
let timer_reference = result.unwrap();
prop_assert!(timer_reference.is_boxed_local_reference());
let timeout_message = arc_process
.tuple_from_slice(&[Atom::str_to_term("timeout"), timer_reference, message])
.unwrap();
prop_assert!(!has_message(&destination_arc_process, timeout_message));
freeze_at_timeout(start_time_in_milliseconds + milliseconds + 1);
prop_assert!(has_message(&destination_arc_process, timeout_message));
Ok(())
},
)
.unwrap();
}
#[test]
fn with_same_process_sends_message_when_timer_expires() {
TestRunner::new(Config::with_source_file(file!()))
.run(
&(milliseconds(), strategy::process()).prop_flat_map(|(milliseconds, arc_process)| {
(
Just(milliseconds),
Just(arc_process.clone()),
strategy::term(arc_process),
)
}),
|(milliseconds, arc_process, message)| {
let time = arc_process.integer(milliseconds).unwrap();
let destination = arc_process.pid_term();
let options = options(&arc_process);
let start_time_in_milliseconds = freeze_timeout();
let result = native(arc_process.clone(), time, destination, message, options);
prop_assert!(
result.is_ok(),
"Timer reference not returned. Got {:?}",
result
);
let timer_reference = result.unwrap();
prop_assert!(timer_reference.is_boxed_local_reference());
let timeout_message = arc_process
.tuple_from_slice(&[Atom::str_to_term("timeout"), timer_reference, message])
.unwrap();
prop_assert!(!has_message(&arc_process, timeout_message));
freeze_at_timeout(start_time_in_milliseconds + milliseconds + 1);
prop_assert!(has_message(&arc_process, timeout_message));
Ok(())
},
)
.unwrap();
}
#[test]
fn without_process_sends_nothing_when_timer_expires() {
TestRunner::new(Config::with_source_file(file!()))
.run(
&(milliseconds(), strategy::process()).prop_flat_map(|(milliseconds, arc_process)| {
(
Just(milliseconds),
Just(arc_process.clone()),
strategy::term(arc_process),
)
}),
|(milliseconds, arc_process, message)| {
let time = arc_process.integer(milliseconds).unwrap();
let destination = Pid::next_term();
let options = options(&arc_process);
let start_time_in_milliseconds = freeze_timeout();
let result = native(arc_process.clone(), time, destination, message, options);
prop_assert!(
result.is_ok(),
"Timer reference not returned. Got {:?}",
result
);
let timer_reference = result.unwrap();
prop_assert!(timer_reference.is_boxed_local_reference());
let timeout_message = arc_process
.tuple_from_slice(&[Atom::str_to_term("timeout"), timer_reference, message])
.unwrap();
prop_assert!(!has_message(&arc_process, timeout_message));
freeze_at_timeout(start_time_in_milliseconds + milliseconds + 1);
prop_assert!(!has_message(&arc_process, timeout_message));
Ok(())
},
)
.unwrap();
}
| 34.455172 | 96 | 0.533026 |
d9f32435326887a7255ad8bcb9eadd9027e6fb87 | 3,658 | //! Program state processor
use {
crate::{
instruction::AssociatedTokenAccountInstruction,
tools::account::{create_pda_account, get_account_len},
*,
},
borsh::BorshDeserialize,
solana_program::{
account_info::{next_account_info, AccountInfo},
entrypoint::ProgramResult,
msg,
program::invoke,
program_error::ProgramError,
pubkey::Pubkey,
rent::Rent,
sysvar::Sysvar,
},
};
/// Instruction processor
pub fn process_instruction(
program_id: &Pubkey,
accounts: &[AccountInfo],
input: &[u8],
) -> ProgramResult {
let instruction = if input.is_empty() {
AssociatedTokenAccountInstruction::Create
} else {
AssociatedTokenAccountInstruction::try_from_slice(input)
.map_err(|_| ProgramError::InvalidInstructionData)?
};
msg!("{:?}", instruction);
match instruction {
AssociatedTokenAccountInstruction::Create {} => {
process_create_associated_token_account(program_id, accounts)
}
}
}
/// Processes CreateAssociatedTokenAccount instruction
pub fn process_create_associated_token_account(
program_id: &Pubkey,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let funder_info = next_account_info(account_info_iter)?;
let associated_token_account_info = next_account_info(account_info_iter)?;
let wallet_account_info = next_account_info(account_info_iter)?;
let spl_token_mint_info = next_account_info(account_info_iter)?;
let system_program_info = next_account_info(account_info_iter)?;
let spl_token_program_info = next_account_info(account_info_iter)?;
let spl_token_program_id = spl_token_program_info.key;
let rent = Rent::get()?;
let (associated_token_address, bump_seed) = get_associated_token_address_and_bump_seed_internal(
wallet_account_info.key,
spl_token_mint_info.key,
program_id,
spl_token_program_id,
);
if associated_token_address != *associated_token_account_info.key {
msg!("Error: Associated address does not match seed derivation");
return Err(ProgramError::InvalidSeeds);
}
let associated_token_account_signer_seeds: &[&[_]] = &[
&wallet_account_info.key.to_bytes(),
&spl_token_program_id.to_bytes(),
&spl_token_mint_info.key.to_bytes(),
&[bump_seed],
];
let account_len = get_account_len(
spl_token_mint_info,
spl_token_program_info,
&[spl_token::extension::ExtensionType::ImmutableOwner],
)?;
create_pda_account(
funder_info,
&rent,
account_len,
spl_token_program_id,
system_program_info,
associated_token_account_info,
associated_token_account_signer_seeds,
)?;
msg!("Initialize the associated token account");
invoke(
&spl_token::instruction::initialize_immutable_owner(
spl_token_program_id,
associated_token_account_info.key,
)?,
&[
associated_token_account_info.clone(),
spl_token_program_info.clone(),
],
)?;
invoke(
&spl_token::instruction::initialize_account3(
spl_token_program_id,
associated_token_account_info.key,
spl_token_mint_info.key,
wallet_account_info.key,
)?,
&[
associated_token_account_info.clone(),
spl_token_mint_info.clone(),
wallet_account_info.clone(),
spl_token_program_info.clone(),
],
)
}
| 30.231405 | 100 | 0.66047 |
79b16622632eff50b265bc94abc2062f3ac14cd6 | 8,250 | //! This module provides a token source (`GetToken`) that obtains tokens for service accounts.
//! Service accounts are usually used by software (i.e., non-human actors) to get access to
//! resources. Currently, this module only works with RS256 JWTs, which makes it at least suitable for
//! authentication with Google services.
//!
//! Resources:
//! - [Using OAuth 2.0 for Server to Server
//! Applications](https://developers.google.com/identity/protocols/OAuth2ServiceAccount)
//! - [JSON Web Tokens](https://jwt.io/)
//!
//! Copyright (c) 2016 Google Inc ([email protected]).
//!
use crate::error::Error;
use crate::types::TokenInfo;
use std::io;
use actix_web::client as awc;
use rustls::{
self,
internal::pemfile,
sign::{self, SigningKey},
PrivateKey,
};
use serde::{Deserialize, Serialize};
use url::form_urlencoded;
const GRANT_TYPE: &str = "urn:ietf:params:oauth:grant-type:jwt-bearer";
const GOOGLE_RS256_HEAD: &str = r#"{"alg":"RS256","typ":"JWT"}"#;
/// Encodes s as Base64
fn append_base64<T: AsRef<[u8]> + ?Sized>(
s: &T,
out: &mut String,
) {
base64::encode_config_buf(s, base64::URL_SAFE, out)
}
/// Decode a PKCS8 formatted RSA key.
fn decode_rsa_key(pem_pkcs8: &str) -> Result<PrivateKey, io::Error> {
let private_keys = pemfile::pkcs8_private_keys(&mut pem_pkcs8.as_bytes());
match private_keys {
Ok(mut keys) if !keys.is_empty() => {
keys.truncate(1);
Ok(keys.remove(0))
}
Ok(_) => Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Not enough private keys in PEM",
)),
Err(_) => Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Error reading key from PEM",
)),
}
}
/// JSON schema of secret service account key. You can obtain the key from
/// the Cloud Console at https://console.cloud.google.com/.
///
/// You can use `helpers::read_service_account_key()` as a quick way to read a JSON client
/// secret into a ServiceAccountKey.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ServiceAccountKey {
#[serde(rename = "type")]
/// key_type
pub key_type: Option<String>,
/// project_id
pub project_id: Option<String>,
/// private_key_id
pub private_key_id: Option<String>,
/// private_key
pub private_key: String,
/// client_email
pub client_email: String,
/// client_id
pub client_id: Option<String>,
/// auth_uri
pub auth_uri: Option<String>,
/// token_uri
pub token_uri: String,
/// auth_provider_x509_cert_url
pub auth_provider_x509_cert_url: Option<String>,
/// client_x509_cert_url
pub client_x509_cert_url: Option<String>,
}
/// Permissions requested for a JWT.
/// See https://developers.google.com/identity/protocols/OAuth2ServiceAccount#authorizingrequests.
#[derive(Serialize, Debug)]
struct Claims<'a> {
iss: &'a str,
aud: &'a str,
exp: i64,
iat: i64,
#[serde(rename = "sub")]
subject: Option<&'a str>,
scope: String,
}
impl<'a> Claims<'a> {
fn new<T>(
key: &'a ServiceAccountKey,
scopes: &[T],
subject: Option<&'a str>,
) -> Self
where
T: AsRef<str>,
{
let iat = chrono::Utc::now().timestamp();
let expiry = iat + 3600 - 5; // Max validity is 1h.
let scope = crate::helper::join(scopes, " ");
Claims {
iss: &key.client_email,
aud: &key.token_uri,
exp: expiry,
iat,
subject,
scope,
}
}
}
/// A JSON Web Token ready for signing.
pub(crate) struct JWTSigner {
signer: Box<dyn rustls::sign::Signer>,
}
impl JWTSigner {
fn new(private_key: &str) -> Result<Self, io::Error> {
let key = decode_rsa_key(private_key)?;
let signing_key = sign::RSASigningKey::new(&key)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "Couldn't initialize signer"))?;
let signer = signing_key
.choose_scheme(&[rustls::SignatureScheme::RSA_PKCS1_SHA256])
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Couldn't choose signing scheme"))?;
Ok(JWTSigner { signer })
}
fn sign_claims(
&self,
claims: &Claims,
) -> Result<String, rustls::TLSError> {
let mut jwt_head = Self::encode_claims(claims);
let signature = self.signer.sign(jwt_head.as_bytes())?;
jwt_head.push_str(".");
append_base64(&signature, &mut jwt_head);
Ok(jwt_head)
}
/// Encodes the first two parts (header and claims) to base64 and assembles them into a form
/// ready to be signed.
fn encode_claims(claims: &Claims) -> String {
let mut head = String::new();
append_base64(GOOGLE_RS256_HEAD, &mut head);
head.push_str(".");
append_base64(&serde_json::to_string(&claims).unwrap(), &mut head);
head
}
}
pub struct ServiceAccountFlowOpts {
pub(crate) key: ServiceAccountKey,
pub(crate) subject: Option<String>,
}
/// ServiceAccountFlow can fetch oauth tokens using a service account.
pub struct ServiceAccountFlow {
key: ServiceAccountKey,
subject: Option<String>,
signer: JWTSigner,
}
impl ServiceAccountFlow {
pub(crate) fn new(opts: ServiceAccountFlowOpts) -> Result<Self, io::Error> {
let signer = JWTSigner::new(&opts.key.private_key)?;
Ok(ServiceAccountFlow {
key: opts.key,
subject: opts.subject,
signer,
})
}
/// Send a request for a new Bearer token to the OAuth provider.
pub(crate) async fn token<T>(
&self,
client: &awc::Client,
scopes: &[T],
) -> Result<TokenInfo, Error>
where
T: AsRef<str>,
{
let claims = Claims::new(&self.key, scopes, self.subject.as_ref().map(|x| x.as_str()));
let signed = self.signer.sign_claims(&claims).map_err(|_| {
Error::LowLevelError(io::Error::new(
io::ErrorKind::Other,
"unable to sign claims",
))
})?;
let rqbody = form_urlencoded::Serializer::new(String::new())
.extend_pairs(&[("grant_type", GRANT_TYPE), ("assertion", signed.as_str())])
.finish();
let mut resp = client
.post(&self.key.token_uri)
.header("Content-Type", "application/x-www-form-urlencoded")
.send_body(rqbody)
.await?;
let body = resp.body().await?;
log::debug!("received response; body: {:?}", body.as_ref());
TokenInfo::from_json(body.as_ref())
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helper::read_service_account_key;
// Valid but deactivated key.
const TEST_PRIVATE_KEY_PATH: &'static str = "examples/Sanguine-69411a0c0eea.json";
// Uncomment this test to verify that we can successfully obtain tokens.
//#[tokio::test]
#[allow(dead_code)]
async fn test_service_account_e2e() {
let key = read_service_account_key(TEST_PRIVATE_KEY_PATH)
.await
.unwrap();
let acc = ServiceAccountFlow::new(ServiceAccountFlowOpts { key, subject: None }).unwrap();
        let client = awc::Client::default();
println!(
"{:?}",
acc
.token(&client, &["https://www.googleapis.com/auth/pubsub"])
.await
);
}
#[tokio::test]
async fn test_jwt_initialize_claims() {
let key = read_service_account_key(TEST_PRIVATE_KEY_PATH)
.await
.unwrap();
let scopes = vec!["scope1", "scope2", "scope3"];
let claims = Claims::new(&key, &scopes, None);
assert_eq!(
claims.iss,
"oauth2-public-test@sanguine-rhythm-105020.iam.gserviceaccount.com".to_string()
);
assert_eq!(claims.scope, "scope1 scope2 scope3".to_string());
assert_eq!(
claims.aud,
"https://accounts.google.com/o/oauth2/token".to_string()
);
assert!(claims.exp > 1000000000);
assert!(claims.iat < claims.exp);
assert_eq!(claims.exp - claims.iat, 3595);
}
#[tokio::test]
async fn test_jwt_sign() {
let key = read_service_account_key(TEST_PRIVATE_KEY_PATH)
.await
.unwrap();
let scopes = vec!["scope1", "scope2", "scope3"];
let signer = JWTSigner::new(&key.private_key).unwrap();
let claims = Claims::new(&key, &scopes, None);
let signature = signer.sign_claims(&claims);
assert!(signature.is_ok());
let signature = signature.unwrap();
assert_eq!(
signature.split(".").nth(0).unwrap(),
"eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9"
);
}
}
| 28.745645 | 102 | 0.653818 |
0ea494851a8e8f86e11d5a8cb15f3d7e598ad672 | 8,510 | use pairing::{
CurveAffine,
CurveProjective,
Engine,
PrimeField,
Field,
PrimeFieldRepr
};
use std::sync::Arc;
use std::io;
use bit_vec::{self, BitVec};
use std::iter;
// use futures::{Future};
use super::multicore::Worker;
use super::SynthesisError;
/// An object that builds a source of bases.
pub trait SourceBuilder<G: CurveAffine>: Send + Sync + 'static + Clone {
type Source: Source<G>;
fn new(self) -> Self::Source;
}
/// A source of bases, like an iterator.
pub trait Source<G: CurveAffine> {
/// Parses the element from the source. Fails if the point is at infinity.
fn add_assign_mixed(&mut self, to: &mut <G as CurveAffine>::Projective) -> Result<(), SynthesisError>;
/// Skips `amt` elements from the source, avoiding deserialization.
fn skip(&mut self, amt: usize) -> Result<(), SynthesisError>;
}
impl<G: CurveAffine> SourceBuilder<G> for (Arc<Vec<G>>, usize) {
type Source = (Arc<Vec<G>>, usize);
fn new(self) -> (Arc<Vec<G>>, usize) {
(self.0.clone(), self.1)
}
}
impl<G: CurveAffine> Source<G> for (Arc<Vec<G>>, usize) {
fn add_assign_mixed(&mut self, to: &mut <G as CurveAffine>::Projective) -> Result<(), SynthesisError> {
if self.0.len() <= self.1 {
return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "expected more bases from source").into());
}
if self.0[self.1].is_zero() {
return Err(SynthesisError::UnexpectedIdentity)
}
to.add_assign_mixed(&self.0[self.1]);
self.1 += 1;
Ok(())
}
fn skip(&mut self, amt: usize) -> Result<(), SynthesisError> {
if self.0.len() <= self.1 {
return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "expected more bases from source").into());
}
self.1 += amt;
Ok(())
}
}
pub trait QueryDensity {
/// Returns whether the base exists.
type Iter: Iterator<Item=bool>;
fn iter(self) -> Self::Iter;
fn get_query_size(self) -> Option<usize>;
}
#[derive(Clone)]
pub struct FullDensity;
impl AsRef<FullDensity> for FullDensity {
fn as_ref(&self) -> &FullDensity {
self
}
}
impl<'a> QueryDensity for &'a FullDensity {
type Iter = iter::Repeat<bool>;
fn iter(self) -> Self::Iter {
iter::repeat(true)
}
fn get_query_size(self) -> Option<usize> {
None
}
}
pub struct DensityTracker {
bv: BitVec,
total_density: usize
}
impl<'a> QueryDensity for &'a DensityTracker {
type Iter = bit_vec::Iter<'a>;
fn iter(self) -> Self::Iter {
self.bv.iter()
}
fn get_query_size(self) -> Option<usize> {
Some(self.bv.len())
}
}
impl DensityTracker {
pub fn new() -> DensityTracker {
DensityTracker {
bv: BitVec::new(),
total_density: 0
}
}
pub fn add_element(&mut self) {
self.bv.push(false);
}
pub fn inc(&mut self, idx: usize) {
if !self.bv.get(idx).unwrap() {
self.bv.set(idx, true);
self.total_density += 1;
}
}
pub fn get_total_density(&self) -> usize {
self.total_density
}
}
fn multiexp_inner<Q, D, G, S>(
pool: &Worker,
bases: S,
density_map: D,
exponents: Arc<Vec<<<G::Engine as Engine>::Fr as PrimeField>::Repr>>,
mut skip: u32,
c: u32,
handle_trivial: bool
) -> Box<<G as CurveAffine>::Projective>
where for<'a> &'a Q: QueryDensity,
D: Send + Sync + 'static + Clone + AsRef<Q>,
G: CurveAffine,
S: SourceBuilder<G>
{
// Perform this region of the multiexp
let this = {
let bases = bases.clone();
let exponents = exponents.clone();
let density_map = density_map.clone();
let t = move || -> Result<<G as CurveAffine>::Projective, SynthesisError> {
// Accumulate the result
let mut acc = G::Projective::zero();
// Build a source for the bases
let mut bases = bases.new();
// Create space for the buckets
let mut buckets = vec![<G as CurveAffine>::Projective::zero(); (1 << c) - 1];
let zero = <G::Engine as Engine>::Fr::zero().into_repr();
let one = <G::Engine as Engine>::Fr::one().into_repr();
// Sort the bases into buckets
for (&exp, density) in exponents.iter().zip(density_map.as_ref().iter()) {
if density {
if exp == zero {
bases.skip(1)?;
} else if exp == one {
if handle_trivial {
bases.add_assign_mixed(&mut acc)?;
} else {
bases.skip(1)?;
}
} else {
let mut exp = exp;
exp.shr(skip);
let exp = exp.as_ref()[0] % (1 << c);
if exp != 0 {
bases.add_assign_mixed(&mut buckets[(exp - 1) as usize])?;
} else {
bases.skip(1)?;
}
}
}
}
// Summation by parts
// e.g. 3a + 2b + 1c = a +
// (a) + b +
// ((a) + b) + c
let mut running_sum = G::Projective::zero();
for exp in buckets.into_iter().rev() {
running_sum.add_assign(&exp);
acc.add_assign(&running_sum);
}
Ok(acc)
};
t().unwrap()
};
skip += c;
if skip >= <G::Engine as Engine>::Fr::NUM_BITS {
// There isn't another region.
Box::new(this)
} else {
// There's another region more significant. Calculate and join it with
// this region recursively.
let mut second = multiexp_inner(pool, bases, density_map, exponents, skip, c, false);
for _ in 0..c {
second.double();
}
second.add_assign(&this);
Box::new(
*second
// this.join(multiexp_inner(pool, bases, density_map, exponents, skip, c, false))
// .map(move |(this, mut higher)| {
// for _ in 0..c {
// higher.double();
// }
// higher.add_assign(&this);
// higher
// })
)
}
}
/// Perform multi-exponentiation. The caller is responsible for ensuring the
/// query size is the same as the number of exponents.
pub fn multiexp<Q, D, G, S>(
pool: &Worker,
bases: S,
density_map: D,
exponents: Arc<Vec<<<G::Engine as Engine>::Fr as PrimeField>::Repr>>
) -> Box<<G as CurveAffine>::Projective>
where for<'a> &'a Q: QueryDensity,
D: Send + Sync + 'static + Clone + AsRef<Q>,
G: CurveAffine,
S: SourceBuilder<G>
{
let c = if exponents.len() < 32 {
3u32
} else {
(f64::from(exponents.len() as u32)).ln().ceil() as u32
};
if let Some(query_size) = density_map.as_ref().get_query_size() {
// If the density map has a known query size, it should not be
// inconsistent with the number of exponents.
assert!(query_size == exponents.len());
}
multiexp_inner(pool, bases, density_map, exponents, 0, c, true)
}
#[test]
fn test_with_bls12() {
fn naive_multiexp<G: CurveAffine>(
bases: Arc<Vec<G>>,
exponents: Arc<Vec<<G::Scalar as PrimeField>::Repr>>
) -> G::Projective
{
assert_eq!(bases.len(), exponents.len());
let mut acc = G::Projective::zero();
for (base, exp) in bases.iter().zip(exponents.iter()) {
acc.add_assign(&base.mul(*exp));
}
acc
}
use rand::{self, Rand};
use pairing::bls12_381::Bls12;
const SAMPLES: usize = 1 << 14;
let rng = &mut rand::thread_rng();
let v = Arc::new((0..SAMPLES).map(|_| <Bls12 as Engine>::Fr::rand(rng).into_repr()).collect::<Vec<_>>());
let g = Arc::new((0..SAMPLES).map(|_| <Bls12 as Engine>::G1::rand(rng).into_affine()).collect::<Vec<_>>());
let naive = naive_multiexp(g.clone(), v.clone());
let pool = Worker::new();
let fast = multiexp(
&pool,
(g, 0),
FullDensity,
v
);
assert_eq!(naive, *fast);
}
| 27.275641 | 111 | 0.519624 |
ff44a0a92af90b0e8209e57d28962ba6f2ed0532 | 1,727 |
pub struct IconGrain {
props: crate::Props,
}
impl yew::Component for IconGrain {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M10 12c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zM6 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm0 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm12-8c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2zm-4 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2z"/></svg>
</svg>
}
}
}
| 37.543478 | 542 | 0.551824 |
50c87e54ca764ed477be9576680f9beafb92448b | 10,880 | use super::is_valid_nick;
#[derive(Debug, PartialEq)]
pub enum Error {
MissingCommand,
MissingData,
MissingTarget,
MissingParts,
InvalidNickname,
UnknownCommand,
}
#[derive(Debug, PartialEq, Clone)]
pub enum Command {
Privmsg {
target: String,
data: String,
is_notice: bool,
},
// TODO support multiple channels
Join {
channel: String,
key: Option<String>,
},
// TODO support multiple channels
Part {
channel: String,
reason: Option<String>,
},
Quit {
reason: String,
},
Nick {
nickname: String,
},
Ping {
token: String,
},
Pong {
target: String,
},
Error {
message: String,
},
// TODO CAP and TAG
Other {
command: String,
params: Vec<String>,
},
Reply {
numeric: u16,
params: Vec<String>,
},
}
impl Command {
pub fn parse(input: &str) -> Result<Self, Error> {
let pos = input.find(' ').ok_or_else(|| Error::MissingCommand)?;
let (command, rest) = input.split_at(pos);
let (command, rest) = (command.trim(), rest.trim());
let msg = match command {
"PRIVMSG" | "NOTICE" => {
let (l, r) = rest.split_at(rest.find(':').ok_or_else(|| Error::MissingData)?);
let (target, data) = (l.trim(), &r.trim()[1..]);
if target.is_empty() {
return Err(Error::MissingTarget);
}
if data.is_empty() {
return Err(Error::MissingData);
}
// TODO determine if target is a channel
Command::Privmsg {
target: target.into(),
data: data.into(),
is_notice: command == "NOTICE",
}
}
"JOIN" => {
if rest.is_empty() {
return Err(Error::MissingTarget);
}
let mut parts = rest.split(' ');
let channel = parts
.next()
.ok_or_else(|| Error::MissingTarget)?
.split(',')
.next()
.unwrap()
.to_owned();
let key = parts
.next()
.and_then(|s| s.split(',').next().map(|s| s.to_owned()));
Command::Join { channel, key }
}
"PART" => {
if rest.is_empty() {
return Err(Error::MissingTarget);
}
let mut parts = rest.split(' ');
let channel = parts
.next()
.ok_or_else(|| Error::MissingTarget)?
.split(',')
.next()
.unwrap()
.to_owned();
let reason = parts.next().map(|s| s.to_owned());
Command::Part { channel, reason }
}
"QUIT" => {
if rest.get(0..1) != Some(":") {
return Err(Error::MissingData);
}
Command::Quit {
reason: rest[1..].to_owned(),
}
}
"NICK" => {
if !is_valid_nick(rest) || rest.is_empty() {
return Err(Error::InvalidNickname);
}
Command::Nick {
nickname: rest.to_owned(),
}
}
"PING" => Command::Ping {
token: rest.to_owned(),
},
"PONG" => Command::Pong {
target: rest.to_owned(),
},
"ERROR" => {
if rest.get(0..1) != Some(":") {
return Err(Error::MissingData);
}
Command::Error {
message: rest[1..].to_owned(),
}
}
command => {
let params = if let Some(pos) = rest.find(':') {
let (l, r) = rest.split_at(pos);
let (l, r) = (l.trim(), r.trim());
let r = if r.get(0..1) == Some(":") { &r[1..] } else { r }.to_owned();
if l.is_empty() {
vec![r]
} else {
let mut v = l.split(' ').map(|s| s.to_owned()).collect::<Vec<_>>();
v.push(r);
v
}
} else {
rest.split(' ').map(|s| s.to_owned()).collect::<Vec<_>>()
};
if let Ok(n) = command.parse::<u16>() {
Command::Reply { numeric: n, params }
} else {
Command::Other {
command: command.to_owned(),
params,
}
}
}
};
Ok(msg)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_privmsg() {
let input = "PRIVMSG #testchannel :this is a message";
let command = Command::parse(input);
assert_eq!(
command,
Ok(Command::Privmsg {
target: "#testchannel".into(),
data: "this is a message".into(),
is_notice: false
})
);
let input = "PRIVMSG #testchannel";
let command = Command::parse(input);
assert_eq!(command, Err(Error::MissingData));
let input = "PRIVMSG #testchannel :";
let command = Command::parse(input);
assert_eq!(command, Err(Error::MissingData));
let input = "PRIVMSG :this is a test";
let command = Command::parse(input);
assert_eq!(command, Err(Error::MissingTarget));
}
#[test]
fn parse_notice() {
let input = "NOTICE #testchannel :this is a message";
let command = Command::parse(input);
assert_eq!(
command,
Ok(Command::Privmsg {
target: "#testchannel".into(),
data: "this is a message".into(),
is_notice: true
})
);
}
#[test]
fn parse_join() {
let inputs = &[
("JOIN #test", ("#test", None)),
("JOIN #test,&channel", ("#test", None)),
("JOIN #test,&channel key1", ("#test", Some("key1"))),
("JOIN #test,&channel key1,key2", ("#test", Some("key1"))),
];
for input in inputs {
let command = Command::parse(input.0);
assert_eq!(
command,
Ok(Command::Join {
channel: (input.1).0.into(),
key: (input.1).1.map(|s| s.into()),
})
);
}
let command = Command::parse("JOIN ");
assert_eq!(command, Err(Error::MissingTarget));
}
#[test]
fn parse_part() {
let inputs = &[
("PART #test", ("#test", None)),
("PART #test,&channel", ("#test", None)),
("PART #test,&channel bye", ("#test", Some("bye"))),
];
for input in inputs {
let command = Command::parse(input.0);
assert_eq!(
command,
Ok(Command::Part {
channel: (input.1).0.into(),
reason: (input.1).1.map(|s| s.into()),
})
);
}
let command = Command::parse("PART ");
assert_eq!(command, Err(Error::MissingTarget));
}
#[test]
fn parse_quit() {
let input = "QUIT :this is a quit message";
let command = Command::parse(input);
assert_eq!(
command,
Ok(Command::Quit {
reason: "this is a quit message".into()
})
);
let command = Command::parse("QUIT this is a bad message");
assert_eq!(command, Err(Error::MissingData));
}
#[test]
fn parse_nick() {
let input = "NICK test_user";
let command = Command::parse(input);
assert_eq!(
command,
Ok(Command::Nick {
nickname: "test_user".into()
})
);
let command = Command::parse("NICK ");
assert_eq!(command, Err(Error::InvalidNickname));
}
#[test]
fn parse_ping() {
let inputs = &[("PING test", "test"), ("PING :test", ":test")];
for input in inputs {
assert_eq!(
Command::parse(input.0),
Ok(Command::Ping {
token: input.1.into()
})
);
}
}
#[test]
fn parse_pong() {
let inputs = &[("PONG test", "test"), ("PONG :test", ":test")];
for input in inputs {
assert_eq!(
Command::parse(input.0),
Ok(Command::Pong {
target: input.1.into()
})
);
}
}
#[test]
fn parse_error() {
assert_eq!(
Command::parse("ERROR :test"),
Ok(Command::Error {
message: "test".into()
})
);
assert_eq!(Command::parse("ERROR test"), Err(Error::MissingData));
}
#[test]
fn parse_reply() {
assert_eq!(
Command::parse("001 :Welcome to the Internet Relay Network test!user@localhost"),
Ok(Command::Reply {
numeric: 001,
params: vec!["Welcome to the Internet Relay Network test!user@localhost".into()]
})
);
assert_eq!(
Command::parse("312 user irc.localhost :some info"),
Ok(Command::Reply {
numeric: 312,
params: vec!["user", "irc.localhost", "some info"]
.into_iter()
.map(|s| s.into())
.collect()
})
);
}
#[test]
fn parse_other() {
assert_eq!(
Command::parse("WHOIS eff.org trillian"),
Ok(Command::Other {
command: "WHOIS".into(),
params: vec!["eff.org", "trillian"]
.into_iter()
.map(|s| s.into())
.collect()
})
);
assert_eq!(
Command::parse("USER guest 0 * :Some user"),
Ok(Command::Other {
command: "USER".into(),
params: vec!["guest", "0", "*", "Some user"]
.into_iter()
.map(|s| s.into())
.collect()
})
);
}
}
| 27.474747 | 96 | 0.408915 |
def19990b96f5d2a4d2f227df3934f1805a936a6 | 4,509 | // Copyright (c) 2015 Daniel Grunwald
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use std::sync::{Once, ONCE_INIT};
use ffi;
use python::Python;
static START: Once = ONCE_INIT;
/// Prepares the use of python in a free-threaded context.
///
/// If the python interpreter is not already initialized, this function
/// will initialize it with disabled signal handling
/// (python will not raise the `KeyboardInterrupt` exception).
/// Python signal handling depends on the notion of a 'main thread', which must be
/// the thread that initializes the python interpreter.
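///
/// # Example
///
/// A minimal usage sketch (assuming this crate is linked as `cpython`, which
/// re-exports this function and `GILGuard` at the crate root):
///
/// ```no_run
/// cpython::prepare_freethreaded_python();
/// // Any thread may now acquire the GIL and call into the interpreter.
/// let gil = cpython::GILGuard::acquire();
/// let _py = gil.python();
/// // ... use the `Python` token to interact with the runtime ...
/// ```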
pub fn prepare_freethreaded_python() {
// Protect against race conditions when python is not yet initialized
// and multiple threads concurrently call 'prepare_freethreaded_python()'.
// Note that we do not protect against concurrent initialization of the python runtime
// by other users of the python C API.
START.call_once(|| unsafe {
if ffi::Py_IsInitialized() != 0 {
// If python is already initialized, we expect python threading to also be initialized,
// as we can't make the existing python main thread acquire the GIL.
assert!(ffi::PyEval_ThreadsInitialized() != 0);
} else {
// If python isn't initialized yet, we expect that python threading isn't initialized either.
assert!(ffi::PyEval_ThreadsInitialized() == 0);
// Initialize python.
// We use Py_InitializeEx() with initsigs=0 to disable Python signal handling.
// Signal handling depends on the notion of a 'main thread', which doesn't exist in this case.
// Note that the 'main thread' notion in python isn't documented properly;
// and running python without one is not officially supported.
ffi::Py_InitializeEx(0);
ffi::PyEval_InitThreads();
// PyEval_InitThreads() will acquire the GIL,
// but we don't want to hold it at this point
// (it's not acquired in the other code paths)
// So immediately release the GIL:
let _thread_state = ffi::PyEval_SaveThread();
// Note that the PyThreadState returned by PyEval_SaveThread is also held in TLS by the python runtime,
// and will be restored by PyGILState_Ensure.
}
});
}
/// RAII type that represents an acquired GIL.
#[must_use]
pub struct GILGuard {
gstate: ffi::PyGILState_STATE
}
/// GILGuard is not Send because the GIL must be released
/// by the same thread that acquired it.
impl !Send for GILGuard {}
/// The Drop implementation for GILGuard will release the GIL.
impl Drop for GILGuard {
fn drop(&mut self) {
unsafe { ffi::PyGILState_Release(self.gstate) }
}
}
impl GILGuard {
/// Acquires the global interpreter lock, which allows access to the Python runtime.
/// If the python runtime is not already initialized, this function will initialize it.
/// Note that in this case, the python runtime will not have any main thread, and will
/// not deliver signals like KeyboardInterrupt.
pub fn acquire() -> GILGuard {
::pythonrun::prepare_freethreaded_python();
let gstate = unsafe { ffi::PyGILState_Ensure() }; // acquire GIL
GILGuard { gstate: gstate }
}
/// Retrieves the marker type that proves that the GIL was acquired.
pub fn python<'p>(&'p self) -> Python<'p> {
unsafe { Python::assume_gil_acquired() }
}
}
| 46.484536 | 115 | 0.694389 |
ed808f5dabfaa38c6aecf1fe48fb1620ba477a74 | 13,509 | #![allow(unused_braces)]
//! # Direct Memory Access Controller
//!
//! This library provides a type-safe API with compile-time guarantees
//! that the peripheral and individual DMA channels are correctly configured
//! before launching a DMA transfer.
//!
//! This module currently supports most basic DMA
//! functions, including memory-to-memory,
//! memory-to-peripheral, peripheral-to-memory,
//! and peripheral-to-peripheral transfers.
//! One-shot and circular transfers are supported. More complex
//! transfer configurations, including multi-buffer
//! (linked-list descriptor) transfers, are not currently supported.
//!
//! Transfers are supported for `i8`, `u8`, `i16`, `u16`, `i32`, `u32` and `f32`
//! beat sizes.
//!
//! # Enabling DMA support
//!
//! You must enable the `dma` feature in your board support crate
//! or final executable.
//!
//! Add this to your `Cargo.toml`:
//! ```
//! [features]
//! dma = ["atsamd-hal/dma"]
//! ```
//!
//! # Channels and RAM
//!
//! Using DMA channels require a certain amount of RAM - 32 bytes per channel,
//! to be exact. RAM will be not allocated unless the `dma` feature is enabled
//! for the HAL. By default, half the channels available on the chip are
//! enabled. If you need all DMA channels enabled, enable the `max-channels`
//! feature in your board support crate or final executable.
//!
//! `Cargo.toml`
//! ```
//! [features]
//! dma = ["atsamd-hal/dma"]
//! max-channels = ["dma", "atsamd-hal/max-channels"]
//! ```
//!
//! RAM usage per chip family:
//!
//! * `ATSAMD11` - 3 channels (default): 96 bytes
//!
//! * `ATSAMD11` - 6 channels (max): 192 bytes
//!
//! * `ATSAMD21` - 6 channels (default): 192 bytes
//!
//! * `ATSAMD21`: - 12 channels (max): 384 bytes
//!
//! * `ATSAMD51/ATSAME5x`: - 16 channels (default): 512 bytes
//!
//! * `ATSAMD51/ATSAME5x`: - 32 channels (max): 1024 bytes
//!
//! # Priority levels and Arbitration
//!
//! The DMAC features 4 priority levels. Level 3 has the highest priority
//! and level 0 has the lowest. Each channel can be assigned to one priority
//! level. If two channels with the same priority level are requested to
//! execute a transfer at the same time, the lowest channel number will have
//! priority (in the default, ie static, arbitration scheme).
//!
//! By default, all priority levels are enabled when initializing the DMAC
//! (see [`DmaController::init`]). Levels
//! can be enabled or disabled through the
//! [`DmaController::enable_levels`] and
//! [`DmaController::disable_levels`] methods. These methods must be supplied a
//! [`PriorityLevelMask`].
//!
//! Round-Robin Arbitration can be enabled for multiple priority levels
//! simultaneously by using the
//! [`DmaController::round_robin_arbitration`] and
//! [`DmaController::static_arbitration`] methods. These methods must be
//! supplied a [`RoundRobinMask`]. By default, all priority levels are
//! initialized with a static arbitration scheme. See ATSAMD21 datasheet section
//! 19.6.2.4 for more information.
//!
//! # Interrupts
//!
//! This driver does not use or manage interrupts issued by the DMAC. Individual
//! channels can be configured to generate interrupts when the transfer is
//! complete, an error is detected or the channel is suspended. However, these
//! interrupts will not be triggered unless the DMAC interrupt is unmasked in
//! the NVIC. You will be responsible for clearing the interrupt flags in the
//! ISR.
//!
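//! A minimal sketch of unmasking the DMAC interrupt (this assumes a SAMD21
//! target whose PAC exposes a single `Interrupt::DMAC` line; SAMD51/SAME5x
//! parts expose several `DMAC_n` interrupt lines instead):
//!
//! ```
//! use cortex_m::peripheral::NVIC;
//!
//! // Safety: unmasking interrupts can break priority-based critical sections;
//! // none are in use here. `Interrupt` comes from the target PAC crate.
//! unsafe {
//!     NVIC::unmask(pac::Interrupt::DMAC);
//! }
//! // The DMAC interrupt handler is then responsible for reading and clearing
//! // the channel interrupt flags.
//! ```
//!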
//! # About static lifetimes
//!
//! The safe API this driver offers requires all buffers (source and
//! destination) to have `'static` lifetimes. This is because
//! [`mem::forget`](core::mem::forget) is a safe API, and therefore relying on
//! [`mem::drop`](core::mem::drop) to terminate or abort a transfer
//! does not guarantee the transfer will be terminated (specifically if
//! [`mem::forget`](core::mem::forget) is called on a `Transfer` containing
//! a `Channel<Id, Busy>`). This could cause the compiler to reclaim
//! stack-allocated buffers for reuse while the DMAC is still writing to/reading
//! from them! Needless to say that is very unsafe.
//! Refer [here](https://docs.rust-embedded.org/embedonomicon/dma.html#memforget)
//! or [here](https://blog.japaric.io/safe-dma/) for more information. You may choose to forego
//! the `'static` lifetimes by using the unsafe API and the
//! [`Transfer::new_unchecked`](transfer::Transfer::new_unchecked) method.
//!
//! # Unsafe API
//!
//! This driver also offers an `unsafe` API through the
//! [`Transfer::new_unchecked`] method. It
//! does not enforce `'static` lifetimes, and allow using buffers of different
//! lengths. If you choose to use these methods, you MUST prove that
//! a `Transfer` containing a `Channel<Id, Busy>` will NEVER be dropped. You
//! *must* call `wait()` or `stop()` manually on every
//! `Transfer` that has been created using the unsafe API. No destructor or
//! `Drop` implementation is offered for `Transfer`s.
//!
//! Additionally, you can (unsafely) implement your own buffer types through the
//! unsafe [`Buffer`](transfer::Buffer) trait.
//!
//! # Example
//! ```
//! let mut peripherals = Peripherals::take().unwrap();
//! let mut dmac = DmaController::init(peripherals.DMAC, &mut peripherals.PM);
//! // Get individual handles to DMA channels
//! let mut channels = dmac.split();
//!
//! // Initialize DMA Channel 0
//! let chan0 = channels.0.init(PriorityLevel::LVL0, false, &mut dmac);
//!
//! // Setup a DMA transfer (memory-to-memory -> incrementing source, incrementing destination)
//! // NOTE: buf_src and buf_dest should be either:
//! // &'static mut T, &'static mut [T], or &'static mut [T; N] where T: BeatSize
//! let xfer = Transfer::new(chan0, buf_src, buf_dest, false).begin(
//! &mut dmac,
//! TriggerSource::DISABLE,
//! TriggerAction::BLOCK,
//! );
//!
//! // Wait for transfer to complete and grab resulting buffers
//! let (chan0, buf_src, buf_dest, _) = xfer.wait(&mut dmac);
//!
//! // (Optional) free the [`DmaController`] struct and return the underlying PAC struct
//! channels.0 = chan0.into();
//! let dmac = dmac.free(channels, &mut peripherals.PM);
//! ```
//!
//! # [`Transfer`] recycling
//!
//! A common use-case with DMAC transfers is to trigger a new transfer as soon
//! as the old transfer is completed. To avoid having to
//! [`stop`](Transfer::stop) a [`Transfer`], build a new [`Transfer`] (with
//! [`new`](Transfer::new) or [`new_from_arrays`](Transfer::new_from_arrays))
//! then call [`begin`](Transfer::begin), a [`Transfer::recycle`] method
//! is provided. If the buffer lengths match and the previous transfer is
//! completed, a new transfer will immediately be triggered using the provided
//! source and destination buffers. If the recycling operation is succesful,
//! `Ok((source, destination))` containing the old source and destination
//! buffers is returned. Otherwise, `Err(_)` is returned.
//!
//! ```
//! let new_source = produce_source();
//! let new_destination = produce_destination();
//!
//! // Assume xfer is a `Busy` `Transfer`
//! let (old_source, old_dest) = xfer.recycle(new_source, new_destination).unwrap();
//! ```
//!
//! # Waker operation
//!
//! A [`Transfer`] can also accept a function or closure that will be called on
//! completion of the transaction, acting like a waker.
//!
//! ```
//! fn wake_up() {
//! //...
//! }
//!
//! fn use_waker<const N: usize>(dmac: DmaController,
//! source: &'static mut [u8; N],
//! destination: &'static mut [u8; N]
//! ){
//! let chan0 = dmac.split().0;
//! let xfer = Transfer::new_from_arrays(chan0, source, destination, false)
//! .with_waker(wake_up)
//! .begin();
//! //...
//! }
//! ```
//!
//! ## RTIC example
//!
//! The [RTIC] framework provides a convenient way to store a `static`ally
//! allocated [`Transfer`], so that it can be accessed by both the interrupt
//! handlers and user code. The following example shows how [`Transfer`]s might
//! be used for a series of transactions. It uses features from the latest
//! release of [RTIC], `v0.6-alpha.4`.
//!
//! ```
//! use atsamd_hal::dmac::*;
//!
//! const LENGTH: usize = 50;
//! type TransferBuffer = &'static mut [u8; LENGTH];
//! type Xfer = Transfer<Channel<Ch0, Busy>, TransferBuffer, TransferBuffer>;
//!
//! #[resources]
//! struct Resources {
//! #[lock_free]
//! #[init(None)]
//! opt_xfer: Option<Xfer>,
//!
//! #[lock_free]
//! #[init(None)]
//! opt_channel: Option<Channel<Ch0, Ready>>,
//! }
//!
//! // Note: Assume interrupts have already been enabled for the concerned channel
//! #[task(resources = [opt_xfer, opt_channel])]
//! fn task(ctx: task::Context) {
//! let task::Context { opt_xfer } = ctx;
//! match opt_xfer {
//! Some(xfer) => {
//! if xfer.complete() {
//! let (chan0, _source, dest, _payload) = xfer.take().unwrap().stop();
//! *opt_channel = Some(chan0);
//! consume_data(buf);
//! }
//! }
//! None => {
//! if let Some(chan0) = opt_channel.take() {
//! let source: [u8; 50] = produce_source();
//! let dest: [u8; 50] = produce_destination();
//! let xfer = opt_xfer.get_or_insert(
//! Transfer::new_from_arrays(channel0, source, destination)
//! .with_waker(|| { task::spawn().ok(); })
//! .begin()
//! );
//! }
//! }
//! }
//! }
//!
//! #[task(binds = DMAC, resources = [opt_xfer])]
//! fn tcmpl(ctx: tcmpl::Context) {
//! ctx.resources.opt_xfer.as_mut().unwrap().callback();
//! }
//! ```
//! [RTIC]: https://rtic.rs
// This is necessary until modular_bitfield fixes all their identity_op warnings
#![allow(clippy::identity_op)]
use modular_bitfield::prelude::*;
pub use channel::*;
pub use dma_controller::*;
pub use transfer::*;
#[derive(Debug)]
/// Runtime errors that may occur when dealing with DMA transfers.
pub enum Error {
/// Supplied buffers both have lengths > 1 beat, but not equal to each other
///
/// Buffers need to either have the same length in beats, or one should have
/// length == 1. In cases where one buffer is length 1, that buffer will be
/// the source or destination of each beat in the transfer. If both buffers
/// had length >1, but not equal to each other, then it would not be clear
/// how to structure the transfer.
LengthMismatch,
/// Operation is not valid in the current state of the object.
InvalidState,
}
/// Result for DMAC operations
pub type Result<T> = core::result::Result<T, Error>;
#[cfg(all(feature = "samd11", feature = "max-channels"))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {6}
};
}
#[cfg(all(feature = "samd21", feature = "max-channels"))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {12}
};
}
#[cfg(all(feature = "min-samd51g", feature = "max-channels"))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {32}
};
}
#[cfg(all(feature = "samd11", not(feature = "max-channels")))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {3}
};
}
#[cfg(all(feature = "samd21", not(feature = "max-channels")))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {6}
};
}
#[cfg(all(feature = "min-samd51g", not(feature = "max-channels")))]
#[macro_export]
macro_rules! with_num_channels {
($some_macro:ident) => {
$some_macro! {16}
};
}
macro_rules! get {
($literal:literal) => {
$literal
};
}
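// `with_num_channels!` invokes the given macro with the channel count for the selected
// chip and feature set as a literal token; passing the `get!` macro above simply returns
// that literal, which is how `NUM_CHANNELS` below is derived.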
/// Number of DMA channels used by the driver
pub const NUM_CHANNELS: usize = with_num_channels!(get);
// ----- DMAC SRAM registers ----- //
/// Bitfield representing the BTCTRL SRAM DMAC register
#[bitfield]
#[derive(Clone, Copy)]
#[repr(u16)]
#[doc(hidden)]
pub struct BlockTransferControl {
#[allow(dead_code)]
valid: bool,
#[allow(dead_code)]
evosel: B2,
#[allow(dead_code)]
blockact: B2,
#[skip]
_reserved: B3,
#[bits = 2]
#[allow(dead_code)]
beatsize: BeatSize,
#[allow(dead_code)]
srcinc: bool,
#[allow(dead_code)]
dstinc: bool,
#[allow(dead_code)]
stepsel: bool,
#[allow(dead_code)]
stepsize: B3,
}
/// Descriptor representing a SRAM register. Datasheet section 19.8.2
#[derive(Clone, Copy)]
#[repr(C, align(16))]
#[doc(hidden)]
pub struct DmacDescriptor {
btctrl: BlockTransferControl,
btcnt: u16,
srcaddr: *const (),
dstaddr: *const (),
descaddr: *const DmacDescriptor,
}
#[doc(hidden)]
pub const DEFAULT_DESCRIPTOR: DmacDescriptor = DmacDescriptor {
btctrl: BlockTransferControl::new(),
btcnt: 0,
srcaddr: 0 as *mut _,
dstaddr: 0 as *mut _,
descaddr: 0 as *mut _,
};
// Writeback section. This static variable should never be written to in an
// interrupt or thread context.
#[doc(hidden)]
static mut WRITEBACK: [DmacDescriptor; NUM_CHANNELS] = [DEFAULT_DESCRIPTOR; NUM_CHANNELS];
// Descriptor section. This static variable should never be written to in an
// interrupt or thread context.
#[doc(hidden)]
static mut DESCRIPTOR_SECTION: [DmacDescriptor; NUM_CHANNELS] = [DEFAULT_DESCRIPTOR; NUM_CHANNELS];
pub mod channel;
pub mod dma_controller;
pub mod transfer;
| 34.027708 | 99 | 0.65201 |
ff76ab2728da046bb5481c85ae6fdd956b512ab1 | 1,375 | /// An event on the "Standard Gamepad" from w3c shown below.
///
/// 
pub enum Event {
/// Bottom right cluster (A / Circle / Return / Right Click).
Accept(bool),
/// Bottom right cluster (B / X / Shift).
Cancel(bool),
/// Leftmost button in right cluster (Y / X / Square / Left Click).
Common(bool),
/// Topmost button in right cluster (X / Y / Triangle / Space).
Action(bool),
/// Up arrow / D-pad
Up(bool),
/// Down arrow / D-pad
Down(bool),
/// Left arrow / D-pad
Left(bool),
/// Right arrow / D-pad
Right(bool),
/// Back / Select Button (Escape)
Back(bool),
/// Forward / Start Button (Tab)
Forward(bool),
/// Near L - "Inventory" (E)
L(bool),
/// Near R - "Use" (R)
R(bool),
/// Far L Throttle - "Sneak" (Ctrl)
Lz(i8),
/// Far R Throttle - "Precision Action" (Alt)
Rz(i8),
/// Right Joystick (A / D)
MotionH(i8),
/// Left Joystick (W / S)
MotionV(i8),
/// Left Joystick (Mouse X Position)
CameraH(i8),
/// Right Joystick (Mouse Y Position)
CameraV(i8),
/// Left Joystick Button (Middle Click)
MotionButton(bool),
/// Right Joystick Button (F)
CameraButton(bool),
/// Home button (Target platform application close)
Exit,
}
| 25 | 75 | 0.571636 |
89c74f338dc68deec85f61ed31f9dc222b692c69 | 3,651 | // copyright 2018 the google ai language team authors and the huggingface inc. team.
// copyright (c) 2018, nvidia corporation. all rights reserved.
// copyright (c) 2019 the sticker developers.
//
// licensed under the apache license, version 2.0 (the "license");
// you may not use this file except in compliance with the license.
// you may obtain a copy of the license at
//
// http://www.apache.org/licenses/license-2.0
//
// unless required by applicable law or agreed to in writing, software
// distributed under the license is distributed on an "as is" basis,
// without warranties or conditions of any kind, either express or implied.
// see the license for the specific language governing permissions and
// limitations under the license.
use std::collections::HashMap;
use tch::nn::VarStore;
use tch::Tensor;
use super::{Optimizer, ZeroGrad};
/// Internal Adam state.
struct AdamWState {
step: usize,
exp_avg: Tensor,
exp_avg_sq: Tensor,
}
/// Adam optimizer configuration.
#[derive(Clone, Copy, Debug)]
pub struct AdamWConfig {
pub betas: (f64, f64),
pub correct_bias: bool,
pub eps: f64,
pub lr: f64,
pub weight_decay: f64,
}
impl Default for AdamWConfig {
fn default() -> Self {
AdamWConfig {
betas: (0.9, 0.999),
correct_bias: false,
eps: 1e-6,
lr: 1e-3,
weight_decay: 0.,
}
}
}
/// Adam algorithm with weight decay fix.
pub struct AdamW<'a> {
state: HashMap<String, AdamWState>,
vs: &'a VarStore,
}
impl<'a> AdamW<'a> {
pub fn new(vs: &'a VarStore) -> Self {
AdamW {
state: HashMap::new(),
vs,
}
}
}
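// Usage sketch (assumes a `VarStore` `vs` and a scalar `loss` tensor are already set up;
// the per-variable policy in the closure is purely illustrative):
//
//     let mut optimizer = AdamW::new(&vs);
//     optimizer.backward_step(&loss, |name| {
//         let mut config = AdamWConfig::default();
//         // Hypothetical policy: apply weight decay everywhere except bias parameters.
//         if !name.ends_with("bias") {
//             config.weight_decay = 0.01;
//         }
//         config
//     });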
impl<'a> Optimizer for AdamW<'a> {
type Config = AdamWConfig;
fn backward_step<F>(&mut self, loss: &Tensor, config_fun: F)
where
F: Fn(&str) -> Self::Config,
{
self.vs.zero_grad();
loss.backward();
tch::no_grad(|| self.step(config_fun));
}
fn step<F>(&mut self, config_fun: F)
where
F: Fn(&str) -> Self::Config,
{
for (name, mut tensor) in self.vs.variables() {
if !tensor.grad().defined() {
continue;
}
let config = config_fun(&name);
let grad = tensor.grad();
let mut state = self.state.entry(name.to_string()).or_insert(AdamWState {
step: 0,
exp_avg: Tensor::zeros_like(&tensor),
exp_avg_sq: Tensor::zeros_like(&tensor),
});
state.step += 1;
// Decay the first and second moment running average coefficient
// In-place operations to update the averages at the same time
state.exp_avg *= config.betas.0;
state.exp_avg += (1. - config.betas.0) * &grad;
state.exp_avg_sq *= config.betas.1;
state.exp_avg_sq += (1. - config.betas.1) * &grad * &grad;
let mut denom = state.exp_avg_sq.sqrt();
denom += config.eps;
let mut step_size = config.lr;
if config.correct_bias {
let bias_correction1 = 1.0 - config.betas.0.powf(state.step as f64);
let bias_correction2 = 1.0 - config.betas.1.powf(state.step as f64);
step_size *= bias_correction2.sqrt() / bias_correction1;
}
tensor += -step_size * (&state.exp_avg / denom);
if config.weight_decay > 0. {
tensor += -config.lr * config.weight_decay * &tensor;
}
}
}
fn var_store(&self) -> &VarStore {
self.vs
}
}
| 28.302326 | 85 | 0.57628 |
cc2414706cdb5ec0183f9e57bbdc922042ca72fa | 5,948 | // Copyright 2020 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
use color_eyre::{eyre::eyre, Result};
use sn_api::fetch::SafeData;
use sn_cmd_test_utilities::util::{
create_and_get_keys, get_random_nrs_string, parse_dog_output, parse_files_put_or_sync_output,
safe_cmd, safe_cmd_stdout, safeurl_from,
};
const TEST_FILE: &str = "./testdata/test.md";
#[test]
fn calling_safe_dog_files_container_nrsurl() -> Result<()> {
let content = safe_cmd_stdout(["files", "put", TEST_FILE, "--json"], Some(0))?;
let (container_xorurl, _files_map) = parse_files_put_or_sync_output(&content);
let nrsurl = get_random_nrs_string();
safe_cmd(["nrs", "create", &nrsurl, "-l", &container_xorurl], Some(0))?;
let dog_output = safe_cmd_stdout(["dog", &nrsurl, "--json"], Some(0))?;
let (url, mut content): (String, Vec<SafeData>) =
serde_json::from_str(&dog_output).expect("Failed to parse output of `safe dog` on file");
assert_eq!(url, nrsurl);
if let Some(SafeData::FilesContainer { resolved_from, .. }) = content.pop() {
assert_eq!(resolved_from, container_xorurl);
Ok(())
} else {
panic!("Content retrieved was unexpected: {:?}", content);
}
}
#[test]
fn calling_safe_dog_files_container_nrsurl_jsoncompact() -> Result<()> {
let content = safe_cmd_stdout(["files", "put", TEST_FILE, "--output=jsoncompact"], Some(0))?;
let (container_xorurl, _files_map) = parse_files_put_or_sync_output(&content);
let nrsurl = get_random_nrs_string();
safe_cmd(["nrs", "create", &nrsurl, "-l", &container_xorurl], Some(0))?;
let dog_output = safe_cmd_stdout(["dog", &nrsurl, "--output=jsoncompact"], Some(0))?;
let (url, mut content): (String, Vec<SafeData>) =
serde_json::from_str(&dog_output).expect("Failed to parse output of `safe dog`");
assert_eq!(url, nrsurl);
if let Some(SafeData::FilesContainer { resolved_from, .. }) = content.pop() {
assert_eq!(resolved_from, container_xorurl);
Ok(())
} else {
panic!("Content retrieved was unexpected: {:?}", content);
}
}
#[test]
fn calling_safe_dog_files_container_nrsurl_yaml() -> Result<()> {
let content = safe_cmd_stdout(["files", "put", TEST_FILE, "--json"], Some(0))?;
let (container_xorurl, _files_map) = parse_files_put_or_sync_output(&content);
let nrsurl = get_random_nrs_string();
let _ = safe_cmd_stdout(["nrs", "create", &nrsurl, "-l", &container_xorurl], Some(0))?;
let dog_output = safe_cmd_stdout(["dog", &nrsurl, "--output=yaml"], Some(0))?;
let (url, mut content): (String, Vec<SafeData>) =
serde_yaml::from_str(&dog_output).expect("Failed to parse output of `safe dog`");
assert_eq!(url, nrsurl);
if let Some(SafeData::FilesContainer { resolved_from, .. }) = content.pop() {
assert_eq!(resolved_from, container_xorurl);
Ok(())
} else {
panic!("Content retrieved was unexpected: {:?}", content);
}
}
#[test]
fn calling_safe_dog_safekey_nrsurl() -> Result<()> {
let (safekey_xorurl, _sk) = create_and_get_keys()?;
let nrsurl = get_random_nrs_string();
safe_cmd(["nrs", "create", &nrsurl, "-l", &safekey_xorurl], Some(0))?;
let dog_output = safe_cmd_stdout(["dog", &nrsurl, "--json"], Some(0))?;
let (url, mut content): (String, Vec<SafeData>) =
serde_json::from_str(&dog_output).expect("Failed to parse output of `safe dog` on file");
assert_eq!(url, nrsurl);
if let Some(SafeData::SafeKey { resolved_from, .. }) = content.pop() {
assert_eq!(resolved_from, safekey_xorurl);
Ok(())
} else {
Err(eyre!("Content retrieved was unexpected: {:?}", content))
}
}
#[test]
#[ignore = "fails consistently with 'relative URL without a base'"]
fn calling_safe_dog_nrs_url_with_subnames() -> Result<()> {
let (safekey_xorurl, _sk) = create_and_get_keys()?;
let pub_name = get_random_nrs_string();
let nrsurl = format!("subname.{}", pub_name);
safe_cmd(["nrs", "create", &nrsurl, "-l", &safekey_xorurl], Some(0))?;
// let's check the output with NRS-URL first
let dog_output = safe_cmd_stdout(["dog", &nrsurl, "--json"], Some(0))?;
let (url, safe_data_vec) = parse_dog_output(&dog_output);
assert_eq!(url, nrsurl);
let mut safeurl = safeurl_from(&nrsurl)?;
safeurl.set_sub_names("").map_err(|e| eyre!(e))?;
let nrs_map_xorurl = safeurl.to_xorurl_string();
if let SafeData::NrsMapContainer {
resolved_from,
xorurl,
public_name,
..
} = &safe_data_vec[0]
{
assert_eq!(*resolved_from, nrsurl);
assert_eq!(*xorurl, nrs_map_xorurl);
assert_eq!(*public_name, Some(pub_name));
} else {
panic!("Content retrieved was unexpected: {:?}", safe_data_vec);
}
// let's now check the output with its XOR-URL
let dog_output = safe_cmd_stdout(["dog", &nrs_map_xorurl, "--json"], Some(0))?;
let (url, safe_data_vec) = parse_dog_output(&dog_output);
assert_eq!(url, *nrs_map_xorurl);
if let SafeData::NrsMapContainer {
resolved_from,
xorurl,
public_name,
..
} = &safe_data_vec[0]
{
assert_eq!(*resolved_from, nrs_map_xorurl);
assert_eq!(*xorurl, nrs_map_xorurl);
// it doesn't know the public name as it was resolved from a XOR-URL
assert_eq!(*public_name, None);
Ok(())
} else {
panic!("Content retrieved was unexpected: {:?}", safe_data_vec);
}
}
| 39.390728 | 97 | 0.653665 |
4b4e5d9fcc891132f3b8116a5b05be3c9bdd3e7b | 3,523 | use actix_web::{get,web, HttpResponse, Responder,HttpRequest};
use super::super::{AppState,Song};
use mongodb::{bson::{doc},options::FindOptions};
use futures::stream::{StreamExt};
use chrono::{Utc,DateTime,NaiveDateTime};
use serde::{Deserialize, Serialize};
#[get("/")]
pub async fn index(_request: HttpRequest) -> impl Responder {
HttpResponse::Ok().body("Welcome to slack_music_bot.rs")
}
#[derive(Debug, Serialize, Deserialize,Clone)]
#[serde(rename_all = "camelCase")]
pub struct PlaylistQuery {
#[serde(skip_serializing_if="Option::is_none")]
pub start_date: Option<String>,
#[serde(skip_serializing_if="Option::is_none")]
pub end_date: Option<String>
}
#[derive(Debug, Serialize, Deserialize,Clone)]
#[serde(rename_all = "camelCase")]
pub struct SongsQuery {
#[serde(skip_serializing_if="Option::is_none")]
pub page: Option<u64>,
#[serde(skip_serializing_if="Option::is_none")]
pub size: Option<u64>
}
#[get("/songs")]
pub async fn songs(app_state:web::Data<AppState>,query:web::Query<SongsQuery>) -> impl Responder {
let size = match query.size {
Some(value) => value,
None => 0
};
let page = match query.page {
Some(value) => value,
None => 0
};
let find_options = FindOptions::builder().projection(doc!{
"channel":0,
"user":0,
"client_message_id":0
}).skip(page * size).limit(size as i64).build();
let mut cursor = app_state.db.collection("playlist").find(doc!{
"shared_on":{
"$lte":Utc::today().and_hms(0, 0, 0)
}
},find_options).await.expect("Failed Mongo Query");
let mut all_songs:Vec<Song> = Vec::new();
while let Some(doc) = cursor.next().await {
match doc {
Ok(item) => {
all_songs.push(item)
}
Err(e) => println!("{}",e)
}
}
HttpResponse::Ok().content_type("application/json").json(all_songs).await
}
#[get("/playlist")]
pub async fn playlist(_request: HttpRequest,app_state:web::Data<AppState>,query:web::Query<PlaylistQuery>) -> impl Responder {
let find_options = FindOptions::builder().projection(doc!{
"channel":0,
"user":0,
"client_message_id":0
}).build();
let mut cursor= app_state.db.collection("playlist").find(doc!{
"shared_on": {
"$gte": match &query.start_date {
Some(start_date) => {
let start_date = start_date.parse::<i64>().unwrap();
DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(start_date, 0),Utc)
}
None => {
Utc::today().and_hms(0, 0, 0)
}
},
"$lte":match &query.end_date {
Some(end_date) => {
let end_date = end_date.parse::<i64>().unwrap();
DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(end_date, 0),Utc)
}
None => {
Utc::today().and_hms(23, 59, 59)
}
},
}
},find_options).await.expect("Failed mongo query");
let mut playlist:Vec<Song> = Vec::new();
while let Some(doc) = cursor.next().await {
match doc {
Ok(item) => {
playlist.push(item)
}
Err(e) => println!("{}",e)
}
}
println!("{:?}",playlist);
HttpResponse::Ok().content_type("application/json").json(playlist).await
}
| 34.203883 | 126 | 0.565711 |
90c2439dc34f4556322f383bc0e672c6069618dc | 814 | #![warn(clippy::non_send_fields_in_send_ty)]
#![feature(extern_types)]
use std::rc::Rc;
// Basic tests should not be affected
pub struct NoGeneric {
rc_is_not_send: Rc<String>,
}
unsafe impl Send for NoGeneric {}
pub struct MultiField<T> {
field1: T,
field2: T,
field3: T,
}
unsafe impl<T> Send for MultiField<T> {}
pub enum MyOption<T> {
MySome(T),
MyNone,
}
unsafe impl<T> Send for MyOption<T> {}
// All fields are disallowed when raw pointer heuristic is off
extern "C" {
type NonSend;
}
pub struct HeuristicTest {
field1: Vec<*const NonSend>,
field2: [*const NonSend; 3],
field3: (*const NonSend, *const NonSend, *const NonSend),
field4: (*const NonSend, Rc<u8>),
field5: Vec<Vec<*const NonSend>>,
}
unsafe impl Send for HeuristicTest {}
fn main() {}
| 18.5 | 62 | 0.663391 |
395551ea29f1902f8cf6bcdff731a895a6e18cc0 | 840 | // errors4.rs
// Make this test pass! Execute `rustlings hint errors4` for hints :)
use std::num::ParseIntError;
#[derive(PartialEq, Debug)]
struct PositiveNonzeroInteger(u64);
#[derive(PartialEq, Debug)]
enum CreationError {
Negative,
Zero,
}
impl PositiveNonzeroInteger {
fn new(value: i64) -> Result<PositiveNonzeroInteger, CreationError> {
if value > 0 {
Ok(PositiveNonzeroInteger(value as u64))
        } else if value == 0 {
            Err(CreationError::Zero)
        } else {
Err(CreationError::Negative)
}
}
}
#[test]
fn test_creation() {
assert!(PositiveNonzeroInteger::new(10).is_ok());
assert_eq!(
Err(CreationError::Negative),
PositiveNonzeroInteger::new(-10)
);
assert_eq!(Err(CreationError::Zero), PositiveNonzeroInteger::new(0));
}
| 22.702703 | 73 | 0.638095 |
714f77ede310c7b6d1391ad5e51c6253ff8582f5 | 2,715 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use common_datavalues::prelude::*;
use common_exception::ErrorCode;
use common_exception::Result;
use crate::scalars::scalar_unary_op;
use crate::scalars::EvalContext;
use crate::scalars::Function;
use crate::scalars::FunctionContext;
use crate::scalars::Monotonicity;
#[derive(Clone)]
pub struct RoundFunction {
display_name: String,
round: u32,
}
impl RoundFunction {
pub fn try_create(
display_name: &str,
args: &[&DataTypePtr],
round: u32,
) -> Result<Box<dyn Function>> {
if args[0].data_type_id() != TypeID::DateTime {
return Err(ErrorCode::BadDataValueType(format!(
"Function {} must have a DateTime type as argument, but got {}",
display_name,
args[0].name(),
)));
}
let s = Self {
display_name: display_name.to_owned(),
round,
};
Ok(Box::new(s))
}
// TODO: (sundy-li)
    // Consider the timezones/offsets
// Currently: assuming timezone offset is a multiple of round.
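    // For example, with `round` = 60 (seconds), a timestamp of 125 rounds down to 120.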
#[inline]
fn execute(&self, time: i64) -> i64 {
let round = self.round as i64;
time / round * round
}
}
impl Function for RoundFunction {
fn name(&self) -> &str {
self.display_name.as_str()
}
fn return_type(&self) -> DataTypePtr {
DateTimeType::arc(0, None)
}
fn eval(
&self,
_func_ctx: FunctionContext,
columns: &common_datavalues::ColumnsWithField,
_input_rows: usize,
) -> Result<common_datavalues::ColumnRef> {
let func = |val: i64, _ctx: &mut EvalContext| self.execute(val);
let col =
scalar_unary_op::<i64, _, _>(columns[0].column(), func, &mut EvalContext::default())?;
Ok(col.arc())
}
fn get_monotonicity(&self, args: &[Monotonicity]) -> Result<Monotonicity> {
Ok(Monotonicity::clone_without_range(&args[0]))
}
}
impl fmt::Display for RoundFunction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.display_name)
}
}
| 28.28125 | 98 | 0.624678 |
e695acd7d06c35248c0fb386c2ddeddd3439be87 | 5,908 | // Copyright 2018-2021 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Provides database upgrade functionality
mod error;
mod node_id;
mod receipt_store;
mod scabbard;
mod yaml;
use std::path::PathBuf;
use std::str::FromStr;
use clap::ArgMatches;
#[cfg(feature = "postgres")]
use splinter::store::postgres;
#[cfg(feature = "sqlite")]
use splinter::store::sqlite;
use splinter::{error::InternalError, store::StoreFactory};
#[cfg(feature = "sqlite")]
use crate::action::database::sqlite::{get_database_at_state_path, get_default_database};
use crate::action::database::{ConnectionUri, SplinterEnvironment};
use crate::diesel::{pg::PgConnection, Connection};
use crate::error::CliError;
use super::Action;
/// The overarching Action possibly containing multiple upgrade actions
pub struct UpgradeAction;
impl Action for UpgradeAction {
fn run<'a>(&mut self, arg_matches: Option<&ArgMatches<'a>>) -> Result<(), CliError> {
let state_dir = get_state_dir(arg_matches)?;
let database_uri = get_database_uri(arg_matches)?;
let store_factory = create_store_factory(database_uri).map_err(|err| {
CliError::ActionError(format!("failed to initialized store factory: {}", err))
})?;
info!("Upgrading splinterd state");
#[cfg(any(feature = "sqlite", feature = "postgres"))]
{
let db_store = store_factory.get_node_id_store();
node_id::migrate_node_id_to_db(state_dir.clone(), &*db_store)?;
}
info!(
"Source yaml state directory: {}",
state_dir.to_string_lossy()
);
let database_uri = get_database_uri(arg_matches)?;
info!("Destination database uri: {}", database_uri);
info!("Loading YAML datastore... ");
let db_store = store_factory.get_admin_service_store();
yaml::import_yaml_state_to_database(state_dir.as_path(), &*db_store)?;
scabbard::upgrade_scabbard_commit_hash_state(state_dir.as_path(), &database_uri).map_err(
|err| {
CliError::ActionError(format!(
"failed to upgrade scabbard commit hash state: {}",
err
))
},
)?;
receipt_store::upgrade_scabbard_receipt_store(state_dir.as_path(), &database_uri)?;
Ok(())
}
}
/// Gets the path of splinterd's state directory
///
///
/// # Arguments
///
/// * `arg_matches` - an option of clap ['ArgMatches'](https://docs.rs/clap/2.33.3/clap/struct.ArgMatches.html).
///
/// # Returns
///
/// * PathBuf to state_dir if present in arg_matches, otherwise just the default from
/// SplinterEnvironment
fn get_state_dir(arg_matches: Option<&ArgMatches>) -> Result<PathBuf, CliError> {
if let Some(arg_matches) = arg_matches {
match arg_matches.value_of("state_dir") {
Some(state_dir) => {
let state_dir = PathBuf::from(state_dir.to_string());
Ok(
std::fs::canonicalize(state_dir.as_path())
.unwrap_or_else(|_| state_dir.clone()),
)
}
None => Ok(SplinterEnvironment::load().get_state_path()),
}
} else {
Ok(SplinterEnvironment::load().get_state_path())
}
}
/// Gets the configured database_uri
///
///
/// # Arguments
///
/// * `arg_matches` - an option of clap ['ArgMatches'](https://docs.rs/clap/2.33.3/clap/struct.ArgMatches.html).
fn get_database_uri(arg_matches: Option<&ArgMatches>) -> Result<ConnectionUri, CliError> {
let database_uri = if let Some(arg_matches) = arg_matches {
match arg_matches.value_of("connect") {
Some(database_uri) => database_uri.to_string(),
#[cfg(feature = "sqlite")]
None => get_database_at_state_path(get_state_dir(Some(arg_matches))?)?,
#[cfg(not(feature = "sqlite"))]
None => get_default_database(),
}
} else if cfg!(feature = "sqlite") {
get_database_at_state_path(get_state_dir(arg_matches)?)?
} else {
get_default_database()?
};
let parsed_uri = ConnectionUri::from_str(&database_uri)
.map_err(|e| CliError::ActionError(format!("database uri could not be parsed: {}", e)))?;
if let ConnectionUri::Postgres(_) = parsed_uri {
// Verify database connection.
// If the connection is faulty, we want to abort here instead of
// creating the store, as the store would perform reconnection attempts.
PgConnection::establish(&database_uri[..]).map_err(|err| {
CliError::ActionError(format!(
"Failed to establish database connection to '{}': {}",
database_uri, err
))
})?;
}
Ok(parsed_uri)
}
fn create_store_factory(
connection_uri: ConnectionUri,
) -> Result<Box<dyn StoreFactory>, InternalError> {
match connection_uri {
#[cfg(feature = "postgres")]
ConnectionUri::Postgres(url) => {
let pool = postgres::create_postgres_connection_pool(&url)?;
Ok(Box::new(postgres::PgStoreFactory::new(pool)))
}
#[cfg(feature = "sqlite")]
ConnectionUri::Sqlite(conn_str) => {
let pool = sqlite::create_sqlite_connection_pool(&conn_str)?;
Ok(Box::new(sqlite::SqliteStoreFactory::new(pool)))
}
}
}
| 36.02439 | 112 | 0.635917 |
edd8e9948dc6ba1c1631d003e0d01c41eab72f64 | 8,049 | use std::{self, os::raw::*, ptr, time::Instant};
use crate::platform_impl::platform::{app_state::AppState, ffi};
#[link(name = "CoreFoundation", kind = "framework")]
extern "C" {
pub static kCFRunLoopCommonModes: CFRunLoopMode;
pub fn CFRunLoopGetMain() -> CFRunLoopRef;
pub fn CFRunLoopWakeUp(rl: CFRunLoopRef);
pub fn CFRunLoopObserverCreate(
allocator: CFAllocatorRef,
activities: CFOptionFlags,
repeats: ffi::Boolean,
order: CFIndex,
callout: CFRunLoopObserverCallBack,
context: *mut CFRunLoopObserverContext,
) -> CFRunLoopObserverRef;
pub fn CFRunLoopAddObserver(
rl: CFRunLoopRef,
observer: CFRunLoopObserverRef,
mode: CFRunLoopMode,
);
pub fn CFRunLoopTimerCreate(
allocator: CFAllocatorRef,
fireDate: CFAbsoluteTime,
interval: CFTimeInterval,
flags: CFOptionFlags,
order: CFIndex,
callout: CFRunLoopTimerCallBack,
context: *mut CFRunLoopTimerContext,
) -> CFRunLoopTimerRef;
pub fn CFRunLoopAddTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFRunLoopMode);
pub fn CFRunLoopTimerSetNextFireDate(timer: CFRunLoopTimerRef, fireDate: CFAbsoluteTime);
pub fn CFRunLoopTimerInvalidate(time: CFRunLoopTimerRef);
pub fn CFRunLoopSourceCreate(
allocator: CFAllocatorRef,
order: CFIndex,
context: *mut CFRunLoopSourceContext,
) -> CFRunLoopSourceRef;
pub fn CFRunLoopAddSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFRunLoopMode);
#[allow(dead_code)]
pub fn CFRunLoopSourceInvalidate(source: CFRunLoopSourceRef);
pub fn CFRunLoopSourceSignal(source: CFRunLoopSourceRef);
pub fn CFAbsoluteTimeGetCurrent() -> CFAbsoluteTime;
pub fn CFRelease(cftype: *const c_void);
}
pub enum CFAllocator {}
pub type CFAllocatorRef = *mut CFAllocator;
pub enum CFRunLoop {}
pub type CFRunLoopRef = *mut CFRunLoop;
pub type CFRunLoopMode = CFStringRef;
pub enum CFRunLoopObserver {}
pub type CFRunLoopObserverRef = *mut CFRunLoopObserver;
pub enum CFRunLoopTimer {}
pub type CFRunLoopTimerRef = *mut CFRunLoopTimer;
pub enum CFRunLoopSource {}
pub type CFRunLoopSourceRef = *mut CFRunLoopSource;
pub enum CFString {}
pub type CFStringRef = *const CFString;
pub type CFHashCode = c_ulong;
pub type CFIndex = c_long;
pub type CFOptionFlags = c_ulong;
pub type CFRunLoopActivity = CFOptionFlags;
pub type CFAbsoluteTime = CFTimeInterval;
pub type CFTimeInterval = f64;
#[allow(non_upper_case_globals)]
pub const kCFRunLoopEntry: CFRunLoopActivity = 0;
#[allow(non_upper_case_globals)]
pub const kCFRunLoopBeforeWaiting: CFRunLoopActivity = 1 << 5;
#[allow(non_upper_case_globals)]
pub const kCFRunLoopAfterWaiting: CFRunLoopActivity = 1 << 6;
#[allow(non_upper_case_globals)]
pub const kCFRunLoopExit: CFRunLoopActivity = 1 << 7;
pub type CFRunLoopObserverCallBack =
extern "C" fn(observer: CFRunLoopObserverRef, activity: CFRunLoopActivity, info: *mut c_void);
pub type CFRunLoopTimerCallBack = extern "C" fn(timer: CFRunLoopTimerRef, info: *mut c_void);
pub enum CFRunLoopObserverContext {}
pub enum CFRunLoopTimerContext {}
#[allow(non_snake_case)]
#[repr(C)]
pub struct CFRunLoopSourceContext {
pub version: CFIndex,
pub info: *mut c_void,
pub retain: Option<extern "C" fn(*const c_void) -> *const c_void>,
pub release: Option<extern "C" fn(*const c_void)>,
pub copyDescription: Option<extern "C" fn(*const c_void) -> CFStringRef>,
pub equal: Option<extern "C" fn(*const c_void, *const c_void) -> ffi::Boolean>,
pub hash: Option<extern "C" fn(*const c_void) -> CFHashCode>,
pub schedule: Option<extern "C" fn(*mut c_void, CFRunLoopRef, CFRunLoopMode)>,
pub cancel: Option<extern "C" fn(*mut c_void, CFRunLoopRef, CFRunLoopMode)>,
pub perform: Option<extern "C" fn(*mut c_void)>,
}
// begin is queued with the highest priority to ensure it is processed before other observers
extern "C" fn control_flow_begin_handler(
_: CFRunLoopObserverRef,
activity: CFRunLoopActivity,
_: *mut c_void,
) {
#[allow(non_upper_case_globals)]
match activity {
kCFRunLoopAfterWaiting => {
//trace!("Triggered `CFRunLoopAfterWaiting`");
AppState::wakeup();
//trace!("Completed `CFRunLoopAfterWaiting`");
}
kCFRunLoopEntry => unimplemented!(), // not expected to ever happen
_ => unreachable!(),
}
}
// end is queued with the lowest priority to ensure it is processed after other observers
// without that, LoopDestroyed would get sent after EventsCleared
extern "C" fn control_flow_end_handler(
_: CFRunLoopObserverRef,
activity: CFRunLoopActivity,
_: *mut c_void,
) {
#[allow(non_upper_case_globals)]
match activity {
kCFRunLoopBeforeWaiting => {
//trace!("Triggered `CFRunLoopBeforeWaiting`");
AppState::cleared();
//trace!("Completed `CFRunLoopBeforeWaiting`");
}
kCFRunLoopExit => (), //unimplemented!(), // not expected to ever happen
_ => unreachable!(),
}
}
struct RunLoop(CFRunLoopRef);
impl RunLoop {
unsafe fn get() -> Self {
RunLoop(CFRunLoopGetMain())
}
unsafe fn add_observer(
&self,
flags: CFOptionFlags,
priority: CFIndex,
handler: CFRunLoopObserverCallBack,
) {
let observer = CFRunLoopObserverCreate(
ptr::null_mut(),
flags,
ffi::TRUE, // Indicates we want this to run repeatedly
priority, // The lower the value, the sooner this will run
handler,
ptr::null_mut(),
);
CFRunLoopAddObserver(self.0, observer, kCFRunLoopCommonModes);
}
}
pub fn setup_control_flow_observers() {
unsafe {
let run_loop = RunLoop::get();
run_loop.add_observer(
kCFRunLoopEntry | kCFRunLoopAfterWaiting,
CFIndex::min_value(),
control_flow_begin_handler,
);
run_loop.add_observer(
kCFRunLoopExit | kCFRunLoopBeforeWaiting,
CFIndex::max_value(),
control_flow_end_handler,
);
}
}
pub struct EventLoopWaker {
timer: CFRunLoopTimerRef,
}
impl Drop for EventLoopWaker {
fn drop(&mut self) {
unsafe {
CFRunLoopTimerInvalidate(self.timer);
CFRelease(self.timer as _);
}
}
}
impl Default for EventLoopWaker {
fn default() -> EventLoopWaker {
extern "C" fn wakeup_main_loop(_timer: CFRunLoopTimerRef, _info: *mut c_void) {}
unsafe {
// Create a timer with a 0.1µs interval (1ns does not work) to mimic polling.
// It is initially setup with a first fire time really far into the
// future, but that gets changed to fire immediately in did_finish_launching
let timer = CFRunLoopTimerCreate(
ptr::null_mut(),
std::f64::MAX,
0.000_000_1,
0,
0,
wakeup_main_loop,
ptr::null_mut(),
);
CFRunLoopAddTimer(CFRunLoopGetMain(), timer, kCFRunLoopCommonModes);
EventLoopWaker { timer }
}
}
}
impl EventLoopWaker {
pub fn stop(&mut self) {
unsafe { CFRunLoopTimerSetNextFireDate(self.timer, std::f64::MAX) }
}
pub fn start(&mut self) {
unsafe { CFRunLoopTimerSetNextFireDate(self.timer, std::f64::MIN) }
}
pub fn start_at(&mut self, instant: Instant) {
let now = Instant::now();
if now >= instant {
self.start();
} else {
unsafe {
let current = CFAbsoluteTimeGetCurrent();
let duration = instant - now;
let fsecs =
duration.subsec_nanos() as f64 / 1_000_000_000.0 + duration.as_secs() as f64;
CFRunLoopTimerSetNextFireDate(self.timer, current + fsecs)
}
}
}
}
| 33.123457 | 98 | 0.654615 |
f9fc22b9050cc4b211c241c21f1d89e74e76c532 | 6,564 | //! Demo-code for showing how egui is used.
//!
//! The demo-code is also used in benchmarks and tests.
// Forbid warnings in release builds:
#![cfg_attr(not(debug_assertions), deny(warnings))]
// Disabled so we can support rust 1.51:
// #![deny(
// rustdoc::broken_intra_doc_links,
// rustdoc::invalid_codeblock_attributes,
// rustdoc::missing_crate_level_docs,
// rustdoc::private_intra_doc_links
// )]
#![forbid(unsafe_code)]
#![warn(
clippy::all,
clippy::await_holding_lock,
clippy::char_lit_as_u8,
clippy::checked_conversions,
clippy::dbg_macro,
clippy::debug_assert_with_mut_call,
clippy::doc_markdown,
clippy::empty_enum,
clippy::enum_glob_use,
clippy::exit,
clippy::expl_impl_clone_on_copy,
clippy::explicit_deref_methods,
clippy::explicit_into_iter_loop,
clippy::fallible_impl_from,
clippy::filter_map_next,
clippy::float_cmp_const,
clippy::fn_params_excessive_bools,
clippy::if_let_mutex,
clippy::imprecise_flops,
clippy::inefficient_to_string,
clippy::invalid_upcast_comparisons,
clippy::large_types_passed_by_value,
clippy::let_unit_value,
clippy::linkedlist,
clippy::lossy_float_literal,
clippy::macro_use_imports,
clippy::manual_ok_or,
clippy::map_err_ignore,
clippy::map_flatten,
clippy::match_on_vec_items,
clippy::match_same_arms,
clippy::match_wildcard_for_single_variants,
clippy::mem_forget,
clippy::mismatched_target_os,
clippy::missing_errors_doc,
clippy::missing_safety_doc,
clippy::mut_mut,
clippy::mutex_integer,
clippy::needless_borrow,
clippy::needless_continue,
clippy::needless_pass_by_value,
clippy::option_option,
clippy::path_buf_push_overwrite,
clippy::ptr_as_ptr,
clippy::ref_option_ref,
clippy::rest_pat_in_fully_bound_structs,
clippy::same_functions_in_if_condition,
clippy::string_add_assign,
clippy::string_add,
clippy::string_lit_as_bytes,
clippy::string_to_string,
clippy::todo,
clippy::trait_duplication_in_bounds,
clippy::unimplemented,
clippy::unnested_or_patterns,
clippy::unused_self,
clippy::useless_transmute,
clippy::verbose_file_reads,
clippy::zero_sized_map_values,
future_incompatible,
nonstandard_style,
rust_2018_idioms
)]
#![allow(clippy::float_cmp)]
#![allow(clippy::manual_range_contains)]
mod apps;
mod backend_panel;
pub mod easy_mark;
pub(crate) mod frame_history;
mod wrap_app;
pub use apps::ColorTest; // used for tests
pub use apps::DemoWindows; // used for tests
pub use wrap_app::WrapApp;
// ----------------------------------------------------------------------------
/// Create a [`Hyperlink`](crate::Hyperlink) to this egui source code file on github.
#[doc(hidden)]
#[macro_export]
macro_rules! __egui_github_link_file {
() => {
crate::__egui_github_link_file!("(source code)")
};
($label:expr) => {
egui::github_link_file!("https://github.com/emilk/egui/blob/master/", $label).small()
};
}
/// Create a [`Hyperlink`](crate::Hyperlink) to this egui source code file and line on github.
#[doc(hidden)]
#[macro_export]
macro_rules! __egui_github_link_file_line {
() => {
crate::__egui_github_link_file_line!("(source code)")
};
($label:expr) => {
egui::github_link_file_line!("https://github.com/emilk/egui/blob/master/", $label).small()
};
}
// ----------------------------------------------------------------------------
pub const LOREM_IPSUM: &str = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
pub const LOREM_IPSUM_LONG: &str = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst.";
// ----------------------------------------------------------------------------
#[test]
fn test_egui_e2e() {
let mut demo_windows = crate::DemoWindows::default();
let mut ctx = egui::CtxRef::default();
let raw_input = egui::RawInput::default();
const NUM_FRAMES: usize = 5;
for _ in 0..NUM_FRAMES {
ctx.begin_frame(raw_input.clone());
demo_windows.ui(&ctx);
let (_output, shapes) = ctx.end_frame();
let clipped_meshes = ctx.tessellate(shapes);
assert!(!clipped_meshes.is_empty());
}
}
#[test]
fn test_egui_zero_window_size() {
let mut demo_windows = crate::DemoWindows::default();
let mut ctx = egui::CtxRef::default();
let raw_input = egui::RawInput {
screen_rect: Some(egui::Rect::from_min_max(egui::Pos2::ZERO, egui::Pos2::ZERO)),
..Default::default()
};
const NUM_FRAMES: usize = 5;
for _ in 0..NUM_FRAMES {
ctx.begin_frame(raw_input.clone());
demo_windows.ui(&ctx);
let (_output, shapes) = ctx.end_frame();
let clipped_meshes = ctx.tessellate(shapes);
assert!(clipped_meshes.is_empty(), "There should be nothing to show");
}
}
| 40.518519 | 890 | 0.699573 |
f50f759d4c092f1281fb36dfe6a94d055a4deb3c | 1,643 | #![allow(non_upper_case_globals)]
extern crate libc;
use libc::{c_void, c_int, c_ulonglong, c_char, size_t};
include!("src/core.rs");
include!("src/crypto_aead_chacha20poly1305.rs");
include!("src/crypto_auth.rs");
include!("src/crypto_auth_hmacsha256.rs");
include!("src/crypto_auth_hmacsha512.rs");
include!("src/crypto_auth_hmacsha512256.rs");
include!("src/crypto_box.rs");
include!("src/crypto_box_curve25519xsalsa20poly1305.rs");
include!("src/crypto_core_hsalsa20.rs");
include!("src/crypto_core_salsa20.rs");
include!("src/crypto_core_salsa2012.rs");
include!("src/crypto_core_salsa208.rs");
include!("src/crypto_generichash.rs");
include!("src/crypto_generichash_blake2b.rs");
include!("src/crypto_hash.rs");
include!("src/crypto_hash_sha256.rs");
include!("src/crypto_hash_sha512.rs");
include!("src/crypto_onetimeauth.rs");
include!("src/crypto_onetimeauth_poly1305.rs");
include!("src/crypto_pwhash_scryptsalsa208sha256.rs");
include!("src/crypto_scalarmult.rs");
include!("src/crypto_scalarmult_curve25519.rs");
include!("src/crypto_secretbox_xsalsa20poly1305.rs");
include!("src/crypto_shorthash_siphash24.rs");
include!("src/crypto_sign_ed25519.rs");
include!("src/crypto_stream.rs");
include!("src/crypto_stream_aes128ctr.rs");
include!("src/crypto_stream_chacha20.rs");
include!("src/crypto_stream_salsa20.rs");
include!("src/crypto_stream_salsa2012.rs");
include!("src/crypto_stream_salsa208.rs");
include!("src/crypto_stream_xsalsa20.rs");
include!("src/crypto_verify_16.rs");
include!("src/crypto_verify_32.rs");
include!("src/crypto_verify_64.rs");
include!("src/randombytes.rs");
include!("src/utils.rs");
| 29.339286 | 57 | 0.772368 |
1104a5fde84389eb00a59979e985fb56e07155d2 | 9,566 | #[doc = r" Value read from the register"]
pub struct R {
bits: u8,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u8,
}
impl super::CHCFG3 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
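// Usage sketch for the generated read/modify/write API (the `chcfg3` handle comes from the
// owning peripheral struct, and the source value `0x3A` below is purely illustrative):
//
//     chcfg3.modify(|_, w| unsafe { w.source().bits(0x3A) }.trig()._0().enbl()._1());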
#[doc = r" Value of the field"]
pub struct SOURCER {
bits: u8,
}
impl SOURCER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Possible values of the field `TRIG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TRIGR {
#[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"]
_0,
#[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMAMUX is in Periodic Trigger mode."]
_1,
}
impl TRIGR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
TRIGR::_0 => false,
TRIGR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> TRIGR {
match value {
false => TRIGR::_0,
true => TRIGR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == TRIGR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == TRIGR::_1
}
}
#[doc = "Possible values of the field `ENBL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENBLR {
#[doc = "DMA channel is disabled. This mode is primarily used during configuration of the DMAMux. The DMA has separate channel enables/disables, which should be used to disable or reconfigure a DMA channel."]
_0,
#[doc = "DMA channel is enabled"]
_1,
}
impl ENBLR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
ENBLR::_0 => false,
ENBLR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> ENBLR {
match value {
false => ENBLR::_0,
true => ENBLR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == ENBLR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == ENBLR::_1
}
}
#[doc = r" Proxy"]
pub struct _SOURCEW<'a> {
w: &'a mut W,
}
impl<'a> _SOURCEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 63;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `TRIG`"]
pub enum TRIGW {
#[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"]
_0,
#[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMAMUX is in Periodic Trigger mode."]
_1,
}
impl TRIGW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
TRIGW::_0 => false,
TRIGW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _TRIGW<'a> {
w: &'a mut W,
}
impl<'a> _TRIGW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TRIGW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(TRIGW::_0)
}
#[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMAMUX is in Periodic Trigger mode."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(TRIGW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `ENBL`"]
pub enum ENBLW {
#[doc = "DMA channel is disabled. This mode is primarily used during configuration of the DMAMux. The DMA has separate channel enables/disables, which should be used to disable or reconfigure a DMA channel."]
_0,
#[doc = "DMA channel is enabled"]
_1,
}
impl ENBLW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
ENBLW::_0 => false,
ENBLW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _ENBLW<'a> {
w: &'a mut W,
}
impl<'a> _ENBLW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: ENBLW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "DMA channel is disabled. This mode is primarily used during configuration of the DMAMux. The DMA has separate channel enables/disables, which should be used to disable or reconfigure a DMA channel."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(ENBLW::_0)
}
#[doc = "DMA channel is enabled"]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(ENBLW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
#[doc = "Bits 0:5 - DMA Channel Source (Slot)"]
#[inline]
pub fn source(&self) -> SOURCER {
let bits = {
const MASK: u8 = 63;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u8) as u8
};
SOURCER { bits }
}
#[doc = "Bit 6 - DMA Channel Trigger Enable"]
#[inline]
pub fn trig(&self) -> TRIGR {
TRIGR::_from({
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u8) != 0
})
}
#[doc = "Bit 7 - DMA Channel Enable"]
#[inline]
pub fn enbl(&self) -> ENBLR {
ENBLR::_from({
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u8) != 0
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:5 - DMA Channel Source (Slot)"]
#[inline]
pub fn source(&mut self) -> _SOURCEW {
_SOURCEW { w: self }
}
#[doc = "Bit 6 - DMA Channel Trigger Enable"]
#[inline]
pub fn trig(&mut self) -> _TRIGW {
_TRIGW { w: self }
}
#[doc = "Bit 7 - DMA Channel Enable"]
#[inline]
pub fn enbl(&mut self) -> _ENBLW {
_ENBLW { w: self }
}
}
| 27.97076 | 212 | 0.529375 |
e6c9d0b0ab000790cbd7bab015df5e5e0febe80e | 1,746 | use crate::builtin;
use rustc_hir::HirId;
use rustc_middle::{lint::LintExpectation, ty::TyCtxt};
use rustc_session::lint::LintExpectationId;
use rustc_span::symbol::sym;
pub fn check_expectations(tcx: TyCtxt<'_>) {
if !tcx.sess.features_untracked().enabled(sym::lint_reasons) {
return;
}
let fulfilled_expectations = tcx.sess.diagnostic().steal_fulfilled_expectation_ids();
let lint_expectations = &tcx.lint_levels(()).lint_expectations;
for (id, expectation) in lint_expectations {
if !fulfilled_expectations.contains(id) {
// This check will always be true, since `lint_expectations` only
// holds stable ids
if let LintExpectationId::Stable { hir_id, .. } = id {
emit_unfulfilled_expectation_lint(tcx, *hir_id, expectation);
} else {
unreachable!("at this stage all `LintExpectationId`s are stable");
}
}
}
}
fn emit_unfulfilled_expectation_lint(
tcx: TyCtxt<'_>,
hir_id: HirId,
expectation: &LintExpectation,
) {
// FIXME: The current implementation doesn't cover cases where the
// `unfulfilled_lint_expectations` is actually expected by another lint
// expectation. This can be added here by checking the lint level and
// retrieving the `LintExpectationId` if it was expected.
tcx.struct_span_lint_hir(
builtin::UNFULFILLED_LINT_EXPECTATIONS,
hir_id,
expectation.emission_span,
|diag| {
let mut diag = diag.build("this lint expectation is unfulfilled");
if let Some(rationale) = expectation.reason {
diag.note(&rationale.as_str());
}
diag.emit();
},
);
}
| 34.92 | 89 | 0.646048 |
115a68954cf2fca39424a7193384c517dc0e643a | 4,449 | use super::{scheduled_task::ScheduledTask, Schedule};
use crate::{broker::Broker, error::BeatError, protocol::TryCreateMessage};
use log::{debug, info};
use std::collections::BinaryHeap;
use std::time::{Duration, SystemTime};
const DEFAULT_SLEEP_INTERVAL: Duration = Duration::from_millis(500);
/// A [`Scheduler`] is in charge of executing scheduled tasks when they are due.
///
/// It is somewhat similar to a future, in the sense that by itself it does nothing,
/// and execution is driven by an "executor" (the [`Beat`](super::Beat)) which
/// is in charge of calling the scheduler's [`tick`](Scheduler::tick) method.
///
/// Internally it uses a min-heap to store tasks and efficiently retrieve the ones
/// that are due for execution.
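///
/// A rough usage sketch (assumes a concrete broker, a boxed task message factory and a
/// `Schedule` implementation are already available; names are illustrative):
///
/// ```ignore
/// let mut scheduler = Scheduler::new(broker);
/// scheduler.schedule_task("add".to_string(), message_factory, "celery".to_string(), schedule);
/// loop {
///     let next_tick_at = scheduler.tick().await?;
///     // Sleep until `next_tick_at` before ticking again.
/// }
/// ```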
pub struct Scheduler<B: Broker> {
heap: BinaryHeap<ScheduledTask>,
default_sleep_interval: Duration,
pub broker: B,
}
impl<B> Scheduler<B>
where
B: Broker,
{
/// Create a new scheduler which uses the given `broker`.
pub fn new(broker: B) -> Scheduler<B> {
Scheduler {
heap: BinaryHeap::new(),
default_sleep_interval: DEFAULT_SLEEP_INTERVAL,
broker,
}
}
/// Schedule the execution of a task.
pub fn schedule_task<S>(
&mut self,
name: String,
message_factory: Box<dyn TryCreateMessage>,
queue: String,
schedule: S,
) where
S: Schedule + 'static,
{
match schedule.next_call_at(None) {
Some(next_call_at) => self.heap.push(ScheduledTask::new(
name,
message_factory,
queue,
schedule,
next_call_at,
)),
None => debug!(
"The schedule of task {} never scheduled the task to run, so it has been dropped.",
name
),
}
}
/// Get all scheduled tasks.
pub fn get_scheduled_tasks(&mut self) -> &mut BinaryHeap<ScheduledTask> {
&mut self.heap
}
/// Get the time when the next task should be executed.
fn next_task_time(&self, now: SystemTime) -> SystemTime {
if let Some(scheduled_task) = self.heap.peek() {
debug!(
"Next scheduled task is at {:?}",
scheduled_task.next_call_at
);
scheduled_task.next_call_at
} else {
debug!(
"No scheduled tasks, sleeping for {:?}",
self.default_sleep_interval
);
now + self.default_sleep_interval
}
}
/// Tick once. This method checks if there is a scheduled task which is due
/// for execution and, if so, sends it to the broker.
/// It returns the time by which `tick` should be called again.
pub async fn tick(&mut self) -> Result<SystemTime, BeatError> {
let now = SystemTime::now();
let next_task_time = self.next_task_time(now);
if next_task_time <= now {
let mut scheduled_task = self
.heap
.pop()
.expect("No scheduled tasks found even though there should be");
let result = self.send_scheduled_task(&mut scheduled_task).await;
// Reschedule the task before checking if the task execution was successful.
// TODO: we may have more fine-grained logic here and reschedule the task
// only after examining the type of error.
if let Some(rescheduled_task) = scheduled_task.reschedule_task() {
self.heap.push(rescheduled_task);
} else {
debug!("A task is not scheduled to run anymore and will be dropped");
}
result?;
Ok(self.next_task_time(now))
} else {
Ok(next_task_time)
}
}
/// Send a task to the broker.
async fn send_scheduled_task(
&self,
scheduled_task: &mut ScheduledTask,
) -> Result<(), BeatError> {
let queue = &scheduled_task.queue;
let message = scheduled_task.message_factory.try_create_message()?;
info!(
"Sending task {}[{}] to {} queue",
scheduled_task.name,
message.task_id(),
queue
);
self.broker.send(&message, queue).await?;
scheduled_task.last_run_at.replace(SystemTime::now());
scheduled_task.total_run_count += 1;
Ok(())
}
}
| 33.201493 | 99 | 0.581479 |
6169f8b83314463aef4b40672c04ec249ec84a8c | 10,170 | //! A library to interface with libatasmart-sys. For more information about libatasmart-sys see
//! [libatasmart-sys](https://github.com/cholcombe973/libatasmart-sys)
//! This library is useful for gathering ata smart information from your hard drives concerning
//! their remaining lifetime. The underlying libatasmart doesn't expose every possible metric like
//! smartmontools but it does expose a few important ones like bad sector count and overall status.
//! This also has the advantage of avoiding CLI calls and scraping the text output which makes it
//! more reliable and also a lot more performant!
//!
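//! A minimal usage sketch (the device path is illustrative, and opening a disk normally
//! requires root privileges):
//!
//! ```ignore
//! use std::path::Path;
//! use libatasmart::Disk;
//!
//! let mut disk = Disk::new(Path::new("/dev/sda"))?;
//! if disk.get_smart_status()? {
//!     println!("temperature: {} mK", disk.get_temperature()?);
//! }
//! ```
//!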
use derive_error as de;
use libatasmart_sys::*;
use nix::errno::Errno;
use std::{ffi::CString, path::{Path, PathBuf}, mem::MaybeUninit};
pub use libatasmart_sys::SkSmartSelfTest;
pub extern crate nix;
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
#[test]
fn test_new_failure() {
match Disk::new(Path::new("/dev/null")) {
Ok(_) => panic!("Opening /dev/null succeeded"),
Err(e) => assert_eq!(Errno::ENODEV, e),
}
}
/*
#[test]
fn test_smart(){
let mut disk = Disk::new(Path::new("/dev/sda")).unwrap();
let ret = disk.get_smart_status();
println!("Smart status: {:?}", ret);
println!("Dumping disk stats");
let ret = disk.dump();
}
*/
}
#[derive(Debug, de::Error)]
pub enum Err {
#[error(message_embedded, non_std, no_from)]
Err(String),
Io(std::io::Error),
}
/// Our ata smart disk
pub struct Disk {
/// The path in the filesystem to the hard drive
pub disk: PathBuf,
skdisk: *mut SkDisk,
}
impl Disk {
/// This will initialize a new Disk by asking libatasmart to open it.
    /// Note that this usually requires root permissions to succeed.
pub fn new(disk_path: &Path) -> Result<Disk, Errno> {
let device = CString::new(disk_path.to_str().unwrap()).unwrap();
let mut disk = MaybeUninit::<SkDisk>::uninit().as_mut_ptr();
unsafe {
let ret = libatasmart_sys::sk_disk_open(device.as_ptr(), &mut disk);
if ret < 0 {
// Do not call sk_disk_free here, sk_disk_open already did that.
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
let ret = libatasmart_sys::sk_disk_smart_read_data(disk);
if ret < 0 {
let fail = nix::errno::errno();
sk_disk_free(disk);
return Err(Errno::from_i32(fail));
}
Ok(Disk {
disk: disk_path.to_path_buf(),
skdisk: disk,
})
}
}
/// Refreshes cached SMART attribute values.
///
/// SMART attribute values are read once in Disk::new and cached. Methods such as
/// `get_temperature` use these cached values and do not access the disk. Call this method to
/// refresh the cached values.
///
/// Note: calling this method might cause the disk to wake up from sleep. Consider checking if
/// the disk is asleep using `check_sleep_mode` before calling this method to avoid this.
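    ///
    /// A sketch of that pattern (error handling elided; `disk` is an open [`Disk`]):
    ///
    /// ```ignore
    /// if disk.check_sleep_mode()? {
    ///     // The disk is already awake, so refreshing will not spin it up.
    ///     disk.refresh_smart_data()?;
    /// }
    /// ```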
pub fn refresh_smart_data(&mut self) -> Result<(), Errno> {
unsafe {
let ret = sk_disk_smart_read_data(self.skdisk);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(())
}
}
/// Returns a u64 representing the size of the disk in bytes.
pub fn get_disk_size(&mut self) -> Result<u64, Errno> {
unsafe {
let mut bytes: u64 = 0;
let ret = sk_disk_get_size(self.skdisk, &mut bytes);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
return Ok(bytes);
}
}
    /// Returns true if the disk is awake (not in sleep/standby mode), false otherwise.
pub fn check_sleep_mode(&mut self) -> Result<bool, Errno> {
unsafe {
let mut mode: SkBool = 0;
let ret = sk_disk_check_sleep_mode(self.skdisk, &mut mode);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
if mode == 0 {
Ok(false)
} else {
Ok(true)
}
}
}
/// Returns a u64 representing the power on time in milliseconds
pub fn get_power_on(&mut self) -> Result<u64, Errno> {
unsafe {
let mut power_on_time: u64 = 0;
let ret = sk_disk_smart_get_power_on(self.skdisk, &mut power_on_time);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(power_on_time)
}
}
/// Returns a u64 representing the number of power on cycles
pub fn get_power_cycle_count(&mut self) -> Result<u64, Errno> {
unsafe {
let mut power_cycle_count: u64 = 0;
let ret = sk_disk_smart_get_power_cycle(self.skdisk, &mut power_cycle_count);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(power_cycle_count)
}
}
    /// Returns a u64 representing the number of bad sectors on the disk
pub fn get_bad_sectors(&mut self) -> Result<u64, Errno> {
unsafe {
let mut bad_sector_count: u64 = 0;
let ret = sk_disk_smart_get_bad(self.skdisk, &mut bad_sector_count);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(bad_sector_count)
}
}
    /// Returns a u64 representing the temperature of the disk in millikelvin (mK)
pub fn get_temperature(&mut self) -> Result<u64, Errno> {
unsafe {
let mut mkelvin: u64 = 0;
let ret = sk_disk_smart_get_temperature(self.skdisk, &mut mkelvin);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(mkelvin)
}
}
/// Returns true if the disk passed smart, false otherwise.
pub fn get_smart_status(&mut self) -> Result<bool, Errno> {
unsafe {
let mut good: SkBool = 0;
let ret = sk_disk_smart_status(self.skdisk, &mut good);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
if good == 0 {
Ok(false)
} else {
Ok(true)
}
}
}
/// This will dump all available information to stdout about the drive
pub fn dump(&mut self) -> Result<(), Errno> {
unsafe {
let ret = sk_disk_dump(self.skdisk);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(())
}
}
pub fn identify_is_available(&mut self) -> Result<bool, Errno> {
unsafe {
let mut available: SkBool = 0;
let ret = sk_disk_identify_is_available(self.skdisk, &mut available);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
if available == 0 {
Ok(false)
} else {
Ok(true)
}
}
}
/// Query the device and return whether or not smart is supported on it
pub fn smart_is_available(&mut self) -> Result<bool, Errno> {
unsafe {
let mut available: SkBool = 0;
let ret = sk_disk_smart_is_available(self.skdisk, &mut available);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
if available == 0 {
Ok(false)
} else {
Ok(true)
}
}
}
pub fn execute_smart_self_test(&mut self, test_type: SkSmartSelfTest) -> Result<(), Errno> {
unsafe {
let ret = sk_disk_smart_self_test(self.skdisk, test_type);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(())
}
}
pub fn smart_get_overall(&mut self) -> Result<SkSmartOverall, Errno> {
unsafe {
let mut overall: SkSmartOverall = SkSmartOverall::SK_SMART_OVERALL_GOOD;
let ret = sk_disk_smart_get_overall(self.skdisk, &mut overall);
if ret < 0 {
let fail = nix::errno::errno();
return Err(Errno::from_i32(fail));
}
Ok(overall)
}
}
}
impl Drop for Disk {
fn drop(&mut self) {
unsafe {
sk_disk_free(self.skdisk);
}
}
}
/*
pub fn sk_smart_self_test_execution_status_to_string(status: SkSmartSelfTestExecutionStatus) -> *const ::libc::c_char;
pub fn sk_smart_offline_data_collection_status_to_string(status: SkSmartOfflineDataCollectionStatus) -> *const ::libc::c_char;
pub fn sk_smart_self_test_to_string(test: SkSmartSelfTest) -> *const ::libc::c_char;
pub fn sk_smart_self_test_polling_minutes(d: *const SkSmartParsedData, test: SkSmartSelfTest) -> uint32_t;
pub fn sk_smart_self_test_available(d: *const SkSmartParsedData, test: SkSmartSelfTest) -> SkBool;
pub fn sk_disk_identify_parse(d: *mut *mut SkDisk, data: *const SkIdentifyParsedData) -> ::libc::c_int;
pub fn sk_disk_smart_read_data(d: *mut SkDisk) -> ::libc::c_int;
pub fn sk_disk_get_blob(d: *mut *mut SkDisk, blob: *const ::libc::c_void, size: *mut size_t) -> ::libc::c_int;
pub fn sk_disk_set_blob(d: *mut SkDisk, blob: *const ::libc::c_void, size: size_t) -> ::libc::c_int;
pub fn sk_disk_smart_parse(d: *mut *mut SkDisk, data: *const SkSmartParsedData) -> ::libc::c_int;
*/
| 34.948454 | 126 | 0.558997 |
eb1b999b3b19431b9537208a663219f9b1926d49 | 3,841 | //! Chain parameters, can be modified by proposals.
use config::ChainParameterConfig;
#[doc(inline)]
pub use proto::state::ChainParameter;
pub fn default_parameters() -> impl IntoIterator<Item = (ChainParameter, i64)> {
use ChainParameter::*;
return vec![
(MaintenanceInterval, 21600_000),
(MaxCpuTimeOfOneTxn, 50),
(RemovePowerOfGr, 0),
(AllowUpdateAccountName, 0),
(AllowSameTokenName, 0),
(AllowDelegateResource, 0),
(AllowMultisig, 0),
(AllowAccountStateRoot, 0),
(AllowChangeDelegation, 0),
(AllowTvm, 0),
(ForbidTransferToContract, 0),
(BandwidthPrice, 10),
(EnergyPrice, 100),
(WitnessCreateFee, 9999_000_000),
(AccountCreateFee, 100_000),
(AccountPermissionUpdateFee, 100_000_000),
(AssetIssueFee, 1024_000_000),
(ExchangeCreateFee, 1024_000_000),
(MultisigFee, 1_000_000),
(CreateNewAccountFeeInSystemContract, 0),
(CreateNewAccountBandwidthRate, 1),
(TotalEnergyLimit, 50_000_000_000),
// = TotalEnergyLimit
// (TotalEnergyCurrentLimit, 50_000_000_000),
(AllowAdaptiveEnergy, 0),
(AdaptiveResourceLimitTargetRatio, 14400),
(AdaptiveResourceLimitMultiplier, 1_000),
// Only used before AllowChangeDelegation
(StandbyWitnessAllowance, 115_200_000_000),
(WitnessPayPerBlock, 32_000_000),
(StandbyWitnessPayPerBlock, 16_000_000),
(AllowTvmTransferTrc10Upgrade, 0),
(AllowTvmConstantinopleUpgrade, 0),
(AllowTvmSolidity059Upgrade, 0),
(AllowTvmShieldedUpgrade, 0),
(AllowTvmIstanbulUpgrade, 0),
(AllowProtoFilterNum, 0),
];
}
pub fn default_parameters_from_config(
config: &ChainParameterConfig,
) -> impl IntoIterator<Item = (ChainParameter, i64)> {
use ChainParameter::*;
return vec![
(MaintenanceInterval, config.maintenance_interval),
(MaxCpuTimeOfOneTxn, 50),
(RemovePowerOfGr, 0),
(AllowUpdateAccountName, 0),
(AllowSameTokenName, config.allow_duplicate_asset_names as i64),
(AllowDelegateResource, config.allow_delegate_resource as i64),
(AllowMultisig, config.allow_multisig as i64),
(AllowAccountStateRoot, 0),
(AllowChangeDelegation, 0),
(AllowTvm, config.allow_tvm as i64),
(ForbidTransferToContract, 0),
(BandwidthPrice, config.bandwidth_price),
(EnergyPrice, config.energy_price),
(WitnessCreateFee, 9999_000_000),
(AccountCreateFee, 100_000),
(AccountPermissionUpdateFee, 100_000_000),
(AssetIssueFee, 1024_000_000),
(ExchangeCreateFee, 1024_000_000),
(MultisigFee, 1_000_000),
(CreateNewAccountFeeInSystemContract, 0),
(CreateNewAccountBandwidthRate, 1),
(TotalEnergyLimit, 50_000_000_000),
// Same as TotalEnergyLimit,
(TotalEnergyCurrentLimit, 50_000_000_000),
(AllowAdaptiveEnergy, config.allow_adaptive_energy as i64),
(AdaptiveResourceLimitTargetRatio, 14400),
(AdaptiveResourceLimitMultiplier, 1_000),
(WitnessPayPerBlock, 32_000_000),
(StandbyWitnessAllowance, 115_200_000_000),
(StandbyWitnessPayPerBlock, 16_000_000),
(
AllowTvmTransferTrc10Upgrade,
config.allow_tvm_transfer_trc10_upgrade as i64,
),
(
AllowTvmConstantinopleUpgrade,
config.allow_tvm_constantinople_upgrade as i64,
),
(AllowTvmSolidity059Upgrade, config.allow_tvm_solidity_059_upgrade as i64),
(AllowTvmShieldedUpgrade, config.allow_tvm_shielded_upgrade as i64),
(AllowTvmIstanbulUpgrade, config.allow_tvm_istanbul_upgrade as i64),
(AllowProtoFilterNum, 0),
];
}
| 37.656863 | 83 | 0.668316 |
cc6fcf165bc5e5b3fefa541fd321c79e6e0663d3 | 2,335 | //-
// Copyright (c) 2016, Jason Lingle
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
use gl::types::*;
use super::vbo::{Vbo,ActiveVbo};
use super::uniform::Uniform;
use super::vertex::Vertex;
use super::shader::{Shader,ActiveShader};
#[derive(Debug)]
pub struct Vao<'a, 'b: 'a, U: Uniform + 'b, V: Vertex + 'b> {
handle: GLuint,
shader: &'a Shader<'b,U,V>,
vbo: &'a Vbo<V>,
}
impl<'a, 'b: 'a, U: Uniform + 'b, V: Vertex + 'b> Drop for Vao<'a,'b,U,V> {
fn drop(&mut self) {
unsafe {
gl!(DeleteVertexArrays, 1, &self.handle);
} }
}
impl<'a, 'b: 'a, U: Uniform + 'b, V: Vertex + 'b> Vao<'a,'b,U,V> {
pub fn new(shader: &'a Shader<'b,U,V>,
vbo: &ActiveVbo<'a,V>) -> Result<Self,String> {
let this = try!(Vao::alloc(shader, vbo.0));
this.make_current();
this.set_up();
Ok(this)
}
fn alloc(shader: &'a Shader<'b,U,V>,
vbo: &'a Vbo<V>) -> Result<Self,String> {
unsafe {
let mut raw = 0 as GLuint;
gl!(GenVertexArrays, 1, &mut raw);
if 0 == raw {
Err("Failed to allocate VAO".to_string())
} else {
Ok(Vao { handle: raw, vbo: vbo, shader: shader })
}
} }
fn set_up(&self) {
V::install(self.shader.vertex_binding());
}
pub fn make_current(&self) {
unsafe {
gl!(BindVertexArray, self.handle);
} }
pub fn activate<'c>(&'c self, uniform: &U)
-> ActiveShader<'c,'b,U,V> {
let ret = self.shader.activate(uniform);
self.make_current();
ret
}
}
| 31.554054 | 79 | 0.587152 |
5090606aef13a40ce0af03156c679c3f7e3bf271 | 9,012 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
//! This documentation was generated from *AdMob* crate version *2.0.5+20210331*, where *20210331* is the exact revision of the *admob:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v2.0.5*.
//!
//! Everything else about the *AdMob* *v1* API can be found at the
//! [official documentation site](https://developers.google.com/admob/api/).
//! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/admob1).
//! # Features
//!
//! Handle the following *Resources* with ease from the central [hub](AdMob) ...
//!
//! * accounts
//! * [*get*](api::AccountGetCall), [*list*](api::AccountListCall), [*mediation report generate*](api::AccountMediationReportGenerateCall) and [*network report generate*](api::AccountNetworkReportGenerateCall)
//!
//!
//!
//!
//! Not what you are looking for ? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs).
//!
//! # Structure of this Library
//!
//! The API is structured into the following primary items:
//!
//! * **[Hub](AdMob)**
//! * a central object to maintain state and allow accessing all *Activities*
//! * creates [*Method Builders*](client::MethodsBuilder) which in turn
//! allow access to individual [*Call Builders*](client::CallBuilder)
//! * **[Resources](client::Resource)**
//! * primary types that you can apply *Activities* to
//! * a collection of properties and *Parts*
//! * **[Parts](client::Part)**
//! * a collection of properties
//! * never directly used in *Activities*
//! * **[Activities](client::CallBuilder)**
//! * operations to apply to *Resources*
//!
//! All *structures* are marked with applicable traits to further categorize them and ease browsing.
//!
//! Generally speaking, you can invoke *Activities* like this:
//!
//! ```rust,ignore
//! let r = hub.resource().activity(...).doit().await
//! ```
//!
//! Or specifically ...
//!
//! ```ignore
//! let r = hub.accounts().get(...).doit().await
//! ```
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [build up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-admob1 = "*"
//! hyper = "^0.14"
//! hyper-rustls = "^0.22"
//! serde = "^1.0"
//! serde_json = "^1.0"
//! yup-oauth2 = "^5.0"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate hyper_rustls;
//! extern crate yup_oauth2 as oauth2;
//! extern crate google_admob1 as admob1;
//! use admob1::{Result, Error};
//! # async fn dox() {
//! use std::default::Default;
//! use oauth2;
//! use admob1::AdMob;
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: oauth2::ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
//! secret,
//! yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
//! ).build().await.unwrap();
//! let mut hub = AdMob::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative !
//! let result = hub.accounts().get("name")
//! .doit().await;
//!
//! match result {
//! Err(e) => match e {
//! // The Error enum provides details about what exactly happened.
//! // You can also just use its `Debug`, `Display` or `Error` traits
//! Error::HttpError(_)
//! |Error::Io(_)
//! |Error::MissingAPIKey
//! |Error::MissingToken(_)
//! |Error::Cancelled
//! |Error::UploadSizeLimitExceeded(_, _)
//! |Error::Failure(_)
//! |Error::BadRequest(_)
//! |Error::FieldClash(_)
//! |Error::JsonDecodeError(_, _) => println!("{}", e),
//! },
//! Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as [Result](client::Result) enumeration as return value of
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, the response body, which is part of the [Result](client::Result), should be
//! read by you to obtain the media.
//! If such a method also supports a [Response Result](client::ResponseResult), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
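//!
//! For example, a hedged sketch reusing the `hub` and the `accounts().get(...)` call from the
//! example above (whether a given method actually supports media downloads depends on the API):
//!
//! ```ignore
//! let result = hub.accounts().get("name")
//!              .param("alt", "media")
//!              .doit().await;
//! ```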
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way an `doit()` method is called by providing a [delegate](client::Delegate) to the
//! [Method Builder](client::CallBuilder) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
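//!
//! A hedged sketch (the `MyDelegate` type is illustrative only and is assumed to implement
//! [`Delegate`](client::Delegate)):
//!
//! ```ignore
//! let mut dlg = MyDelegate::default();
//! let result = hub.accounts().get("name")
//!              .delegate(&mut dlg)
//!              .doit().await;
//! ```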
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](client::RequestValue) and
//! [decodable](client::ResponseResult) via *json*. Optionals are used to indicate that partial requests and responses
//! are valid.
//! Most optionals are considered [Parts](client::Part) which are identifiable by name, which will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are handed by copy
//! * strings are passed as `&str`
//! * [request values](client::RequestValue) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original life times.
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!
//!
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde;
extern crate serde_json;
extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;
pub mod api;
pub mod client;
// Re-export the hub type and some basic client structs
pub use api::AdMob;
pub use client::{Result, Error, Delegate};
| 43.960976 | 221 | 0.678429 |
899218d037e7da4cf5d86588f77e1f94e0e5d6ae | 1,076 | #[doc = "Reader of register DSCR2"]
pub type R = crate::R<u32, super::DSCR2>;
#[doc = "Writer for register DSCR2"]
pub type W = crate::W<u32, super::DSCR2>;
#[doc = "Register DSCR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::DSCR2 {
#[inline(always)]
fn reset_value() -> Self::Ux { 0 }
}
#[doc = "Reader of field `DSCR`"]
pub type DSCR_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `DSCR`"]
pub struct DSCR_W<'a> { w: &'a mut W }
impl<'a> DSCR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x3fff_ffff << 2)) | (((value as u32) & 0x3fff_ffff) << 2);
self.w
}
}
impl R {
#[doc = "Bits 2:31 - Buffer Transfer Descriptor Address"]
#[inline(always)]
pub fn dscr(&self) -> DSCR_R { DSCR_R::new(((self.bits >> 2) & 0x3fff_ffff) as u32) }
}
impl W {
#[doc = "Bits 2:31 - Buffer Transfer Descriptor Address"]
#[inline(always)]
pub fn dscr(&mut self) -> DSCR_W { DSCR_W { w: self } }
} | 29.081081 | 98 | 0.58829 |
fbb99f30b8ff88ecda3b3adb11898d99f31255d3 | 5,806 | use ibc_proto::ibc::core::commitment::v1::MerkleProof;
use crate::core::ics02_client::client_consensus::AnyConsensusState;
use crate::core::ics02_client::client_def::ClientDef;
use crate::core::ics02_client::client_state::AnyClientState;
use crate::core::ics02_client::context::ClientReader;
use crate::core::ics02_client::error::Error;
use crate::core::ics03_connection::connection::ConnectionEnd;
use crate::core::ics04_channel::channel::ChannelEnd;
use crate::core::ics04_channel::context::ChannelReader;
use crate::core::ics04_channel::msgs::acknowledgement::Acknowledgement;
use crate::core::ics04_channel::packet::Sequence;
use crate::core::ics23_commitment::commitment::{
CommitmentPrefix, CommitmentProofBytes, CommitmentRoot,
};
use crate::core::ics23_commitment::merkle::apply_prefix;
use crate::core::ics24_host::identifier::{ChannelId, ClientId, ConnectionId, PortId};
use crate::core::ics24_host::path::ClientConsensusStatePath;
use crate::core::ics24_host::Path;
use crate::mock::client_state::{MockClientState, MockConsensusState};
use crate::mock::header::MockHeader;
use crate::prelude::*;
use crate::Height;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MockClient;
impl ClientDef for MockClient {
type Header = MockHeader;
type ClientState = MockClientState;
type ConsensusState = MockConsensusState;
fn check_header_and_update_state(
&self,
_ctx: &dyn ClientReader,
_client_id: ClientId,
client_state: Self::ClientState,
header: Self::Header,
) -> Result<(Self::ClientState, Self::ConsensusState), Error> {
if client_state.latest_height() >= header.height() {
return Err(Error::low_header_height(
header.height(),
client_state.latest_height(),
));
}
Ok((
MockClientState::new(header),
MockConsensusState::new(header),
))
}
fn verify_client_consensus_state(
&self,
_client_state: &Self::ClientState,
_height: Height,
prefix: &CommitmentPrefix,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
client_id: &ClientId,
consensus_height: Height,
_expected_consensus_state: &AnyConsensusState,
) -> Result<(), Error> {
let client_prefixed_path = Path::ClientConsensusState(ClientConsensusStatePath {
client_id: client_id.clone(),
epoch: consensus_height.revision_number,
height: consensus_height.revision_height,
})
.to_string();
let _path = apply_prefix(prefix, vec![client_prefixed_path]);
Ok(())
}
fn verify_connection_state(
&self,
_client_state: &Self::ClientState,
_height: Height,
_prefix: &CommitmentPrefix,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_connection_id: &ConnectionId,
_expected_connection_end: &ConnectionEnd,
) -> Result<(), Error> {
Ok(())
}
fn verify_channel_state(
&self,
_client_state: &Self::ClientState,
_height: Height,
_prefix: &CommitmentPrefix,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_port_id: &PortId,
_channel_id: &ChannelId,
_expected_channel_end: &ChannelEnd,
) -> Result<(), Error> {
Ok(())
}
fn verify_client_full_state(
&self,
_client_state: &Self::ClientState,
_height: Height,
_prefix: &CommitmentPrefix,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_client_id: &ClientId,
_expected_client_state: &AnyClientState,
) -> Result<(), Error> {
Ok(())
}
fn verify_packet_data(
&self,
_ctx: &dyn ChannelReader,
_client_state: &Self::ClientState,
_height: Height,
_connection_end: &ConnectionEnd,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_port_id: &PortId,
_channel_id: &ChannelId,
_sequence: Sequence,
_commitment: String,
) -> Result<(), Error> {
Ok(())
}
fn verify_packet_acknowledgement(
&self,
_ctx: &dyn ChannelReader,
_client_state: &Self::ClientState,
_height: Height,
_connection_end: &ConnectionEnd,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_port_id: &PortId,
_channel_id: &ChannelId,
_sequence: Sequence,
_ack: Acknowledgement,
) -> Result<(), Error> {
Ok(())
}
fn verify_next_sequence_recv(
&self,
_ctx: &dyn ChannelReader,
_client_state: &Self::ClientState,
_height: Height,
_connection_end: &ConnectionEnd,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_port_id: &PortId,
_channel_id: &ChannelId,
_sequence: Sequence,
) -> Result<(), Error> {
Ok(())
}
fn verify_packet_receipt_absence(
&self,
_ctx: &dyn ChannelReader,
_client_state: &Self::ClientState,
_height: Height,
_connection_end: &ConnectionEnd,
_proof: &CommitmentProofBytes,
_root: &CommitmentRoot,
_port_id: &PortId,
_channel_id: &ChannelId,
_sequence: Sequence,
) -> Result<(), Error> {
Ok(())
}
fn verify_upgrade_and_update_state(
&self,
client_state: &Self::ClientState,
consensus_state: &Self::ConsensusState,
_proof_upgrade_client: MerkleProof,
_proof_upgrade_consensus_state: MerkleProof,
) -> Result<(Self::ClientState, Self::ConsensusState), Error> {
Ok((*client_state, consensus_state.clone()))
}
}
| 31.048128 | 88 | 0.631932 |
e47cc3d133a51076dfc57133ab8c073593ce18cd | 4,229 | extern crate serde;
use self::serde::de::value::{MapDeserializer, SeqDeserializer};
use self::serde::de::{
Deserialize, Deserializer, Error, IntoDeserializer, MapAccess, SeqAccess, Visitor,
};
use self::serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
use std::fmt::{self, Formatter};
use std::hash::{BuildHasher, Hash};
use std::marker::PhantomData;
use IndexMap;
/// Requires crate feature `"serde-1"`
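///
/// A hedged usage sketch (assumes the `serde-1` feature is enabled and that `serde_json` is
/// available as the serializer):
///
/// ```ignore
/// use indexmap::IndexMap;
///
/// let mut map = IndexMap::new();
/// map.insert("b", 2);
/// map.insert("a", 1);
/// // Entries serialize in insertion order, not in sorted key order.
/// assert_eq!(serde_json::to_string(&map).unwrap(), r#"{"b":2,"a":1}"#);
/// ```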
impl<K, V, S> Serialize for IndexMap<K, V, S>
where
K: Serialize + Hash + Eq,
V: Serialize,
S: BuildHasher,
{
fn serialize<T>(&self, serializer: T) -> Result<T::Ok, T::Error>
where
T: Serializer,
{
let mut map_serializer = serializer.serialize_map(Some(self.len()))?;
for (key, value) in self {
map_serializer.serialize_entry(key, value)?;
}
map_serializer.end()
}
}
struct OrderMapVisitor<K, V, S>(PhantomData<(K, V, S)>);
impl<'de, K, V, S> Visitor<'de> for OrderMapVisitor<K, V, S>
where
K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
S: Default + BuildHasher,
{
type Value = IndexMap<K, V, S>;
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
write!(formatter, "a map")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
let mut values =
IndexMap::with_capacity_and_hasher(map.size_hint().unwrap_or(0), S::default());
while let Some((key, value)) = map.next_entry()? {
values.insert(key, value);
}
Ok(values)
}
}
/// Requires crate feature `"serde-1"`
impl<'de, K, V, S> Deserialize<'de> for IndexMap<K, V, S>
where
K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
S: Default + BuildHasher,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_map(OrderMapVisitor(PhantomData))
}
}
impl<'de, K, V, S, E> IntoDeserializer<'de, E> for IndexMap<K, V, S>
where
K: IntoDeserializer<'de, E> + Eq + Hash,
V: IntoDeserializer<'de, E>,
S: BuildHasher,
E: Error,
{
type Deserializer = MapDeserializer<'de, <Self as IntoIterator>::IntoIter, E>;
fn into_deserializer(self) -> Self::Deserializer {
MapDeserializer::new(self.into_iter())
}
}
use IndexSet;
/// Requires crate feature `"serde-1"`
impl<T, S> Serialize for IndexSet<T, S>
where
T: Serialize + Hash + Eq,
S: BuildHasher,
{
fn serialize<Se>(&self, serializer: Se) -> Result<Se::Ok, Se::Error>
where
Se: Serializer,
{
let mut set_serializer = serializer.serialize_seq(Some(self.len()))?;
for value in self {
set_serializer.serialize_element(value)?;
}
set_serializer.end()
}
}
struct OrderSetVisitor<T, S>(PhantomData<(T, S)>);
impl<'de, T, S> Visitor<'de> for OrderSetVisitor<T, S>
where
T: Deserialize<'de> + Eq + Hash,
S: Default + BuildHasher,
{
type Value = IndexSet<T, S>;
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
write!(formatter, "a set")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let mut values =
IndexSet::with_capacity_and_hasher(seq.size_hint().unwrap_or(0), S::default());
while let Some(value) = seq.next_element()? {
values.insert(value);
}
Ok(values)
}
}
/// Requires crate feature `"serde-1"`
impl<'de, T, S> Deserialize<'de> for IndexSet<T, S>
where
T: Deserialize<'de> + Eq + Hash,
S: Default + BuildHasher,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_seq(OrderSetVisitor(PhantomData))
}
}
impl<'de, T, S, E> IntoDeserializer<'de, E> for IndexSet<T, S>
where
T: IntoDeserializer<'de, E> + Eq + Hash,
S: BuildHasher,
E: Error,
{
type Deserializer = SeqDeserializer<<Self as IntoIterator>::IntoIter, E>;
fn into_deserializer(self) -> Self::Deserializer {
SeqDeserializer::new(self.into_iter())
}
}
| 25.475904 | 91 | 0.608182 |
48bbadac472483a52626718a97538c5ee132b315 | 2,054 | // Copyright 2018 Rust-NumExt Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![recursion_limit = "512"]
extern crate proc_macro;
#[macro_use]
extern crate quote;
#[macro_use]
mod utils;
mod definition;
mod fixed_hash;
mod fixed_uint;
use quote::quote;
use syn::parse_macro_input;
#[proc_macro]
pub fn construct_fixed_uints(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let inputs = parse_macro_input!(input as definition::Definitions);
let expanded = {
inputs
.inner
.into_iter()
.map(|input| {
let parsed: fixed_uint::parsed::UintDefinition = input.into();
fixed_uint::core::UintConstructor::new(parsed)
})
.fold((quote!(), Vec::new()), |(uints, mut ucs), uc| {
let (uint, public) = uc.construct_all(&ucs[..]);
let uints = quote!(#uints #public #uint);
ucs.push(uc);
(uints, ucs)
})
.0
};
expanded.into()
}
#[proc_macro]
pub fn construct_fixed_hashes(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let inputs = parse_macro_input!(input as definition::Definitions);
let expanded = {
inputs
.inner
.into_iter()
.map(|input| {
let parsed: fixed_hash::parsed::HashDefinition = input.into();
fixed_hash::core::HashConstructor::new(parsed)
})
.fold((quote!(), Vec::new()), |(hashes, mut ucs), uc| {
let (hash, public) = uc.construct_all(&ucs[..]);
let hashes = quote!(#hashes #public #hash);
ucs.push(uc);
(hashes, ucs)
})
.0
};
expanded.into()
}
| 30.205882 | 90 | 0.572055 |
678e33301460eb742764900d56c6057b7bb34ba7 | 20,348 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
mod error;
pub use error::Error;
use anyhow::Result;
use diem_crypto::{
ed25519::Ed25519Signature,
hash::{
EventAccumulatorHasher, TransactionAccumulatorHasher, ACCUMULATOR_PLACEHOLDER_HASH,
SPARSE_MERKLE_PLACEHOLDER_HASH,
},
HashValue,
};
use diem_types::{
account_address::AccountAddress,
account_state_blob::AccountStateBlob,
contract_event::ContractEvent,
epoch_state::EpochState,
ledger_info::LedgerInfoWithSignatures,
nibble::nibble_path::NibblePath,
on_chain_config,
proof::{accumulator::InMemoryAccumulator, AccumulatorExtensionProof},
transaction::{
default_protocol::{TransactionListWithProof, TransactionOutputListWithProof},
Transaction, TransactionInfo, TransactionOutput, TransactionStatus, TransactionToCommit,
Version,
},
write_set::WriteSet,
};
use scratchpad::ProofRead;
use serde::{Deserialize, Serialize};
use std::{cmp::max, collections::HashMap, sync::Arc};
use storage_interface::TreeState;
type SparseMerkleProof = diem_types::proof::SparseMerkleProof<AccountStateBlob>;
type SparseMerkleTree = scratchpad::SparseMerkleTree<AccountStateBlob>;
pub trait ChunkExecutor: Send + Sync {
/// Verifies the transactions based on the provided proofs and ledger info. If the transactions
/// are valid, executes them and returns the executed result for commit.
fn execute_chunk(
&self,
txn_list_with_proof: TransactionListWithProof,
// Target LI that has been verified independently: the proofs are relative to this version.
verified_target_li: LedgerInfoWithSignatures,
) -> Result<(
ProcessedVMOutput,
Vec<TransactionToCommit>,
Vec<ContractEvent>,
)>;
/// Similar to `execute_chunk`, but instead of executing transactions, apply the transaction
/// outputs directly to get the executed result.
fn apply_chunk(
&self,
txn_output_list_with_proof: TransactionOutputListWithProof,
// Target LI that has been verified independently: the proofs are relative to this version.
verified_target_li: LedgerInfoWithSignatures,
) -> anyhow::Result<(
ProcessedVMOutput,
Vec<TransactionToCommit>,
Vec<ContractEvent>,
)>;
/// Commit a previously executed chunks, returns a vector of reconfiguration events in the chunk.
fn commit_chunk(
&self,
verified_target_li: LedgerInfoWithSignatures,
// An optional end of epoch LedgerInfo. We do not allow chunks that end epoch without
// carrying any epoch change LI.
epoch_change_li: Option<LedgerInfoWithSignatures>,
output: ProcessedVMOutput,
txns_to_commit: Vec<TransactionToCommit>,
events: Vec<ContractEvent>,
) -> anyhow::Result<Vec<ContractEvent>>;
fn execute_or_apply_chunk(
&self,
first_version: u64,
transactions: Vec<Transaction>,
transaction_outputs: Option<Vec<TransactionOutput>>,
transaction_infos: Vec<TransactionInfo>,
) -> Result<(
ProcessedVMOutput,
Vec<TransactionToCommit>,
Vec<ContractEvent>,
)>;
fn execute_and_commit_chunk(
&self,
txn_list_with_proof: TransactionListWithProof,
verified_target_li: LedgerInfoWithSignatures,
epoch_change_li: Option<LedgerInfoWithSignatures>,
) -> Result<Vec<ContractEvent>> {
let (output, txns_to_commit, events) =
self.execute_chunk(txn_list_with_proof, verified_target_li.clone())?;
self.commit_chunk(
verified_target_li,
epoch_change_li,
output,
txns_to_commit,
events,
)
}
fn apply_and_commit_chunk(
&self,
txn_output_list_with_proof: TransactionOutputListWithProof,
verified_target_li: LedgerInfoWithSignatures,
epoch_change_li: Option<LedgerInfoWithSignatures>,
) -> Result<Vec<ContractEvent>> {
let (output, txns_to_commit, events) =
self.apply_chunk(txn_output_list_with_proof, verified_target_li.clone())?;
self.commit_chunk(
verified_target_li,
epoch_change_li,
output,
txns_to_commit,
events,
)
}
}
pub trait BlockExecutor: Send + Sync {
/// Get the latest committed block id
fn committed_block_id(&self) -> Result<HashValue, Error>;
/// Reset the internal state including cache with newly fetched latest committed block from storage.
fn reset(&self) -> Result<(), Error>;
/// Executes a block.
fn execute_block(
&self,
block: (HashValue, Vec<Transaction>),
parent_block_id: HashValue,
) -> Result<StateComputeResult, Error>;
/// Saves eligible blocks to persistent storage.
/// If we have multiple blocks and not all of them have signatures, we may send them to storage
/// in a few batches. For example, if we have
/// ```text
/// A <- B <- C <- D <- E
/// ```
/// and only `C` and `E` have signatures, we will send `A`, `B` and `C` in the first batch,
/// then `D` and `E` later in the another batch.
/// Commits a block and all its ancestors in a batch manner.
fn commit_blocks(
&self,
block_ids: Vec<HashValue>,
ledger_info_with_sigs: LedgerInfoWithSignatures,
) -> Result<(), Error>;
}
pub trait TransactionReplayer: Send {
fn replay_chunk(
&self,
first_version: Version,
txns: Vec<Transaction>,
txn_infos: Vec<TransactionInfo>,
) -> Result<()>;
fn expecting_version(&self) -> Version;
}
/// A structure that summarizes the result of the execution needed for consensus to agree on.
/// The execution is responsible for generating the ID of the new state, which is returned in the
/// result.
///
/// Not every transaction in the payload succeeds: the returned vector keeps the boolean status
/// of success / failure of the transactions.
/// Note that the specific details of compute_status are opaque to StateMachineReplication,
/// which is going to simply pass the results between StateComputer and TxnManager.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct StateComputeResult {
/// transaction accumulator root hash is identified as `state_id` in Consensus.
root_hash: HashValue,
/// Represents the roots of all the full subtrees from left to right in this accumulator
/// after the execution. For details, please see [`InMemoryAccumulator`](accumulator::InMemoryAccumulator).
frozen_subtree_roots: Vec<HashValue>,
    /// The frozen subtree roots of the parent block.
parent_frozen_subtree_roots: Vec<HashValue>,
/// The number of leaves of the transaction accumulator after executing a proposed block.
    /// This state must be persisted to ensure that, on restart, the version is calculated correctly.
num_leaves: u64,
    /// The number of leaves after executing the parent block.
parent_num_leaves: u64,
/// If set, this is the new epoch info that should be changed to if this block is committed.
epoch_state: Option<EpochState>,
/// The compute status (success/failure) of the given payload. The specific details are opaque
/// for StateMachineReplication, which is merely passing it between StateComputer and
/// TxnManager.
compute_status: Vec<TransactionStatus>,
/// The transaction info hashes of all success txns.
transaction_info_hashes: Vec<HashValue>,
/// The signature of the VoteProposal corresponding to this block.
signature: Option<Ed25519Signature>,
reconfig_events: Vec<ContractEvent>,
}
impl StateComputeResult {
pub fn new(
root_hash: HashValue,
frozen_subtree_roots: Vec<HashValue>,
num_leaves: u64,
parent_frozen_subtree_roots: Vec<HashValue>,
parent_num_leaves: u64,
epoch_state: Option<EpochState>,
compute_status: Vec<TransactionStatus>,
transaction_info_hashes: Vec<HashValue>,
reconfig_events: Vec<ContractEvent>,
) -> Self {
Self {
root_hash,
frozen_subtree_roots,
num_leaves,
parent_frozen_subtree_roots,
parent_num_leaves,
epoch_state,
compute_status,
transaction_info_hashes,
reconfig_events,
signature: None,
}
}
/// generate a new dummy state compute result with a given root hash.
/// this function is used in RandomComputeResultStateComputer to assert that the compute
/// function is really called.
pub fn new_dummy_with_root_hash(root_hash: HashValue) -> Self {
Self {
root_hash,
frozen_subtree_roots: vec![],
num_leaves: 0,
parent_frozen_subtree_roots: vec![],
parent_num_leaves: 0,
epoch_state: None,
compute_status: vec![],
transaction_info_hashes: vec![],
reconfig_events: vec![],
signature: None,
}
}
/// generate a new dummy state compute result with ACCUMULATOR_PLACEHOLDER_HASH as the root hash.
/// this function is used in ordering_state_computer as a dummy state compute result,
/// where the real compute result is generated after ordering_state_computer.commit pushes
/// the blocks and the finality proof to the execution phase.
pub fn new_dummy() -> Self {
StateComputeResult::new_dummy_with_root_hash(*ACCUMULATOR_PLACEHOLDER_HASH)
}
}
impl StateComputeResult {
pub fn version(&self) -> Version {
max(self.num_leaves, 1)
.checked_sub(1)
.expect("Integer overflow occurred")
}
pub fn root_hash(&self) -> HashValue {
self.root_hash
}
pub fn compute_status(&self) -> &Vec<TransactionStatus> {
&self.compute_status
}
pub fn epoch_state(&self) -> &Option<EpochState> {
&self.epoch_state
}
pub fn extension_proof(&self) -> AccumulatorExtensionProof<TransactionAccumulatorHasher> {
AccumulatorExtensionProof::<TransactionAccumulatorHasher>::new(
self.parent_frozen_subtree_roots.clone(),
self.parent_num_leaves(),
self.transaction_info_hashes().clone(),
)
}
pub fn transaction_info_hashes(&self) -> &Vec<HashValue> {
&self.transaction_info_hashes
}
pub fn num_leaves(&self) -> u64 {
self.num_leaves
}
pub fn frozen_subtree_roots(&self) -> &Vec<HashValue> {
&self.frozen_subtree_roots
}
pub fn parent_num_leaves(&self) -> u64 {
self.parent_num_leaves
}
pub fn parent_frozen_subtree_roots(&self) -> &Vec<HashValue> {
&self.parent_frozen_subtree_roots
}
pub fn has_reconfiguration(&self) -> bool {
self.epoch_state.is_some()
}
pub fn reconfig_events(&self) -> &[ContractEvent] {
&self.reconfig_events
}
pub fn signature(&self) -> &Option<Ed25519Signature> {
&self.signature
}
pub fn set_signature(&mut self, sig: Ed25519Signature) {
self.signature = Some(sig);
}
}
/// A wrapper of the in-memory state sparse merkle tree and the transaction accumulator that
/// represent a specific state collectively. Usually it is a state after executing a block.
#[derive(Clone, Debug)]
pub struct ExecutedTrees {
/// The in-memory Sparse Merkle Tree representing a specific state after execution. If this
    /// tree represents the latest committed state, it will have a single Subtree node (or
/// Empty node) whose hash equals the root hash of the newest Sparse Merkle Tree in
/// storage.
state_tree: Arc<SparseMerkleTree>,
/// The in-memory Merkle Accumulator representing a blockchain state consistent with the
/// `state_tree`.
transaction_accumulator: Arc<InMemoryAccumulator<TransactionAccumulatorHasher>>,
}
impl From<TreeState> for ExecutedTrees {
fn from(tree_state: TreeState) -> Self {
ExecutedTrees::new(
tree_state.account_state_root_hash,
tree_state.ledger_frozen_subtree_hashes,
tree_state.num_transactions,
)
}
}
impl ExecutedTrees {
pub fn new_copy(
state_tree: Arc<SparseMerkleTree>,
transaction_accumulator: Arc<InMemoryAccumulator<TransactionAccumulatorHasher>>,
) -> Self {
Self {
state_tree,
transaction_accumulator,
}
}
pub fn state_tree(&self) -> &Arc<SparseMerkleTree> {
&self.state_tree
}
pub fn txn_accumulator(&self) -> &Arc<InMemoryAccumulator<TransactionAccumulatorHasher>> {
&self.transaction_accumulator
}
pub fn version(&self) -> Option<Version> {
let num_elements = self.txn_accumulator().num_leaves() as u64;
num_elements.checked_sub(1)
}
pub fn state_id(&self) -> HashValue {
self.txn_accumulator().root_hash()
}
pub fn state_root(&self) -> HashValue {
self.state_tree().root_hash()
}
pub fn new(
state_root_hash: HashValue,
frozen_subtrees_in_accumulator: Vec<HashValue>,
num_leaves_in_accumulator: u64,
) -> ExecutedTrees {
ExecutedTrees {
state_tree: Arc::new(SparseMerkleTree::new(state_root_hash)),
transaction_accumulator: Arc::new(
InMemoryAccumulator::new(frozen_subtrees_in_accumulator, num_leaves_in_accumulator)
.expect("The startup info read from storage should be valid."),
),
}
}
pub fn new_empty() -> ExecutedTrees {
Self::new(*SPARSE_MERKLE_PLACEHOLDER_HASH, vec![], 0)
}
}
pub struct ProofReader {
account_to_proof: HashMap<HashValue, SparseMerkleProof>,
}
impl ProofReader {
pub fn new(account_to_proof: HashMap<HashValue, SparseMerkleProof>) -> Self {
ProofReader { account_to_proof }
}
}
impl ProofRead<AccountStateBlob> for ProofReader {
fn get_proof(&self, key: HashValue) -> Option<&SparseMerkleProof> {
self.account_to_proof.get(&key)
}
}
/// The entire set of data associated with a transaction. In addition to the output generated by VM
/// which includes the write set and events, this also has the in-memory trees.
#[derive(Clone, Debug)]
pub struct TransactionData {
/// Each entry in this map represents the new blob value of an account touched by this
/// transaction. The blob is obtained by deserializing the previous blob into a BTreeMap,
    /// applying the relevant portion of the write set to the map and serializing the updated map into a
/// new blob.
account_blobs: HashMap<AccountAddress, AccountStateBlob>,
    /// Each entry in this map represents the hash of a newly generated jellyfish node
/// and its corresponding nibble path.
jf_node_hashes: HashMap<NibblePath, HashValue>,
/// The writeset generated from this transaction.
write_set: WriteSet,
/// The list of events emitted during this transaction.
events: Vec<ContractEvent>,
/// The execution status set by the VM.
status: TransactionStatus,
/// Root hash of the state tree.
state_root_hash: HashValue,
/// The in-memory Merkle Accumulator that has all events emitted by this transaction.
event_tree: Arc<InMemoryAccumulator<EventAccumulatorHasher>>,
/// The amount of gas used.
gas_used: u64,
/// The transaction info hash if the VM status output was keep, None otherwise
txn_info_hash: Option<HashValue>,
}
impl TransactionData {
pub fn new(
account_blobs: HashMap<AccountAddress, AccountStateBlob>,
jf_node_hashes: HashMap<NibblePath, HashValue>,
write_set: WriteSet,
events: Vec<ContractEvent>,
status: TransactionStatus,
state_root_hash: HashValue,
event_tree: Arc<InMemoryAccumulator<EventAccumulatorHasher>>,
gas_used: u64,
txn_info_hash: Option<HashValue>,
) -> Self {
TransactionData {
account_blobs,
jf_node_hashes,
write_set,
events,
status,
state_root_hash,
event_tree,
gas_used,
txn_info_hash,
}
}
pub fn account_blobs(&self) -> &HashMap<AccountAddress, AccountStateBlob> {
&self.account_blobs
}
pub fn jf_node_hashes(&self) -> &HashMap<NibblePath, HashValue> {
&self.jf_node_hashes
}
pub fn write_set(&self) -> &WriteSet {
&self.write_set
}
pub fn events(&self) -> &[ContractEvent] {
&self.events
}
pub fn status(&self) -> &TransactionStatus {
&self.status
}
pub fn state_root_hash(&self) -> HashValue {
self.state_root_hash
}
pub fn event_root_hash(&self) -> HashValue {
self.event_tree.root_hash()
}
pub fn gas_used(&self) -> u64 {
self.gas_used
}
pub fn txn_info_hash(&self) -> Option<HashValue> {
self.txn_info_hash
}
}
/// The output of Processing the vm output of a series of transactions to the parent
/// in-memory state merkle tree and accumulator.
#[derive(Debug, Clone)]
pub struct ProcessedVMOutput {
/// The entire set of data associated with each transaction.
transaction_data: Vec<TransactionData>,
/// The in-memory Merkle Accumulator and state Sparse Merkle Tree after appending all the
/// transactions in this set.
executed_trees: ExecutedTrees,
/// If set, this is the new epoch info that should be changed to if this block is committed.
epoch_state: Option<EpochState>,
}
impl ProcessedVMOutput {
pub fn new(
transaction_data: Vec<TransactionData>,
executed_trees: ExecutedTrees,
epoch_state: Option<EpochState>,
) -> Self {
ProcessedVMOutput {
transaction_data,
executed_trees,
epoch_state,
}
}
pub fn transaction_data(&self) -> &[TransactionData] {
&self.transaction_data
}
pub fn executed_trees(&self) -> &ExecutedTrees {
&self.executed_trees
}
pub fn accu_root(&self) -> HashValue {
self.executed_trees().state_id()
}
pub fn version(&self) -> Option<Version> {
self.executed_trees().version()
}
pub fn epoch_state(&self) -> &Option<EpochState> {
&self.epoch_state
}
pub fn has_reconfiguration(&self) -> bool {
self.epoch_state.is_some()
}
pub fn compute_result(
&self,
parent_frozen_subtree_roots: Vec<HashValue>,
parent_num_leaves: u64,
) -> StateComputeResult {
let new_epoch_event_key = on_chain_config::new_epoch_event_key();
let txn_accu = self.executed_trees().txn_accumulator();
let mut compute_status = Vec::new();
let mut transaction_info_hashes = Vec::new();
let mut reconfig_events = Vec::new();
for txn_data in self.transaction_data() {
let status = txn_data.status();
compute_status.push(status.clone());
if matches!(status, TransactionStatus::Keep(_)) {
transaction_info_hashes.push(txn_data.txn_info_hash().expect("Txn to be kept."));
reconfig_events.extend(
txn_data
.events()
.iter()
.filter(|e| *e.key() == new_epoch_event_key)
.cloned(),
)
}
}
// Now that we have the root hash and execution status we can send the response to
// consensus.
// TODO: The VM will support a special transaction to set the validators for the
// next epoch that is part of a block execution.
StateComputeResult::new(
self.accu_root(),
txn_accu.frozen_subtree_roots().clone(),
txn_accu.num_leaves(),
parent_frozen_subtree_roots,
parent_num_leaves,
self.epoch_state.clone(),
compute_status,
transaction_info_hashes,
reconfig_events,
)
}
}
| 33.140065 | 111 | 0.656969 |
918fa5f7dc12467bb6a19fd5601756d5ccf027eb | 12,508 | use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{differing_macro_contexts, path_to_local, usage::is_potentially_mutated};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, PathSegment, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::Ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for calls of `unwrap[_err]()` that cannot fail.
///
/// ### Why is this bad?
/// Using `if let` or `match` is more idiomatic.
///
/// ### Example
/// ```rust
/// # let option = Some(0);
/// # fn do_something_with(_x: usize) {}
/// if option.is_some() {
/// do_something_with(option.unwrap())
/// }
/// ```
///
/// Could be written:
///
/// ```rust
/// # let option = Some(0);
/// # fn do_something_with(_x: usize) {}
/// if let Some(value) = option {
/// do_something_with(value)
/// }
/// ```
#[clippy::version = "pre 1.29.0"]
pub UNNECESSARY_UNWRAP,
complexity,
"checks for calls of `unwrap[_err]()` that cannot fail"
}
declare_clippy_lint! {
/// ### What it does
/// Checks for calls of `unwrap[_err]()` that will always fail.
///
/// ### Why is this bad?
/// If panicking is desired, an explicit `panic!()` should be used.
///
/// ### Known problems
/// This lint only checks `if` conditions not assignments.
/// So something like `let x: Option<()> = None; x.unwrap();` will not be recognized.
///
/// ### Example
/// ```rust
/// # let option = Some(0);
/// # fn do_something_with(_x: usize) {}
/// if option.is_none() {
/// do_something_with(option.unwrap())
/// }
/// ```
///
/// This code will always panic. The if condition should probably be inverted.
#[clippy::version = "pre 1.29.0"]
pub PANICKING_UNWRAP,
correctness,
"checks for calls of `unwrap[_err]()` that will always fail"
}
/// Visitor that keeps track of which variables are unwrappable.
struct UnwrappableVariablesVisitor<'a, 'tcx> {
unwrappables: Vec<UnwrapInfo<'tcx>>,
cx: &'a LateContext<'tcx>,
}
/// What kind of unwrappable this is.
#[derive(Copy, Clone, Debug)]
enum UnwrappableKind {
Option,
Result,
}
impl UnwrappableKind {
fn success_variant_pattern(self) -> &'static str {
match self {
UnwrappableKind::Option => "Some(..)",
UnwrappableKind::Result => "Ok(..)",
}
}
fn error_variant_pattern(self) -> &'static str {
match self {
UnwrappableKind::Option => "None",
UnwrappableKind::Result => "Err(..)",
}
}
}
/// Contains information about whether a variable can be unwrapped.
#[derive(Copy, Clone, Debug)]
struct UnwrapInfo<'tcx> {
/// The variable that is checked
local_id: HirId,
/// The if itself
if_expr: &'tcx Expr<'tcx>,
/// The check, like `x.is_ok()`
check: &'tcx Expr<'tcx>,
/// The check's name, like `is_ok`
check_name: &'tcx PathSegment<'tcx>,
/// The branch where the check takes place, like `if x.is_ok() { .. }`
branch: &'tcx Expr<'tcx>,
/// Whether `is_some()` or `is_ok()` was called (as opposed to `is_err()` or `is_none()`).
safe_to_unwrap: bool,
/// What kind of unwrappable this is.
kind: UnwrappableKind,
/// If the check is the entire condition (`if x.is_ok()`) or only a part of it (`foo() &&
/// x.is_ok()`)
is_entire_condition: bool,
}
/// Collects the information about unwrappable variables from an if condition
/// The `invert` argument tells us whether the condition is negated.
fn collect_unwrap_info<'tcx>(
cx: &LateContext<'tcx>,
if_expr: &'tcx Expr<'_>,
expr: &'tcx Expr<'_>,
branch: &'tcx Expr<'_>,
invert: bool,
is_entire_condition: bool,
) -> Vec<UnwrapInfo<'tcx>> {
fn is_relevant_option_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
is_type_diagnostic_item(cx, ty, sym::Option) && ["is_some", "is_none"].contains(&method_name)
}
fn is_relevant_result_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
is_type_diagnostic_item(cx, ty, sym::Result) && ["is_ok", "is_err"].contains(&method_name)
}
if let ExprKind::Binary(op, left, right) = &expr.kind {
match (invert, op.node) {
(false, BinOpKind::And | BinOpKind::BitAnd) | (true, BinOpKind::Or | BinOpKind::BitOr) => {
let mut unwrap_info = collect_unwrap_info(cx, if_expr, left, branch, invert, false);
unwrap_info.append(&mut collect_unwrap_info(cx, if_expr, right, branch, invert, false));
return unwrap_info;
},
_ => (),
}
} else if let ExprKind::Unary(UnOp::Not, expr) = &expr.kind {
return collect_unwrap_info(cx, if_expr, expr, branch, !invert, false);
} else {
if_chain! {
if let ExprKind::MethodCall(method_name, _, args, _) = &expr.kind;
if let Some(local_id) = path_to_local(&args[0]);
let ty = cx.typeck_results().expr_ty(&args[0]);
let name = method_name.ident.as_str();
if is_relevant_option_call(cx, ty, name) || is_relevant_result_call(cx, ty, name);
then {
assert!(args.len() == 1);
let unwrappable = match name {
"is_some" | "is_ok" => true,
"is_err" | "is_none" => false,
_ => unreachable!(),
};
let safe_to_unwrap = unwrappable != invert;
let kind = if is_type_diagnostic_item(cx, ty, sym::Option) {
UnwrappableKind::Option
} else {
UnwrappableKind::Result
};
return vec![
UnwrapInfo {
local_id,
if_expr,
check: expr,
check_name: method_name,
branch,
safe_to_unwrap,
kind,
is_entire_condition,
}
]
}
}
}
Vec::new()
}
impl<'a, 'tcx> UnwrappableVariablesVisitor<'a, 'tcx> {
fn visit_branch(
&mut self,
if_expr: &'tcx Expr<'_>,
cond: &'tcx Expr<'_>,
branch: &'tcx Expr<'_>,
else_branch: bool,
) {
let prev_len = self.unwrappables.len();
for unwrap_info in collect_unwrap_info(self.cx, if_expr, cond, branch, else_branch, true) {
if is_potentially_mutated(unwrap_info.local_id, cond, self.cx)
|| is_potentially_mutated(unwrap_info.local_id, branch, self.cx)
{
// if the variable is mutated, we don't know whether it can be unwrapped:
continue;
}
self.unwrappables.push(unwrap_info);
}
walk_expr(self, branch);
self.unwrappables.truncate(prev_len);
}
}
impl<'a, 'tcx> Visitor<'tcx> for UnwrappableVariablesVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
// Shouldn't lint when `expr` is in macro.
if in_external_macro(self.cx.tcx.sess, expr.span) {
return;
}
if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr) {
walk_expr(self, cond);
self.visit_branch(expr, cond, then, false);
if let Some(else_inner) = r#else {
self.visit_branch(expr, cond, else_inner, true);
}
} else {
// find `unwrap[_err]()` calls:
if_chain! {
if let ExprKind::MethodCall(method_name, _, [self_arg, ..], _) = expr.kind;
if let Some(id) = path_to_local(self_arg);
if [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name);
let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name);
if let Some(unwrappable) = self.unwrappables.iter()
.find(|u| u.local_id == id);
// Span contexts should not differ with the conditional branch
if !differing_macro_contexts(unwrappable.branch.span, expr.span);
if !differing_macro_contexts(unwrappable.branch.span, unwrappable.check.span);
then {
if call_to_unwrap == unwrappable.safe_to_unwrap {
let is_entire_condition = unwrappable.is_entire_condition;
let unwrappable_variable_name = self.cx.tcx.hir().name(unwrappable.local_id);
let suggested_pattern = if call_to_unwrap {
unwrappable.kind.success_variant_pattern()
} else {
unwrappable.kind.error_variant_pattern()
};
span_lint_and_then(
self.cx,
UNNECESSARY_UNWRAP,
expr.span,
&format!(
"called `{}` on `{}` after checking its variant with `{}`",
method_name.ident.name,
unwrappable_variable_name,
unwrappable.check_name.ident.as_str(),
),
|diag| {
if is_entire_condition {
diag.span_suggestion(
unwrappable.check.span.with_lo(unwrappable.if_expr.span.lo()),
"try",
format!(
"if let {} = {}",
suggested_pattern,
unwrappable_variable_name,
),
// We don't track how the unwrapped value is used inside the
// block or suggest deleting the unwrap, so we can't offer a
// fixable solution.
Applicability::Unspecified,
);
} else {
diag.span_label(unwrappable.check.span, "the check is happening here");
diag.help("try using `if let` or `match`");
}
},
);
} else {
span_lint_and_then(
self.cx,
PANICKING_UNWRAP,
expr.span,
&format!("this call to `{}()` will always panic",
method_name.ident.name),
|diag| { diag.span_label(unwrappable.check.span, "because of this check"); },
);
}
}
}
walk_expr(self, expr);
}
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
declare_lint_pass!(Unwrap => [PANICKING_UNWRAP, UNNECESSARY_UNWRAP]);
impl<'tcx> LateLintPass<'tcx> for Unwrap {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
kind: FnKind<'tcx>,
decl: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
span: Span,
fn_id: HirId,
) {
if span.from_expansion() {
return;
}
let mut v = UnwrappableVariablesVisitor {
cx,
unwrappables: Vec::new(),
};
walk_fn(&mut v, kind, decl, body.id(), span, fn_id);
}
}
| 38.018237 | 107 | 0.514471 |
9c8212621619e63e6f5abff29b15ce16c51ba047 | 1,731 | #![feature(test)]
extern crate test;
use solana::bank::*;
use solana::mint::Mint;
use solana::status_deque::MAX_ENTRY_IDS;
use solana_sdk::hash::hash;
use solana_sdk::signature::{Keypair, KeypairUtil};
use solana_sdk::system_transaction::SystemTransaction;
use solana_sdk::transaction::Transaction;
use test::Bencher;
#[bench]
fn bench_process_transaction(bencher: &mut Bencher) {
let mint = Mint::new(100_000_000);
let bank = Bank::new(&mint);
// Create transactions between unrelated parties.
let transactions: Vec<_> = (0..4096)
.into_iter()
.map(|_| {
// Seed the 'from' account.
let rando0 = Keypair::new();
let tx = Transaction::system_move(
&mint.keypair(),
rando0.pubkey(),
10_000,
bank.last_id(),
0,
);
assert_eq!(bank.process_transaction(&tx), Ok(()));
// Seed the 'to' account and a cell for its signature.
let rando1 = Keypair::new();
let tx = Transaction::system_move(&rando0, rando1.pubkey(), 1, bank.last_id(), 0);
assert_eq!(bank.process_transaction(&tx), Ok(()));
// Finally, return the transaction to the benchmark.
tx
})
.collect();
let mut id = bank.last_id();
for _ in 0..(MAX_ENTRY_IDS - 1) {
bank.register_tick(&id);
id = hash(&id.as_ref())
}
bencher.iter(|| {
        // Since the benchmarker runs this multiple times, we need to clear the signatures.
bank.clear_signatures();
let results = bank.process_transactions(&transactions);
assert!(results.iter().all(Result::is_ok));
})
}
| 29.844828 | 94 | 0.580589 |
ff64436fc91fa7f838add7eb3bfd3670ed7bc904 | 16,177 | //! The [Value] trait describes what operations can be performed on interpreter values. The
//! interpreter usually executes using [DataValue]s so an implementation is provided here. The fact
//! that [Value] is a trait, however, allows interpretation of Cranelift IR on other kinds of
//! values.
use core::convert::TryFrom;
use core::fmt::{self, Display, Formatter};
use cranelift_codegen::data_value::{DataValue, DataValueCastFailure};
use cranelift_codegen::ir::immediates::{Ieee32, Ieee64};
use cranelift_codegen::ir::{types, Type};
use thiserror::Error;
pub type ValueResult<T> = Result<T, ValueError>;
pub trait Value: Clone + From<DataValue> {
// Identity.
fn ty(&self) -> Type;
fn int(n: i128, ty: Type) -> ValueResult<Self>;
fn into_int(self) -> ValueResult<i128>;
fn float(n: u64, ty: Type) -> ValueResult<Self>;
fn into_float(self) -> ValueResult<f64>;
fn is_nan(&self) -> ValueResult<bool>;
fn bool(b: bool, ty: Type) -> ValueResult<Self>;
fn into_bool(self) -> ValueResult<bool>;
fn vector(v: [u8; 16], ty: Type) -> ValueResult<Self>;
fn convert(self, kind: ValueConversionKind) -> ValueResult<Self>;
fn concat(self, other: Self) -> ValueResult<Self>;
// Comparison.
fn eq(&self, other: &Self) -> ValueResult<bool>;
fn gt(&self, other: &Self) -> ValueResult<bool>;
fn ge(&self, other: &Self) -> ValueResult<bool> {
Ok(self.eq(other)? || self.gt(other)?)
}
fn lt(&self, other: &Self) -> ValueResult<bool> {
other.gt(self)
}
fn le(&self, other: &Self) -> ValueResult<bool> {
Ok(other.eq(self)? || other.gt(self)?)
}
fn uno(&self, other: &Self) -> ValueResult<bool>;
fn overflow(&self, other: &Self) -> ValueResult<bool>;
// Arithmetic.
fn add(self, other: Self) -> ValueResult<Self>;
fn sub(self, other: Self) -> ValueResult<Self>;
fn mul(self, other: Self) -> ValueResult<Self>;
fn div(self, other: Self) -> ValueResult<Self>;
fn rem(self, other: Self) -> ValueResult<Self>;
// Bitwise.
fn shl(self, other: Self) -> ValueResult<Self>;
fn ushr(self, other: Self) -> ValueResult<Self>;
fn ishr(self, other: Self) -> ValueResult<Self>;
fn rotl(self, other: Self) -> ValueResult<Self>;
fn rotr(self, other: Self) -> ValueResult<Self>;
fn and(self, other: Self) -> ValueResult<Self>;
fn or(self, other: Self) -> ValueResult<Self>;
fn xor(self, other: Self) -> ValueResult<Self>;
fn not(self) -> ValueResult<Self>;
}
#[derive(Error, Debug, PartialEq)]
pub enum ValueError {
#[error("unable to convert type {1} into class {0}")]
InvalidType(ValueTypeClass, Type),
#[error("unable to convert value into type {0}")]
InvalidValue(Type),
#[error("unable to convert to primitive integer")]
InvalidInteger(#[from] std::num::TryFromIntError),
#[error("unable to cast data value")]
InvalidDataValueCast(#[from] DataValueCastFailure),
#[error("performed a division by zero")]
IntegerDivisionByZero,
}
#[derive(Debug, PartialEq)]
pub enum ValueTypeClass {
Integer,
Boolean,
Float,
}
impl Display for ValueTypeClass {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
ValueTypeClass::Integer => write!(f, "integer"),
ValueTypeClass::Boolean => write!(f, "boolean"),
ValueTypeClass::Float => write!(f, "float"),
}
}
}
#[derive(Debug)]
pub enum ValueConversionKind {
/// Throw a [ValueError] if an exact conversion to [Type] is not possible; e.g. in `i32` to
/// `i16`, convert `0x00001234` to `0x1234`.
Exact(Type),
/// Truncate the value to fit into the specified [Type]; e.g. in `i16` to `i8`, `0x1234` becomes
/// `0x34`.
Truncate(Type),
/// Similar to Truncate, but extracts from the top of the value; e.g. in a `i32` to `u8`,
/// `0x12345678` becomes `0x12`.
ExtractUpper(Type),
/// Convert to a larger integer type, extending the sign bit; e.g. in `i8` to `i16`, `0xff`
/// becomes `0xffff`.
SignExtend(Type),
/// Convert to a larger integer type, extending with zeroes; e.g. in `i8` to `i16`, `0xff`
/// becomes `0x00ff`.
ZeroExtend(Type),
/// Convert a signed integer to its unsigned value of the same size; e.g. in `i8` to `u8`,
/// `0xff` (`-1`) becomes `0xff` (`255`).
ToUnsigned,
/// Convert an unsigned integer to its signed value of the same size; e.g. in `u8` to `i8`,
/// `0xff` (`255`) becomes `0xff` (`-1`).
ToSigned,
/// Convert a floating point number by rounding to the nearest possible value with ties to even.
/// See `fdemote`, e.g.
RoundNearestEven(Type),
/// Converts an integer into a boolean, zero integers are converted into a
/// `false`, while other integers are converted into `true`. Booleans are passed through.
ToBoolean,
}
/// Helper for creating match expressions over [DataValue].
macro_rules! unary_match {
( $op:tt($arg1:expr); [ $( $data_value_ty:ident ),* ] ) => {
match $arg1 {
$( DataValue::$data_value_ty(a) => { Ok(DataValue::$data_value_ty($op a)) } )*
_ => unimplemented!()
}
};
}
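// For illustration (added comment): `unary_match!(!(&self); [I8, I16])` expands to
// roughly
//
//     match &self {
//         DataValue::I8(a) => Ok(DataValue::I8(!a)),
//         DataValue::I16(a) => Ok(DataValue::I16(!a)),
//         _ => unimplemented!(),
//     }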
macro_rules! binary_match {
( $op:ident($arg1:expr, $arg2:expr); [ $( $data_value_ty:ident ),* ] ) => {
match ($arg1, $arg2) {
$( (DataValue::$data_value_ty(a), DataValue::$data_value_ty(b)) => { Ok(DataValue::$data_value_ty(a.$op(*b))) } )*
_ => unimplemented!()
}
};
( $op:tt($arg1:expr, $arg2:expr); [ $( $data_value_ty:ident ),* ] ) => {
match ($arg1, $arg2) {
$( (DataValue::$data_value_ty(a), DataValue::$data_value_ty(b)) => { Ok(DataValue::$data_value_ty(a $op b)) } )*
_ => unimplemented!()
}
};
( $op:tt($arg1:expr, $arg2:expr); unsigned integers ) => {
match ($arg1, $arg2) {
(DataValue::I8(a), DataValue::I8(b)) => { Ok(DataValue::I8((u8::try_from(*a)? $op u8::try_from(*b)?) as i8)) }
(DataValue::I16(a), DataValue::I16(b)) => { Ok(DataValue::I16((u16::try_from(*a)? $op u16::try_from(*b)?) as i16)) }
(DataValue::I32(a), DataValue::I32(b)) => { Ok(DataValue::I32((u32::try_from(*a)? $op u32::try_from(*b)?) as i32)) }
(DataValue::I64(a), DataValue::I64(b)) => { Ok(DataValue::I64((u64::try_from(*a)? $op u64::try_from(*b)?) as i64)) }
_ => { Err(ValueError::InvalidType(ValueTypeClass::Integer, if !($arg1).ty().is_int() { ($arg1).ty() } else { ($arg2).ty() })) }
}
};
}
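// For illustration (added comment): the plain-operator form, e.g.
// `binary_match!(&(&self, &other); [I32])`, expands to roughly
//
//     match (&self, &other) {
//         (DataValue::I32(a), DataValue::I32(b)) => Ok(DataValue::I32(a & b)),
//         _ => unimplemented!(),
//     }
//
// while the `unsigned integers` form converts both operands to the matching
// unsigned width via `TryFrom` (so negative inputs surface as
// `ValueError::InvalidInteger`) before applying the operator and casting back.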
macro_rules! comparison_match {
( $op:path[$arg1:expr, $arg2:expr]; [ $( $data_value_ty:ident ),* ] ) => {
match ($arg1, $arg2) {
$( (DataValue::$data_value_ty(a), DataValue::$data_value_ty(b)) => { Ok($op(a, b)) } )*
_ => unimplemented!("comparison: {:?}, {:?}", $arg1, $arg2)
}
};
}
impl Value for DataValue {
fn ty(&self) -> Type {
self.ty()
}
fn int(n: i128, ty: Type) -> ValueResult<Self> {
if ty.is_int() && !ty.is_vector() {
DataValue::from_integer(n, ty).map_err(|_| ValueError::InvalidValue(ty))
} else {
Err(ValueError::InvalidType(ValueTypeClass::Integer, ty))
}
}
fn into_int(self) -> ValueResult<i128> {
match self {
DataValue::I8(n) => Ok(n as i128),
DataValue::I16(n) => Ok(n as i128),
DataValue::I32(n) => Ok(n as i128),
DataValue::I64(n) => Ok(n as i128),
DataValue::I128(n) => Ok(n),
DataValue::U8(n) => Ok(n as i128),
DataValue::U16(n) => Ok(n as i128),
DataValue::U32(n) => Ok(n as i128),
DataValue::U64(n) => Ok(n as i128),
DataValue::U128(n) => Ok(n as i128),
_ => Err(ValueError::InvalidType(ValueTypeClass::Integer, self.ty())),
}
}
fn float(bits: u64, ty: Type) -> ValueResult<Self> {
match ty {
types::F32 => Ok(DataValue::F32(Ieee32::with_bits(u32::try_from(bits)?))),
types::F64 => Ok(DataValue::F64(Ieee64::with_bits(bits))),
_ => Err(ValueError::InvalidType(ValueTypeClass::Float, ty)),
}
}
fn into_float(self) -> ValueResult<f64> {
unimplemented!()
}
fn is_nan(&self) -> ValueResult<bool> {
match self {
DataValue::F32(f) => Ok(f.is_nan()),
DataValue::F64(f) => Ok(f.is_nan()),
_ => Err(ValueError::InvalidType(ValueTypeClass::Float, self.ty())),
}
}
fn bool(b: bool, ty: Type) -> ValueResult<Self> {
assert!(ty.is_bool());
Ok(DataValue::B(b))
}
fn into_bool(self) -> ValueResult<bool> {
match self {
DataValue::B(b) => Ok(b),
_ => Err(ValueError::InvalidType(ValueTypeClass::Boolean, self.ty())),
}
}
fn vector(_v: [u8; 16], _ty: Type) -> ValueResult<Self> {
unimplemented!()
}
fn convert(self, kind: ValueConversionKind) -> ValueResult<Self> {
Ok(match kind {
ValueConversionKind::Exact(ty) => match (self, ty) {
// TODO a lot to do here: from bmask to ireduce to raw_bitcast...
(DataValue::I64(n), types::I32) => DataValue::I32(i32::try_from(n)?),
(DataValue::B(b), t) if t.is_bool() => DataValue::B(b),
(dv, _) => unimplemented!("conversion: {} -> {:?}", dv.ty(), kind),
},
ValueConversionKind::Truncate(ty) => {
assert!(
ty.is_int(),
"unimplemented conversion: {} -> {:?}",
self.ty(),
kind
);
let mask = (1 << (ty.bytes() * 8)) - 1i128;
let truncated = self.into_int()? & mask;
Self::from_integer(truncated, ty)?
}
ValueConversionKind::ExtractUpper(ty) => {
assert!(
ty.is_int(),
"unimplemented conversion: {} -> {:?}",
self.ty(),
kind
);
let shift_amt = 128 - (ty.bytes() * 8);
let mask = (1 << (ty.bytes() * 8)) - 1i128;
let shifted_mask = mask << shift_amt;
let extracted = (self.into_int()? & shifted_mask) >> shift_amt;
Self::from_integer(extracted, ty)?
}
ValueConversionKind::SignExtend(ty) => match (self.ty(), ty) {
(types::I8, types::I16) => unimplemented!(),
(types::I8, types::I32) => unimplemented!(),
(types::I8, types::I64) => unimplemented!(),
(types::I16, types::I32) => unimplemented!(),
(types::I16, types::I64) => unimplemented!(),
(types::I32, types::I64) => unimplemented!(),
_ => unimplemented!("conversion: {} -> {:?}", self.ty(), kind),
},
ValueConversionKind::ZeroExtend(ty) => match (self.ty(), ty) {
(types::I8, types::I16) => unimplemented!(),
(types::I8, types::I32) => unimplemented!(),
(types::I8, types::I64) => unimplemented!(),
(types::I16, types::I32) => unimplemented!(),
(types::I16, types::I64) => unimplemented!(),
(types::I32, types::I64) => unimplemented!(),
_ => unimplemented!("conversion: {} -> {:?}", self.ty(), kind),
},
ValueConversionKind::ToUnsigned => match self {
DataValue::I8(n) => DataValue::U8(n as u8),
DataValue::I16(n) => DataValue::U16(n as u16),
DataValue::I32(n) => DataValue::U32(n as u32),
DataValue::I64(n) => DataValue::U64(n as u64),
_ => unimplemented!("conversion: {} -> {:?}", self.ty(), kind),
},
ValueConversionKind::ToSigned => match self {
DataValue::U8(n) => DataValue::I8(n as i8),
DataValue::U16(n) => DataValue::I16(n as i16),
DataValue::U32(n) => DataValue::I32(n as i32),
DataValue::U64(n) => DataValue::I64(n as i64),
_ => unimplemented!("conversion: {} -> {:?}", self.ty(), kind),
},
ValueConversionKind::RoundNearestEven(ty) => match (self.ty(), ty) {
(types::F64, types::F32) => unimplemented!(),
_ => unimplemented!("conversion: {} -> {:?}", self.ty(), kind),
},
ValueConversionKind::ToBoolean => match self.ty() {
ty if ty.is_bool() => DataValue::B(self.into_bool()?),
ty if ty.is_int() => DataValue::B(self.into_int()? != 0),
ty => unimplemented!("conversion: {} -> {:?}", ty, kind),
},
})
}
fn concat(self, other: Self) -> ValueResult<Self> {
match (self, other) {
(DataValue::I64(lhs), DataValue::I64(rhs)) => Ok(DataValue::I128(
(((lhs as u64) as u128) | (((rhs as u64) as u128) << 64)) as i128,
)),
(lhs, rhs) => unimplemented!("concat: {} -> {}", lhs.ty(), rhs.ty()),
}
}
fn eq(&self, other: &Self) -> ValueResult<bool> {
comparison_match!(PartialEq::eq[&self, &other]; [I8, I16, I32, I64, U8, U16, U32, U64, F32, F64])
}
fn gt(&self, other: &Self) -> ValueResult<bool> {
comparison_match!(PartialOrd::gt[&self, &other]; [I8, I16, I32, I64, U8, U16, U32, U64, F32, F64])
}
fn uno(&self, other: &Self) -> ValueResult<bool> {
Ok(self.is_nan()? || other.is_nan()?)
}
fn overflow(&self, other: &Self) -> ValueResult<bool> {
Ok(match (self, other) {
(DataValue::I8(a), DataValue::I8(b)) => a.checked_sub(*b).is_none(),
(DataValue::I16(a), DataValue::I16(b)) => a.checked_sub(*b).is_none(),
(DataValue::I32(a), DataValue::I32(b)) => a.checked_sub(*b).is_none(),
(DataValue::I64(a), DataValue::I64(b)) => a.checked_sub(*b).is_none(),
_ => unimplemented!(),
})
}
fn add(self, other: Self) -> ValueResult<Self> {
binary_match!(wrapping_add(&self, &other); [I8, I16, I32, I64, I128]) // TODO: floats must handle NaNs, +/-0
}
fn sub(self, other: Self) -> ValueResult<Self> {
binary_match!(wrapping_sub(&self, &other); [I8, I16, I32, I64, I128]) // TODO: floats must handle NaNs, +/-0
}
fn mul(self, other: Self) -> ValueResult<Self> {
binary_match!(wrapping_mul(&self, &other); [I8, I16, I32, I64, I128])
}
fn div(self, other: Self) -> ValueResult<Self> {
if other.clone().into_int()? == 0 {
return Err(ValueError::IntegerDivisionByZero);
}
binary_match!(/(&self, &other); [I8, I16, I32, I64, U8, U16, U32, U64])
}
fn rem(self, other: Self) -> ValueResult<Self> {
if other.clone().into_int()? == 0 {
return Err(ValueError::IntegerDivisionByZero);
}
binary_match!(%(&self, &other); [I8, I16, I32, I64])
}
fn shl(self, other: Self) -> ValueResult<Self> {
binary_match!(<<(&self, &other); [I8, I16, I32, I64])
}
fn ushr(self, other: Self) -> ValueResult<Self> {
binary_match!(>>(&self, &other); unsigned integers)
}
fn ishr(self, other: Self) -> ValueResult<Self> {
binary_match!(>>(&self, &other); [I8, I16, I32, I64])
}
fn rotl(self, _other: Self) -> ValueResult<Self> {
unimplemented!()
}
fn rotr(self, _other: Self) -> ValueResult<Self> {
unimplemented!()
}
fn and(self, other: Self) -> ValueResult<Self> {
binary_match!(&(&self, &other); [I8, I16, I32, I64])
}
fn or(self, other: Self) -> ValueResult<Self> {
binary_match!(|(&self, &other); [I8, I16, I32, I64])
}
fn xor(self, other: Self) -> ValueResult<Self> {
binary_match!(^(&self, &other); [I8, I16, I32, I64])
}
fn not(self) -> ValueResult<Self> {
unary_match!(!(&self); [I8, I16, I32, I64])
}
}
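// A small, self-contained usage sketch, added for illustration only (it is not
// part of the upstream interpreter): it exercises the `Value` operations above
// on plain `DataValue`s.
#[cfg(test)]
mod value_usage_sketch {
    use super::*;
    use cranelift_codegen::data_value::DataValue;
    use cranelift_codegen::ir::types;

    #[test]
    fn add_compare_and_convert() -> ValueResult<()> {
        let a = DataValue::I32(6);
        let b = DataValue::I32(7);
        // `add` performs wrapping integer addition, mirroring Cranelift's `iadd`.
        let sum = a.clone().add(b.clone())?;
        // Comparisons go through the trait, so type mismatches become `ValueError`s
        // instead of panics; fully-qualified calls avoid clashing with `PartialEq`.
        assert!(Value::eq(&sum, &DataValue::I32(13))?);
        assert!(Value::gt(&b, &a)?);
        // Conversions are requested via `ValueConversionKind`.
        let narrowed = DataValue::I64(0x1234).convert(ValueConversionKind::Exact(types::I32))?;
        assert!(Value::eq(&narrowed, &DataValue::I32(0x1234))?);
        Ok(())
    }
}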
| 40.042079 | 140 | 0.541633 |
1e472f259f0190c45c9340a893fe00bd7e16da7c | 3,372 | #[doc = "Reader of register LIMITH"]
pub type R = crate::R<u32, super::LIMITH>;
#[doc = "Writer for register LIMITH"]
pub type W = crate::W<u32, super::LIMITH>;
#[doc = "Register LIMITH `reset()`'s with value 0"]
impl crate::ResetValue for super::LIMITH {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `CHIDX`"]
pub type CHIDX_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CHIDX`"]
pub struct CHIDX_W<'a> {
w: &'a mut W,
}
impl<'a> CHIDX_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
self.w
}
}
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EN_A {
#[doc = "0: Disable publishing"]
DISABLED = 0,
#[doc = "1: Enable publishing"]
ENABLED = 1,
}
impl From<EN_A> for bool {
#[inline(always)]
fn from(variant: EN_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `EN`"]
pub type EN_R = crate::R<bool, EN_A>;
impl EN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EN_A {
match self.bits {
false => EN_A::DISABLED,
true => EN_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == EN_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == EN_A::ENABLED
}
}
#[doc = "Write proxy for field `EN`"]
pub struct EN_W<'a> {
w: &'a mut W,
}
impl<'a> EN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: EN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disable publishing"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(EN_A::DISABLED)
}
#[doc = "Enable publishing"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(EN_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - Channel that event CH\\[n\\].LIMITH will publish to."]
#[inline(always)]
pub fn chidx(&self) -> CHIDX_R {
CHIDX_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bit 31"]
#[inline(always)]
pub fn en(&self) -> EN_R {
EN_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:3 - Channel that event CH\\[n\\].LIMITH will publish to."]
#[inline(always)]
pub fn chidx(&mut self) -> CHIDX_W {
CHIDX_W { w: self }
}
#[doc = "Bit 31"]
#[inline(always)]
pub fn en(&mut self) -> EN_W {
EN_W { w: self }
}
}
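// Usage sketch, added for illustration only; the register handle `limith_reg`
// and its access path are assumptions, not defined in this file. With the usual
// svd2rust read/write API, the proxies above are driven like this:
//
//     // Publish the CH[n].LIMITH event to (hypothetical) channel 3 and enable it.
//     limith_reg.write(|w| unsafe { w.chidx().bits(3) }.en().enabled());
//     // Inspect the enable bit afterwards.
//     let publishing = limith_reg.read().en().is_enabled();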
| 26.761905 | 86 | 0.534401 |
11682afdf899ba0580be428629ebf1cb6a655c7d | 85,839 | //! Rustdoc's HTML rendering module.
//!
//! This module contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
//! rendering process is largely driven by the `format!` syntax extension to
//! perform all I/O into files and streams.
//!
//! The rendering process is largely driven by the `Context` and `Cache`
//! structures. The cache is pre-populated by crawling the crate in question,
//! and then it is shared among the various rendering threads. The cache is meant
//! to be a fairly large structure not implementing `Clone` (because it's shared
//! among threads). The context, however, should be a lightweight structure. This
//! is cloned per-thread and contains information about what is currently being
//! rendered.
//!
//! In order to speed up rendering (mostly because of markdown rendering), the
//! rendering process has been parallelized. This parallelization is only
//! exposed through the `crate` method on the context, and then also from the
//! fact that the shared cache is stored in TLS (and must be accessed as such).
//!
//! In addition to rendering the crate itself, this module is also responsible
//! for creating the corresponding search index and source file renderings.
//! These threads are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
crate mod cache;
#[cfg(test)]
mod tests;
mod context;
mod print_item;
mod span_map;
mod write_shared;
crate use context::*;
crate use span_map::{collect_spans_and_sources, LinkFromSrc};
use std::collections::VecDeque;
use std::default::Default;
use std::fmt;
use std::path::PathBuf;
use std::str;
use std::string::ToString;
use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::DefId;
use rustc_hir::Mutability;
use rustc_middle::middle::stability;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::{kw, sym, Symbol};
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use crate::clean::{self, GetDefId, ItemId, RenderedLink, SelfTy};
use crate::docfs::PathError;
use crate::error::Error;
use crate::formats::cache::Cache;
use crate::formats::item_type::ItemType;
use crate::formats::{AssocItemRender, Impl, RenderMode};
use crate::html::escape::Escape;
use crate::html::format::{
href, print_abi_with_space, print_constness_with_space, print_default_space,
print_generic_bounds, print_where_clause, Buffer, HrefError, PrintWithSpace,
};
use crate::html::markdown::{HeadingOffset, Markdown, MarkdownHtml, MarkdownSummaryLine};
/// A pair of name and its optional document.
crate type NameDoc = (String, Option<String>);
crate fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) }
})
}
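// Behavior sketch (added comment): "foo" renders as "foo/", while "" and
// "bar/" are written through unchanged.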
// Helper structs for rendering items/sidebars and carrying along contextual
// information
/// Struct representing one entry in the JS search index. These are all emitted
/// by hand to a large JS file at the end of cache-creation.
#[derive(Debug)]
crate struct IndexItem {
crate ty: ItemType,
crate name: String,
crate path: String,
crate desc: String,
crate parent: Option<DefId>,
crate parent_idx: Option<usize>,
crate search_type: Option<IndexItemFunctionType>,
crate aliases: Box<[String]>,
}
/// A type used for the search index.
#[derive(Debug)]
crate struct RenderType {
name: Option<String>,
generics: Option<Vec<String>>,
}
/// Full type of functions/methods in the search index.
#[derive(Debug)]
crate struct IndexItemFunctionType {
inputs: Vec<TypeWithKind>,
output: Option<Vec<TypeWithKind>>,
}
impl Serialize for IndexItemFunctionType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// If we couldn't figure out a type, just write `null`.
let mut iter = self.inputs.iter();
if match self.output {
Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()),
None => iter.any(|ref i| i.ty.name.is_none()),
} {
serializer.serialize_none()
} else {
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&self.inputs)?;
if let Some(output) = &self.output {
if output.len() > 1 {
seq.serialize_element(&output)?;
} else {
seq.serialize_element(&output[0])?;
}
}
seq.end()
}
}
}
#[derive(Debug)]
crate struct TypeWithKind {
ty: RenderType,
kind: ItemType,
}
impl From<(RenderType, ItemType)> for TypeWithKind {
fn from(x: (RenderType, ItemType)) -> TypeWithKind {
TypeWithKind { ty: x.0, kind: x.1 }
}
}
impl Serialize for TypeWithKind {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&self.ty.name)?;
seq.serialize_element(&self.kind)?;
if let Some(generics) = &self.ty.generics {
seq.serialize_element(generics)?;
}
seq.end()
}
}
#[derive(Debug, Clone)]
crate struct StylePath {
/// The path to the theme
crate path: PathBuf,
/// What the `disabled` attribute should be set to in the HTML tag
crate disabled: bool,
}
fn write_srclink(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer) {
if let Some(l) = cx.src_href(item) {
write!(buf, "<a class=\"srclink\" href=\"{}\" title=\"goto source code\">[src]</a>", l)
}
}
#[derive(Debug, Eq, PartialEq, Hash)]
struct ItemEntry {
url: String,
name: String,
}
impl ItemEntry {
fn new(mut url: String, name: String) -> ItemEntry {
while url.starts_with('/') {
url.remove(0);
}
ItemEntry { url, name }
}
}
impl ItemEntry {
crate fn print(&self) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
write!(f, "<a href=\"{}\">{}</a>", self.url, Escape(&self.name))
})
}
}
impl PartialOrd for ItemEntry {
fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for ItemEntry {
fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering {
self.name.cmp(&other.name)
}
}
#[derive(Debug)]
struct AllTypes {
structs: FxHashSet<ItemEntry>,
enums: FxHashSet<ItemEntry>,
unions: FxHashSet<ItemEntry>,
primitives: FxHashSet<ItemEntry>,
traits: FxHashSet<ItemEntry>,
macros: FxHashSet<ItemEntry>,
functions: FxHashSet<ItemEntry>,
typedefs: FxHashSet<ItemEntry>,
opaque_tys: FxHashSet<ItemEntry>,
statics: FxHashSet<ItemEntry>,
constants: FxHashSet<ItemEntry>,
attributes: FxHashSet<ItemEntry>,
derives: FxHashSet<ItemEntry>,
trait_aliases: FxHashSet<ItemEntry>,
}
impl AllTypes {
fn new() -> AllTypes {
let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default());
AllTypes {
structs: new_set(100),
enums: new_set(100),
unions: new_set(100),
primitives: new_set(26),
traits: new_set(100),
macros: new_set(100),
functions: new_set(100),
typedefs: new_set(100),
opaque_tys: new_set(100),
statics: new_set(100),
constants: new_set(100),
attributes: new_set(100),
derives: new_set(100),
trait_aliases: new_set(100),
}
}
fn append(&mut self, item_name: String, item_type: &ItemType) {
let mut url: Vec<_> = item_name.split("::").skip(1).collect();
if let Some(name) = url.pop() {
let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name);
url.push(name);
let name = url.join("::");
match *item_type {
ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)),
ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)),
ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)),
ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)),
ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)),
ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)),
ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)),
ItemType::Typedef => self.typedefs.insert(ItemEntry::new(new_url, name)),
ItemType::OpaqueTy => self.opaque_tys.insert(ItemEntry::new(new_url, name)),
ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)),
ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
_ => true,
};
}
}
}
impl AllTypes {
fn print(self, f: &mut Buffer) {
fn print_entries(f: &mut Buffer, e: &FxHashSet<ItemEntry>, title: &str, class: &str) {
if !e.is_empty() {
let mut e: Vec<&ItemEntry> = e.iter().collect();
e.sort();
write!(
f,
"<h3 id=\"{}\">{}</h3><ul class=\"{} docblock\">",
                    title.replace(' ', "-"), // IDs cannot contain whitespace.
title,
class
);
for s in e.iter() {
write!(f, "<li>{}</li>", s.print());
}
f.write_str("</ul>");
}
}
f.write_str(
"<h1 class=\"fqn\">\
<span class=\"in-band\">List of all items</span>\
<span class=\"out-of-band\">\
<span id=\"render-detail\">\
<a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \
title=\"collapse all docs\">\
[<span class=\"inner\">−</span>]\
</a>\
</span>
</span>
</h1>",
);
// Note: print_entries does not escape the title, because we know the current set of titles
// doesn't require escaping.
print_entries(f, &self.structs, "Structs", "structs");
print_entries(f, &self.enums, "Enums", "enums");
print_entries(f, &self.unions, "Unions", "unions");
print_entries(f, &self.primitives, "Primitives", "primitives");
print_entries(f, &self.traits, "Traits", "traits");
print_entries(f, &self.macros, "Macros", "macros");
print_entries(f, &self.attributes, "Attribute Macros", "attributes");
print_entries(f, &self.derives, "Derive Macros", "derives");
print_entries(f, &self.functions, "Functions", "functions");
print_entries(f, &self.typedefs, "Typedefs", "typedefs");
print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases");
print_entries(f, &self.opaque_tys, "Opaque Types", "opaque-types");
print_entries(f, &self.statics, "Statics", "statics");
print_entries(f, &self.constants, "Constants", "constants")
}
}
#[derive(Debug)]
enum Setting {
Section {
description: &'static str,
sub_settings: Vec<Setting>,
},
Toggle {
js_data_name: &'static str,
description: &'static str,
default_value: bool,
},
Select {
js_data_name: &'static str,
description: &'static str,
default_value: &'static str,
options: Vec<(String, String)>,
},
}
impl Setting {
fn display(&self, root_path: &str, suffix: &str) -> String {
match *self {
Setting::Section { description, ref sub_settings } => format!(
"<div class=\"setting-line\">\
<div class=\"title\">{}</div>\
<div class=\"sub-settings\">{}</div>
</div>",
description,
sub_settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>()
),
Setting::Toggle { js_data_name, description, default_value } => format!(
"<div class=\"setting-line\">\
<label class=\"toggle\">\
<input type=\"checkbox\" id=\"{}\" {}>\
<span class=\"slider\"></span>\
</label>\
<div>{}</div>\
</div>",
js_data_name,
if default_value { " checked" } else { "" },
description,
),
Setting::Select { js_data_name, description, default_value, ref options } => format!(
"<div class=\"setting-line\">\
<div>{}</div>\
<label class=\"select-wrapper\">\
<select id=\"{}\" autocomplete=\"off\">{}</select>\
<img src=\"{}down-arrow{}.svg\" alt=\"Select item\">\
</label>\
</div>",
description,
js_data_name,
options
.iter()
.map(|opt| format!(
"<option value=\"{}\" {}>{}</option>",
opt.0,
if opt.0 == default_value { "selected" } else { "" },
opt.1,
))
.collect::<String>(),
root_path,
suffix,
),
}
}
}
impl From<(&'static str, &'static str, bool)> for Setting {
fn from(values: (&'static str, &'static str, bool)) -> Setting {
Setting::Toggle { js_data_name: values.0, description: values.1, default_value: values.2 }
}
}
impl<T: Into<Setting>> From<(&'static str, Vec<T>)> for Setting {
fn from(values: (&'static str, Vec<T>)) -> Setting {
Setting::Section {
description: values.0,
sub_settings: values.1.into_iter().map(|v| v.into()).collect::<Vec<_>>(),
}
}
}
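// For illustration (added comment): with these conversions, the tuple
// `("line-numbers", "Show line numbers on code examples", false)` used below
// becomes `Setting::Toggle { js_data_name: "line-numbers",
// description: "Show line numbers on code examples", default_value: false }`,
// and a `(description, Vec<..>)` pair becomes a `Setting::Section` wrapping its
// sub-settings.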
fn settings(root_path: &str, suffix: &str, themes: &[StylePath]) -> Result<String, Error> {
let theme_names: Vec<(String, String)> = themes
.iter()
.map(|entry| {
let theme =
try_none!(try_none!(entry.path.file_stem(), &entry.path).to_str(), &entry.path)
.to_string();
Ok((theme.clone(), theme))
})
.collect::<Result<_, Error>>()?;
// (id, explanation, default value)
let settings: &[Setting] = &[
(
"Theme preferences",
vec![
Setting::from(("use-system-theme", "Use system theme", true)),
Setting::Select {
js_data_name: "preferred-dark-theme",
description: "Preferred dark theme",
default_value: "dark",
options: theme_names.clone(),
},
Setting::Select {
js_data_name: "preferred-light-theme",
description: "Preferred light theme",
default_value: "light",
options: theme_names,
},
],
)
.into(),
("auto-hide-large-items", "Auto-hide item contents for large items.", true).into(),
("auto-hide-method-docs", "Auto-hide item methods' documentation", false).into(),
("auto-hide-trait-implementations", "Auto-hide trait implementation documentation", false)
.into(),
("go-to-only-result", "Directly go to item in search if there is only one result", false)
.into(),
("line-numbers", "Show line numbers on code examples", false).into(),
("disable-shortcuts", "Disable keyboard shortcuts", false).into(),
];
Ok(format!(
"<h1 class=\"fqn\">\
<span class=\"in-band\">Rustdoc settings</span>\
</h1>\
<div class=\"settings\">{}</div>\
<script src=\"{}settings{}.js\"></script>",
settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>(),
root_path,
suffix
))
}
fn document(
w: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
parent: Option<&clean::Item>,
heading_offset: HeadingOffset,
) {
if let Some(ref name) = item.name {
info!("Documenting {}", name);
}
document_item_info(w, cx, item, parent);
if parent.is_none() {
document_full_collapsible(w, item, cx, heading_offset);
} else {
document_full(w, item, cx, heading_offset);
}
}
/// Render md_text as markdown.
fn render_markdown(
w: &mut Buffer,
cx: &Context<'_>,
md_text: &str,
links: Vec<RenderedLink>,
heading_offset: HeadingOffset,
) {
let mut ids = cx.id_map.borrow_mut();
write!(
w,
"<div class=\"docblock\">{}</div>",
Markdown {
content: md_text,
links: &links,
ids: &mut ids,
error_codes: cx.shared.codes,
edition: cx.shared.edition(),
playground: &cx.shared.playground,
heading_offset,
}
.into_string()
)
}
/// Writes a documentation block containing only the first paragraph of the documentation. If the
/// docs are longer, a "Read more" link is appended to the end.
fn document_short(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
link: AssocItemLink<'_>,
parent: &clean::Item,
show_def_docs: bool,
) {
document_item_info(w, cx, item, Some(parent));
if !show_def_docs {
return;
}
if let Some(s) = item.doc_value() {
let mut summary_html = MarkdownSummaryLine(&s, &item.links(cx)).into_string();
if s.contains('\n') {
let link = format!(r#" <a href="{}">Read more</a>"#, naive_assoc_href(item, link, cx));
if let Some(idx) = summary_html.rfind("</p>") {
summary_html.insert_str(idx, &link);
} else {
summary_html.push_str(&link);
}
}
write!(w, "<div class='docblock'>{}</div>", summary_html,);
}
}
fn document_full_collapsible(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
heading_offset: HeadingOffset,
) {
document_full_inner(w, item, cx, true, heading_offset);
}
fn document_full(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
heading_offset: HeadingOffset,
) {
document_full_inner(w, item, cx, false, heading_offset);
}
fn document_full_inner(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
is_collapsible: bool,
heading_offset: HeadingOffset,
) {
if let Some(s) = cx.shared.maybe_collapsed_doc_value(item) {
debug!("Doc block: =====\n{}\n=====", s);
if is_collapsible {
w.write_str(
"<details class=\"rustdoc-toggle top-doc\" open>\
<summary class=\"hideme\">\
<span>Expand description</span>\
</summary>",
);
render_markdown(w, cx, &s, item.links(cx), heading_offset);
w.write_str("</details>");
} else {
render_markdown(w, cx, &s, item.links(cx), heading_offset);
}
}
}
/// Add extra information about an item such as:
///
/// * Stability
/// * Deprecated
/// * Required features (through the `doc_cfg` feature)
fn document_item_info(
w: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
parent: Option<&clean::Item>,
) {
let item_infos = short_item_info(item, cx, parent);
if !item_infos.is_empty() {
w.write_str("<div class=\"item-info\">");
for info in item_infos {
w.write_str(&info);
}
w.write_str("</div>");
}
}
fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<String> {
let cfg = match (&item.cfg, parent.and_then(|p| p.cfg.as_ref())) {
(Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
(cfg, _) => cfg.as_deref().cloned(),
};
debug!("Portability {:?} - {:?} = {:?}", item.cfg, parent.and_then(|p| p.cfg.as_ref()), cfg);
Some(format!("<div class=\"stab portability\">{}</div>", cfg?.render_long_html()))
}
/// Render the stability, deprecation and portability information that is displayed at the top of
/// the item's documentation.
fn short_item_info(
item: &clean::Item,
cx: &Context<'_>,
parent: Option<&clean::Item>,
) -> Vec<String> {
let mut extra_info = vec![];
let error_codes = cx.shared.codes;
if let Some(depr @ Deprecation { note, since, is_since_rustc_version: _, suggestion: _ }) =
item.deprecation(cx.tcx())
{
// We display deprecation messages for #[deprecated] and #[rustc_deprecated]
// but only display the future-deprecation messages for #[rustc_deprecated].
let mut message = if let Some(since) = since {
let since = &since.as_str();
if !stability::deprecation_in_effect(&depr) {
if *since == "TBD" {
String::from("Deprecating in a future Rust version")
} else {
format!("Deprecating in {}", Escape(since))
}
} else {
format!("Deprecated since {}", Escape(since))
}
} else {
String::from("Deprecated")
};
if let Some(note) = note {
let note = note.as_str();
let mut ids = cx.id_map.borrow_mut();
let html = MarkdownHtml(
¬e,
&mut ids,
error_codes,
cx.shared.edition(),
&cx.shared.playground,
);
message.push_str(&format!(": {}", html.into_string()));
}
extra_info.push(format!(
"<div class=\"stab deprecated\"><span class=\"emoji\">👎</span> {}</div>",
message,
));
}
// Render unstable items. But don't render "rustc_private" crates (internal compiler crates).
// Those crates are permanently unstable so it makes no sense to render "unstable" everywhere.
if let Some((StabilityLevel::Unstable { reason, issue, .. }, feature)) = item
.stability(cx.tcx())
.as_ref()
.filter(|stab| stab.feature != sym::rustc_private)
.map(|stab| (stab.level, stab.feature))
{
let mut message =
"<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned();
let mut feature = format!("<code>{}</code>", Escape(&feature.as_str()));
if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
feature.push_str(&format!(
" <a href=\"{url}{issue}\">#{issue}</a>",
url = url,
issue = issue
));
}
message.push_str(&format!(" ({})", feature));
if let Some(unstable_reason) = reason {
let mut ids = cx.id_map.borrow_mut();
message = format!(
"<details><summary>{}</summary>{}</details>",
message,
MarkdownHtml(
&unstable_reason.as_str(),
&mut ids,
error_codes,
cx.shared.edition(),
&cx.shared.playground,
)
.into_string()
);
}
extra_info.push(format!("<div class=\"stab unstable\">{}</div>", message));
}
if let Some(portability) = portability(item, parent) {
extra_info.push(portability);
}
extra_info
}
// Render the list of items inside one of the sections "Trait Implementations",
// "Auto Trait Implementations," "Blanket Trait Implementations" (on struct/enum pages).
fn render_impls(
cx: &Context<'_>,
w: &mut Buffer,
traits: &[&&Impl],
containing_item: &clean::Item,
) {
let cache = cx.cache();
let tcx = cx.tcx();
let mut impls = traits
.iter()
.map(|i| {
let did = i.trait_did_full(cache).unwrap();
let provided_trait_methods = i.inner_impl().provided_trait_methods(tcx);
let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_trait_methods);
let mut buffer = if w.is_for_html() { Buffer::html() } else { Buffer::new() };
render_impl(
&mut buffer,
cx,
i,
containing_item,
assoc_link,
RenderMode::Normal,
None,
&[],
ImplRenderingParameters {
show_def_docs: true,
is_on_foreign_type: false,
show_default_items: true,
show_non_assoc_items: true,
toggle_open_by_default: true,
},
);
buffer.into_inner()
})
.collect::<Vec<_>>();
impls.sort();
w.write_str(&impls.join(""));
}
fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>, cx: &Context<'_>) -> String {
use crate::formats::item_type::ItemType::*;
let name = it.name.as_ref().unwrap();
let ty = match it.type_() {
Typedef | AssocType => AssocType,
s => s,
};
let anchor = format!("#{}.{}", ty, name);
match link {
AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
AssocItemLink::Anchor(None) => anchor,
AssocItemLink::GotoSource(did, _) => {
href(did.expect_def_id(), cx).map(|p| format!("{}{}", p.0, anchor)).unwrap_or(anchor)
}
}
}
fn assoc_const(
w: &mut Buffer,
it: &clean::Item,
ty: &clean::Type,
_default: Option<&String>,
link: AssocItemLink<'_>,
extra: &str,
cx: &Context<'_>,
) {
write!(
w,
"{}{}const <a href=\"{}\" class=\"constant\">{}</a>: {}",
extra,
it.visibility.print_with_space(it.def_id, cx),
naive_assoc_href(it, link, cx),
it.name.as_ref().unwrap(),
ty.print(cx)
);
}
fn assoc_type(
w: &mut Buffer,
it: &clean::Item,
bounds: &[clean::GenericBound],
default: Option<&clean::Type>,
link: AssocItemLink<'_>,
extra: &str,
cx: &Context<'_>,
) {
write!(
w,
"{}type <a href=\"{}\" class=\"type\">{}</a>",
extra,
naive_assoc_href(it, link, cx),
it.name.as_ref().unwrap()
);
if !bounds.is_empty() {
write!(w, ": {}", print_generic_bounds(bounds, cx))
}
if let Some(default) = default {
write!(w, " = {}", default.print(cx))
}
}
fn render_stability_since_raw(
w: &mut Buffer,
ver: Option<&str>,
const_stability: Option<&ConstStability>,
containing_ver: Option<&str>,
containing_const_ver: Option<&str>,
) {
let ver = ver.filter(|inner| !inner.is_empty());
match (ver, const_stability) {
// stable and const stable
(Some(v), Some(ConstStability { level: StabilityLevel::Stable { since }, .. }))
if Some(since.as_str()).as_deref() != containing_const_ver =>
{
write!(
w,
"<span class=\"since\" title=\"Stable since Rust version {0}, const since {1}\">{0} (const: {1})</span>",
v, since
);
}
// stable and const unstable
(
Some(v),
Some(ConstStability { level: StabilityLevel::Unstable { issue, .. }, feature, .. }),
) => {
write!(
w,
"<span class=\"since\" title=\"Stable since Rust version {0}, const unstable\">{0} (const: ",
v
);
if let Some(n) = issue {
write!(
w,
"<a href=\"https://github.com/rust-lang/rust/issues/{}\" title=\"Tracking issue for {}\">unstable</a>",
n, feature
);
} else {
write!(w, "unstable");
}
write!(w, ")</span>");
}
// stable
(Some(v), _) if ver != containing_ver => {
write!(
w,
"<span class=\"since\" title=\"Stable since Rust version {0}\">{0}</span>",
v
);
}
_ => {}
}
}
fn render_assoc_item(
w: &mut Buffer,
item: &clean::Item,
link: AssocItemLink<'_>,
parent: ItemType,
cx: &Context<'_>,
) {
fn method(
w: &mut Buffer,
meth: &clean::Item,
header: hir::FnHeader,
g: &clean::Generics,
d: &clean::FnDecl,
link: AssocItemLink<'_>,
parent: ItemType,
cx: &Context<'_>,
) {
let name = meth.name.as_ref().unwrap();
let href = match link {
AssocItemLink::Anchor(Some(ref id)) => Some(format!("#{}", id)),
AssocItemLink::Anchor(None) => Some(format!("#{}.{}", meth.type_(), name)),
AssocItemLink::GotoSource(did, provided_methods) => {
// We're creating a link from an impl-item to the corresponding
// trait-item and need to map the anchored type accordingly.
let ty = if provided_methods.contains(&name) {
ItemType::Method
} else {
ItemType::TyMethod
};
match (href(did.expect_def_id(), cx), ty) {
(Ok(p), ty) => Some(format!("{}#{}.{}", p.0, ty, name)),
(Err(HrefError::DocumentationNotBuilt), ItemType::TyMethod) => None,
(Err(_), ty) => Some(format!("#{}.{}", ty, name)),
}
}
};
let vis = meth.visibility.print_with_space(meth.def_id, cx).to_string();
let constness =
print_constness_with_space(&header.constness, meth.const_stability(cx.tcx()));
let asyncness = header.asyncness.print_with_space();
let unsafety = header.unsafety.print_with_space();
let defaultness = print_default_space(meth.is_default());
let abi = print_abi_with_space(header.abi).to_string();
// NOTE: `{:#}` does not print HTML formatting, `{}` does. So `g.print` can't be reused between the length calculation and `write!`.
let generics_len = format!("{:#}", g.print(cx)).len();
let mut header_len = "fn ".len()
+ vis.len()
+ constness.len()
+ asyncness.len()
+ unsafety.len()
+ defaultness.len()
+ abi.len()
+ name.as_str().len()
+ generics_len;
let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
header_len += 4;
let indent_str = " ";
render_attributes_in_pre(w, meth, indent_str);
(4, indent_str, false)
} else {
render_attributes_in_code(w, meth);
(0, "", true)
};
w.reserve(header_len + "<a href=\"\" class=\"fnname\">{".len() + "</a>".len());
write!(
w,
"{indent}{vis}{constness}{asyncness}{unsafety}{defaultness}{abi}fn <a {href} class=\"fnname\">{name}</a>\
{generics}{decl}{notable_traits}{where_clause}",
indent = indent_str,
vis = vis,
constness = constness,
asyncness = asyncness,
unsafety = unsafety,
defaultness = defaultness,
abi = abi,
// links without a href are valid - https://www.w3schools.com/tags/att_a_href.asp
href = href.map(|href| format!("href=\"{}\"", href)).unwrap_or_else(|| "".to_string()),
name = name,
generics = g.print(cx),
decl = d.full_print(header_len, indent, header.asyncness, cx),
notable_traits = notable_traits_decl(&d, cx),
where_clause = print_where_clause(g, cx, indent, end_newline),
)
}
match *item.kind {
clean::StrippedItem(..) => {}
clean::TyMethodItem(ref m) => {
method(w, item, m.header, &m.generics, &m.decl, link, parent, cx)
}
clean::MethodItem(ref m, _) => {
method(w, item, m.header, &m.generics, &m.decl, link, parent, cx)
}
clean::AssocConstItem(ref ty, ref default) => assoc_const(
w,
item,
ty,
default.as_ref(),
link,
if parent == ItemType::Trait { " " } else { "" },
cx,
),
clean::AssocTypeItem(ref bounds, ref default) => assoc_type(
w,
item,
bounds,
default.as_ref(),
link,
if parent == ItemType::Trait { " " } else { "" },
cx,
),
_ => panic!("render_assoc_item called on non-associated-item"),
}
}
const ALLOWED_ATTRIBUTES: &[Symbol] =
&[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive];
fn attributes(it: &clean::Item) -> Vec<String> {
it.attrs
.other_attrs
.iter()
.filter_map(|attr| {
if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
Some(pprust::attribute_to_string(&attr).replace("\n", "").replace(" ", " "))
} else {
None
}
})
.collect()
}
// When an attribute is rendered inside a `<pre>` tag, it is formatted using
// a whitespace prefix and newline.
fn render_attributes_in_pre(w: &mut Buffer, it: &clean::Item, prefix: &str) {
for a in attributes(it) {
writeln!(w, "{}{}", prefix, a);
}
}
// When an attribute is rendered inside a <code> tag, it is formatted using
// a div to produce a newline after it.
fn render_attributes_in_code(w: &mut Buffer, it: &clean::Item) {
for a in attributes(it) {
write!(w, "<div class=\"code-attribute\">{}</div>", a);
}
}
#[derive(Copy, Clone)]
enum AssocItemLink<'a> {
Anchor(Option<&'a str>),
GotoSource(ItemId, &'a FxHashSet<Symbol>),
}
impl<'a> AssocItemLink<'a> {
fn anchor(&self, id: &'a str) -> Self {
match *self {
AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(&id)),
ref other => *other,
}
}
}
fn render_assoc_items(
w: &mut Buffer,
cx: &Context<'_>,
containing_item: &clean::Item,
it: DefId,
what: AssocItemRender<'_>,
) {
info!("Documenting associated items of {:?}", containing_item.name);
let cache = cx.cache();
let v = match cache.impls.get(&it) {
Some(v) => v,
None => return,
};
let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none());
if !non_trait.is_empty() {
let render_mode = match what {
AssocItemRender::All => {
w.write_str(
"<h2 id=\"implementations\" class=\"small-section-header\">\
Implementations<a href=\"#implementations\" class=\"anchor\"></a>\
</h2>",
);
RenderMode::Normal
}
AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
write!(
w,
"<h2 id=\"deref-methods\" class=\"small-section-header\">\
<span>Methods from {trait_}<Target = {type_}></span>\
<a href=\"#deref-methods\" class=\"anchor\"></a>\
</h2>",
trait_ = trait_.print(cx),
type_ = type_.print(cx),
);
RenderMode::ForDeref { mut_: deref_mut_ }
}
};
for i in &non_trait {
render_impl(
w,
cx,
i,
containing_item,
AssocItemLink::Anchor(None),
render_mode,
None,
&[],
ImplRenderingParameters {
show_def_docs: true,
is_on_foreign_type: false,
show_default_items: true,
show_non_assoc_items: true,
toggle_open_by_default: true,
},
);
}
}
if let AssocItemRender::DerefFor { .. } = what {
return;
}
if !traits.is_empty() {
let deref_impl = traits.iter().find(|t| {
t.inner_impl().trait_.def_id_full(cache) == cx.tcx().lang_items().deref_trait()
});
if let Some(impl_) = deref_impl {
let has_deref_mut = traits.iter().any(|t| {
t.inner_impl().trait_.def_id_full(cache) == cx.tcx().lang_items().deref_mut_trait()
});
render_deref_methods(w, cx, impl_, containing_item, has_deref_mut);
}
let (synthetic, concrete): (Vec<&&Impl>, Vec<&&Impl>) =
traits.iter().partition(|t| t.inner_impl().synthetic);
let (blanket_impl, concrete): (Vec<&&Impl>, _) =
concrete.into_iter().partition(|t| t.inner_impl().blanket_impl.is_some());
let mut impls = Buffer::empty_from(&w);
render_impls(cx, &mut impls, &concrete, containing_item);
let impls = impls.into_inner();
if !impls.is_empty() {
write!(
w,
"<h2 id=\"trait-implementations\" class=\"small-section-header\">\
Trait Implementations<a href=\"#trait-implementations\" class=\"anchor\"></a>\
</h2>\
<div id=\"trait-implementations-list\">{}</div>",
impls
);
}
if !synthetic.is_empty() {
w.write_str(
"<h2 id=\"synthetic-implementations\" class=\"small-section-header\">\
Auto Trait Implementations\
<a href=\"#synthetic-implementations\" class=\"anchor\"></a>\
</h2>\
<div id=\"synthetic-implementations-list\">",
);
render_impls(cx, w, &synthetic, containing_item);
w.write_str("</div>");
}
if !blanket_impl.is_empty() {
w.write_str(
"<h2 id=\"blanket-implementations\" class=\"small-section-header\">\
Blanket Implementations\
<a href=\"#blanket-implementations\" class=\"anchor\"></a>\
</h2>\
<div id=\"blanket-implementations-list\">",
);
render_impls(cx, w, &blanket_impl, containing_item);
w.write_str("</div>");
}
}
}
fn render_deref_methods(
w: &mut Buffer,
cx: &Context<'_>,
impl_: &Impl,
container_item: &clean::Item,
deref_mut: bool,
) {
let cache = cx.cache();
let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
let (target, real_target) = impl_
.inner_impl()
.items
.iter()
.find_map(|item| match *item.kind {
clean::TypedefItem(ref t, true) => Some(match *t {
clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
_ => (&t.type_, &t.type_),
}),
_ => None,
})
.expect("Expected associated type binding");
debug!("Render deref methods for {:#?}, target {:#?}", impl_.inner_impl().for_, target);
let what =
AssocItemRender::DerefFor { trait_: deref_type, type_: real_target, deref_mut_: deref_mut };
if let Some(did) = target.def_id_full(cache) {
if let Some(type_did) = impl_.inner_impl().for_.def_id_full(cache) {
// `impl Deref<Target = S> for S`
if did == type_did {
// Avoid infinite cycles
return;
}
}
render_assoc_items(w, cx, container_item, did, what);
} else {
if let Some(prim) = target.primitive_type() {
if let Some(&did) = cache.primitive_locations.get(&prim) {
render_assoc_items(w, cx, container_item, did, what);
}
}
}
}
fn should_render_item(item: &clean::Item, deref_mut_: bool, tcx: TyCtxt<'_>) -> bool {
let self_type_opt = match *item.kind {
clean::MethodItem(ref method, _) => method.decl.self_type(),
clean::TyMethodItem(ref method) => method.decl.self_type(),
_ => None,
};
if let Some(self_ty) = self_type_opt {
let (by_mut_ref, by_box, by_value) = match self_ty {
SelfTy::SelfBorrowed(_, mutability)
| SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. }) => {
(mutability == Mutability::Mut, false, false)
}
SelfTy::SelfExplicit(clean::ResolvedPath { did, .. }) => {
(false, Some(did) == tcx.lang_items().owned_box(), false)
}
SelfTy::SelfValue => (false, false, true),
_ => (false, false, false),
};
(deref_mut_ || !by_mut_ref) && !by_box && !by_value
} else {
false
}
}
fn notable_traits_decl(decl: &clean::FnDecl, cx: &Context<'_>) -> String {
let mut out = Buffer::html();
let mut trait_ = String::new();
if let Some(did) = decl.output.def_id_full(cx.cache()) {
if let Some(impls) = cx.cache().impls.get(&did) {
for i in impls {
let impl_ = i.inner_impl();
if impl_.trait_.def_id().map_or(false, |d| {
cx.cache().traits.get(&d).map(|t| t.is_notable).unwrap_or(false)
}) {
if out.is_empty() {
write!(
&mut out,
"<div class=\"notable\">Notable traits for {}</div>\
<code class=\"content\">",
impl_.for_.print(cx)
);
trait_.push_str(&impl_.for_.print(cx).to_string());
}
                    // use the "where" class here to make it small
write!(
&mut out,
"<span class=\"where fmt-newline\">{}</span>",
impl_.print(false, cx)
);
let t_did = impl_.trait_.def_id_full(cx.cache()).unwrap();
for it in &impl_.items {
if let clean::TypedefItem(ref tydef, _) = *it.kind {
out.push_str("<span class=\"where fmt-newline\"> ");
assoc_type(
&mut out,
it,
&[],
Some(&tydef.type_),
AssocItemLink::GotoSource(t_did.into(), &FxHashSet::default()),
"",
cx,
);
out.push_str(";</span>");
}
}
}
}
}
}
if !out.is_empty() {
out.insert_str(
0,
"<span class=\"notable-traits\"><span class=\"notable-traits-tooltip\">ⓘ\
<div class=\"notable-traits-tooltiptext\"><span class=\"docblock\">",
);
out.push_str("</code></span></div></span></span>");
}
out.into_inner()
}
#[derive(Clone, Copy, Debug)]
struct ImplRenderingParameters {
show_def_docs: bool,
is_on_foreign_type: bool,
show_default_items: bool,
/// Whether or not to show methods.
show_non_assoc_items: bool,
toggle_open_by_default: bool,
}
fn render_impl(
w: &mut Buffer,
cx: &Context<'_>,
i: &Impl,
parent: &clean::Item,
link: AssocItemLink<'_>,
render_mode: RenderMode,
use_absolute: Option<bool>,
aliases: &[String],
rendering_params: ImplRenderingParameters,
) {
let cache = cx.cache();
let traits = &cache.traits;
let trait_ = i.trait_did_full(cache).map(|did| &traits[&did]);
let mut close_tags = String::new();
// For trait implementations, the `interesting` output contains all methods that have doc
// comments, and the `boring` output contains all methods that do not. The distinction is
// used to allow hiding the boring methods.
// `containing_item` is used for rendering stability info. If the parent is a trait impl,
    // `containing_item` will be the grandparent, since trait impls can't have stability attached.
fn doc_impl_item(
boring: &mut Buffer,
interesting: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
parent: &clean::Item,
containing_item: &clean::Item,
link: AssocItemLink<'_>,
render_mode: RenderMode,
is_default_item: bool,
trait_: Option<&clean::Trait>,
rendering_params: ImplRenderingParameters,
) {
let item_type = item.type_();
let name = item.name.as_ref().unwrap();
let render_method_item = rendering_params.show_non_assoc_items
&& match render_mode {
RenderMode::Normal => true,
RenderMode::ForDeref { mut_: deref_mut_ } => {
should_render_item(&item, deref_mut_, cx.tcx())
}
};
let in_trait_class = if trait_.is_some() { " trait-impl" } else { "" };
let mut doc_buffer = Buffer::empty_from(boring);
let mut info_buffer = Buffer::empty_from(boring);
let mut short_documented = true;
if render_method_item {
if !is_default_item {
if let Some(t) = trait_ {
// The trait item may have been stripped so we might not
// find any documentation or stability for it.
if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
// We need the stability of the item from the trait
// because impls can't have a stability.
if item.doc_value().is_some() {
document_item_info(&mut info_buffer, cx, it, Some(parent));
document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
short_documented = false;
} else {
// In case the item isn't documented,
// provide short documentation from the trait.
document_short(
&mut doc_buffer,
it,
cx,
link,
parent,
rendering_params.show_def_docs,
);
}
}
} else {
document_item_info(&mut info_buffer, cx, item, Some(parent));
if rendering_params.show_def_docs {
document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
short_documented = false;
}
}
} else {
document_short(
&mut doc_buffer,
item,
cx,
link,
parent,
rendering_params.show_def_docs,
);
}
}
let w = if short_documented && trait_.is_some() { interesting } else { boring };
let toggled = !doc_buffer.is_empty();
if toggled {
let method_toggle_class =
if item_type == ItemType::Method { " method-toggle" } else { "" };
write!(w, "<details class=\"rustdoc-toggle{}\" open><summary>", method_toggle_class);
}
match *item.kind {
clean::MethodItem(..) | clean::TyMethodItem(_) => {
// Only render when the method is not static or we allow static methods
if render_method_item {
let id = cx.derive_id(format!("{}.{}", item_type, name));
let source_id = trait_
.and_then(|trait_| {
trait_.items.iter().find(|item| {
item.name.map(|n| n.as_str().eq(&name.as_str())).unwrap_or(false)
})
})
.map(|item| format!("{}.{}", item.type_(), name));
write!(
w,
"<div id=\"{}\" class=\"{}{} has-srclink\">",
id, item_type, in_trait_class,
);
render_rightside(w, cx, item, containing_item);
write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
w.write_str("<h4 class=\"code-header\">");
render_assoc_item(
w,
item,
link.anchor(source_id.as_ref().unwrap_or(&id)),
ItemType::Impl,
cx,
);
w.write_str("</h4>");
w.write_str("</div>");
}
}
clean::TypedefItem(ref tydef, _) => {
let source_id = format!("{}.{}", ItemType::AssocType, name);
let id = cx.derive_id(source_id.clone());
write!(
w,
"<div id=\"{}\" class=\"{}{} has-srclink\">",
id, item_type, in_trait_class
);
write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
w.write_str("<h4 class=\"code-header\">");
assoc_type(
w,
item,
&Vec::new(),
Some(&tydef.type_),
link.anchor(if trait_.is_some() { &source_id } else { &id }),
"",
cx,
);
w.write_str("</h4>");
w.write_str("</div>");
}
clean::AssocConstItem(ref ty, ref default) => {
let source_id = format!("{}.{}", item_type, name);
let id = cx.derive_id(source_id.clone());
write!(
w,
"<div id=\"{}\" class=\"{}{} has-srclink\">",
id, item_type, in_trait_class
);
render_rightside(w, cx, item, containing_item);
write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
w.write_str("<h4 class=\"code-header\">");
assoc_const(
w,
item,
ty,
default.as_ref(),
link.anchor(if trait_.is_some() { &source_id } else { &id }),
"",
cx,
);
w.write_str("</h4>");
w.write_str("</div>");
}
clean::AssocTypeItem(ref bounds, ref default) => {
let source_id = format!("{}.{}", item_type, name);
let id = cx.derive_id(source_id.clone());
write!(w, "<div id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class,);
write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
w.write_str("<h4 class=\"code-header\">");
assoc_type(
w,
item,
bounds,
default.as_ref(),
link.anchor(if trait_.is_some() { &source_id } else { &id }),
"",
cx,
);
w.write_str("</h4>");
w.write_str("</div>");
}
clean::StrippedItem(..) => return,
_ => panic!("can't make docs for trait item with name {:?}", item.name),
}
w.push_buffer(info_buffer);
if toggled {
w.write_str("</summary>");
w.push_buffer(doc_buffer);
w.push_str("</details>");
}
}
let mut impl_items = Buffer::empty_from(w);
let mut default_impl_items = Buffer::empty_from(w);
for trait_item in &i.inner_impl().items {
doc_impl_item(
&mut default_impl_items,
&mut impl_items,
cx,
trait_item,
if trait_.is_some() { &i.impl_item } else { parent },
parent,
link,
render_mode,
false,
trait_.map(|t| &t.trait_),
rendering_params,
);
}
fn render_default_items(
boring: &mut Buffer,
interesting: &mut Buffer,
cx: &Context<'_>,
t: &clean::Trait,
i: &clean::Impl,
parent: &clean::Item,
containing_item: &clean::Item,
render_mode: RenderMode,
rendering_params: ImplRenderingParameters,
) {
for trait_item in &t.items {
let n = trait_item.name;
if i.items.iter().any(|m| m.name == n) {
continue;
}
let did = i.trait_.as_ref().unwrap().def_id_full(cx.cache()).unwrap();
let provided_methods = i.provided_trait_methods(cx.tcx());
let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_methods);
doc_impl_item(
boring,
interesting,
cx,
trait_item,
parent,
containing_item,
assoc_link,
render_mode,
true,
Some(t),
rendering_params,
);
}
}
// If we've implemented a trait, then also emit documentation for all
// default items which weren't overridden in the implementation block.
// We don't emit documentation for default items if they appear in the
// Implementations on Foreign Types or Implementors sections.
if rendering_params.show_default_items {
if let Some(t) = trait_ {
render_default_items(
&mut default_impl_items,
&mut impl_items,
cx,
&t.trait_,
&i.inner_impl(),
&i.impl_item,
parent,
render_mode,
rendering_params,
);
}
}
if render_mode == RenderMode::Normal {
let toggled = !(impl_items.is_empty() && default_impl_items.is_empty());
if toggled {
close_tags.insert_str(0, "</details>");
write!(
w,
"<details class=\"rustdoc-toggle implementors-toggle\"{}>",
if rendering_params.toggle_open_by_default { " open" } else { "" }
);
write!(w, "<summary>")
}
render_impl_summary(
w,
cx,
i,
parent,
parent,
rendering_params.show_def_docs,
use_absolute,
rendering_params.is_on_foreign_type,
aliases,
);
if toggled {
write!(w, "</summary>")
}
if let Some(ref dox) = cx.shared.maybe_collapsed_doc_value(&i.impl_item) {
let mut ids = cx.id_map.borrow_mut();
write!(
w,
"<div class=\"docblock\">{}</div>",
Markdown {
content: &*dox,
links: &i.impl_item.links(cx),
ids: &mut ids,
error_codes: cx.shared.codes,
edition: cx.shared.edition(),
playground: &cx.shared.playground,
heading_offset: HeadingOffset::H2
}
.into_string()
);
}
}
if !default_impl_items.is_empty() || !impl_items.is_empty() {
w.write_str("<div class=\"impl-items\">");
w.push_buffer(default_impl_items);
w.push_buffer(impl_items);
close_tags.insert_str(0, "</div>");
}
w.write_str(&close_tags);
}
// Render the items that appear on the right side of methods, impls, and
// associated types. For example "1.0.0 (const: 1.39.0) [src]".
fn render_rightside(
w: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
containing_item: &clean::Item,
) {
let tcx = cx.tcx();
write!(w, "<div class=\"rightside\">");
render_stability_since_raw(
w,
item.stable_since(tcx).as_deref(),
item.const_stability(tcx),
containing_item.stable_since(tcx).as_deref(),
containing_item.const_stable_since(tcx).as_deref(),
);
write_srclink(cx, item, w);
w.write_str("</div>");
}
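// Renders the collapsed, single-line summary of an impl block: roughly a
// `<div id="impl-..." class="impl has-srclink">` containing the right-side stability/src
// links, a self-anchor, an `<h3 class="code-header in-band">impl Trait for Type</h3>`
// header, and (for trait impls) a portability note.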
pub(crate) fn render_impl_summary(
w: &mut Buffer,
cx: &Context<'_>,
i: &Impl,
parent: &clean::Item,
containing_item: &clean::Item,
show_def_docs: bool,
use_absolute: Option<bool>,
is_on_foreign_type: bool,
// This argument is used to reference same type with different paths to avoid duplication
// in documentation pages for trait with automatic implementations like "Send" and "Sync".
aliases: &[String],
) {
let id = cx.derive_id(match i.inner_impl().trait_ {
Some(ref t) => {
if is_on_foreign_type {
get_id_for_impl_on_foreign_type(&i.inner_impl().for_, t, cx)
} else {
format!("impl-{}", small_url_encode(format!("{:#}", t.print(cx))))
}
}
None => "impl".to_string(),
});
let aliases = if aliases.is_empty() {
String::new()
} else {
format!(" data-aliases=\"{}\"", aliases.join(","))
};
write!(w, "<div id=\"{}\" class=\"impl has-srclink\"{}>", id, aliases);
render_rightside(w, cx, &i.impl_item, containing_item);
write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
write!(w, "<h3 class=\"code-header in-band\">");
if let Some(use_absolute) = use_absolute {
write!(w, "{}", i.inner_impl().print(use_absolute, cx));
if show_def_docs {
for it in &i.inner_impl().items {
if let clean::TypedefItem(ref tydef, _) = *it.kind {
w.write_str("<span class=\"where fmt-newline\"> ");
assoc_type(w, it, &[], Some(&tydef.type_), AssocItemLink::Anchor(None), "", cx);
w.write_str(";</span>");
}
}
}
} else {
write!(w, "{}", i.inner_impl().print(false, cx));
}
write!(w, "</h3>");
let is_trait = i.inner_impl().trait_.is_some();
if is_trait {
if let Some(portability) = portability(&i.impl_item, Some(parent)) {
write!(w, "<div class=\"item-info\">{}</div>", portability);
}
}
w.write_str("</div>");
}
fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) {
let parentlen = cx.current.len() - if it.is_mod() { 1 } else { 0 };
if it.is_struct()
|| it.is_trait()
|| it.is_primitive()
|| it.is_union()
|| it.is_enum()
|| it.is_mod()
|| it.is_typedef()
{
write!(
buffer,
"<h2 class=\"location\">{}{}</h2>",
match *it.kind {
clean::StructItem(..) => "Struct ",
clean::TraitItem(..) => "Trait ",
clean::PrimitiveItem(..) => "Primitive Type ",
clean::UnionItem(..) => "Union ",
clean::EnumItem(..) => "Enum ",
clean::TypedefItem(..) => "Type Definition ",
clean::ForeignTypeItem => "Foreign Type ",
clean::ModuleItem(..) =>
if it.is_crate() {
"Crate "
} else {
"Module "
},
_ => "",
},
it.name.as_ref().unwrap()
);
}
if it.is_crate() {
if let Some(ref version) = cx.cache().crate_version {
write!(
buffer,
"<div class=\"block version\">\
<div class=\"narrow-helper\"></div>\
<p>Version {}</p>\
</div>",
Escape(version),
);
}
}
buffer.write_str("<div class=\"sidebar-elems\">");
if it.is_crate() {
write!(
buffer,
"<a id=\"all-types\" href=\"all.html\"><p>See all {}'s items</p></a>",
it.name.as_ref().expect("crates always have a name"),
);
}
match *it.kind {
clean::StructItem(ref s) => sidebar_struct(cx, buffer, it, s),
clean::TraitItem(ref t) => sidebar_trait(cx, buffer, it, t),
clean::PrimitiveItem(_) => sidebar_primitive(cx, buffer, it),
clean::UnionItem(ref u) => sidebar_union(cx, buffer, it, u),
clean::EnumItem(ref e) => sidebar_enum(cx, buffer, it, e),
clean::TypedefItem(_, _) => sidebar_typedef(cx, buffer, it),
clean::ModuleItem(ref m) => sidebar_module(buffer, &m.items),
clean::ForeignTypeItem => sidebar_foreign_type(cx, buffer, it),
_ => {}
}
// The sidebar is designed to display sibling functions, modules and
    // other miscellaneous information. Since there are lots of sibling
    // items (and that causes quadratic growth in large modules),
    // we refactor common parts into a shared JavaScript file per module.
    // Still, we don't move everything into JS because we want to preserve
// as much HTML as possible in order to allow non-JS-enabled browsers
// to navigate the documentation (though slightly inefficiently).
if !it.is_mod() {
buffer.write_str("<h2 class=\"location\">Other items in<br>");
for (i, name) in cx.current.iter().take(parentlen).enumerate() {
if i > 0 {
buffer.write_str("::<wbr>");
}
write!(
buffer,
"<a href=\"{}index.html\">{}</a>",
&cx.root_path()[..(cx.current.len() - i - 1) * 3],
*name
);
}
buffer.write_str("</h2>");
}
// Sidebar refers to the enclosing module, not this module.
let relpath = if it.is_mod() && parentlen != 0 { "./" } else { "" };
write!(
buffer,
"<div id=\"sidebar-vars\" data-name=\"{name}\" data-ty=\"{ty}\" data-relpath=\"{path}\">\
</div>",
name = it.name.unwrap_or(kw::Empty),
ty = it.type_(),
path = relpath
);
write!(buffer, "<script defer src=\"{}sidebar-items.js\"></script>", relpath);
// Closes sidebar-elems div.
buffer.write_str("</div>");
}
fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
if used_links.insert(url.clone()) {
return url;
}
let mut add = 1;
while !used_links.insert(format!("{}-{}", url, add)) {
add += 1;
}
format!("{}-{}", url, add)
}
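// e.g. `get_next_url` keeps the first "method.push" as-is; later duplicates become
// "method.push-1", "method.push-2", and so on, keeping sidebar anchors unique.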
fn get_methods(
i: &clean::Impl,
for_deref: bool,
used_links: &mut FxHashSet<String>,
deref_mut: bool,
tcx: TyCtxt<'_>,
) -> Vec<String> {
i.items
.iter()
.filter_map(|item| match item.name {
Some(ref name) if !name.is_empty() && item.is_method() => {
if !for_deref || should_render_item(item, deref_mut, tcx) {
Some(format!(
"<a href=\"#{}\">{}</a>",
get_next_url(used_links, format!("method.{}", name)),
name
))
} else {
None
}
}
_ => None,
})
.collect::<Vec<_>>()
}
// The point is to URL-encode any character that can appear in a generic type's rendered form.
fn small_url_encode(s: String) -> String {
let mut st = String::new();
let mut last_match = 0;
for (idx, c) in s.char_indices() {
let escaped = match c {
'<' => "%3C",
'>' => "%3E",
' ' => "%20",
'?' => "%3F",
'\'' => "%27",
'&' => "%26",
',' => "%2C",
':' => "%3A",
';' => "%3B",
'[' => "%5B",
']' => "%5D",
'"' => "%22",
_ => continue,
};
st += &s[last_match..idx];
st += escaped;
        // NOTE: `idx + 1` assumes the matched character is a single byte, which holds
        // because every character we match above is ASCII.
last_match = idx + 1;
}
if last_match != 0 {
st += &s[last_match..];
st
} else {
s
}
}
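// e.g. small_url_encode("Vec<T, A>".to_string()) == "Vec%3CT%2C%20A%3E".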
fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
let did = it.def_id.expect_def_id();
let cache = cx.cache();
if let Some(v) = cache.impls.get(&did) {
let mut used_links = FxHashSet::default();
{
let used_links_bor = &mut used_links;
let mut ret = v
.iter()
.filter(|i| i.inner_impl().trait_.is_none())
.flat_map(move |i| {
get_methods(i.inner_impl(), false, used_links_bor, false, cx.tcx())
})
.collect::<Vec<_>>();
if !ret.is_empty() {
// We want links' order to be reproducible so we don't use unstable sort.
ret.sort();
out.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#implementations\">Methods</a></h3>\
<div class=\"sidebar-links\">",
);
for line in ret {
out.push_str(&line);
}
out.push_str("</div>");
}
}
if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
if let Some(impl_) = v.iter().filter(|i| i.inner_impl().trait_.is_some()).find(|i| {
i.inner_impl().trait_.def_id_full(cache) == cx.tcx().lang_items().deref_trait()
}) {
sidebar_deref_methods(cx, out, impl_, v);
}
let format_impls = |impls: Vec<&Impl>| {
let mut links = FxHashSet::default();
let mut ret = impls
.iter()
.filter_map(|it| {
if let Some(ref i) = it.inner_impl().trait_ {
let i_display = format!("{:#}", i.print(cx));
let out = Escape(&i_display);
let encoded = small_url_encode(format!("{:#}", i.print(cx)));
let generated = format!(
"<a href=\"#impl-{}\">{}{}</a>",
encoded,
if it.inner_impl().negative_polarity { "!" } else { "" },
out
);
if links.insert(generated.clone()) { Some(generated) } else { None }
} else {
None
}
})
.collect::<Vec<String>>();
ret.sort();
ret
};
let write_sidebar_links = |out: &mut Buffer, links: Vec<String>| {
out.push_str("<div class=\"sidebar-links\">");
for link in links {
out.push_str(&link);
}
out.push_str("</div>");
};
let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) =
v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().synthetic);
let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) = concrete
.into_iter()
.partition::<Vec<_>, _>(|i| i.inner_impl().blanket_impl.is_some());
let concrete_format = format_impls(concrete);
let synthetic_format = format_impls(synthetic);
let blanket_format = format_impls(blanket_impl);
if !concrete_format.is_empty() {
out.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#trait-implementations\">\
Trait Implementations</a></h3>",
);
write_sidebar_links(out, concrete_format);
}
if !synthetic_format.is_empty() {
out.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#synthetic-implementations\">\
Auto Trait Implementations</a></h3>",
);
write_sidebar_links(out, synthetic_format);
}
if !blanket_format.is_empty() {
out.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#blanket-implementations\">\
Blanket Implementations</a></h3>",
);
write_sidebar_links(out, blanket_format);
}
}
}
}
fn sidebar_deref_methods(cx: &Context<'_>, out: &mut Buffer, impl_: &Impl, v: &Vec<Impl>) {
let c = cx.cache();
debug!("found Deref: {:?}", impl_);
if let Some((target, real_target)) =
impl_.inner_impl().items.iter().find_map(|item| match *item.kind {
clean::TypedefItem(ref t, true) => Some(match *t {
clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
_ => (&t.type_, &t.type_),
}),
_ => None,
})
{
debug!("found target, real_target: {:?} {:?}", target, real_target);
if let Some(did) = target.def_id_full(c) {
if let Some(type_did) = impl_.inner_impl().for_.def_id_full(c) {
// `impl Deref<Target = S> for S`
if did == type_did {
// Avoid infinite cycles
return;
}
}
}
let deref_mut = v.iter().filter(|i| i.inner_impl().trait_.is_some()).any(|i| {
i.inner_impl().trait_.def_id_full(c) == cx.tcx().lang_items().deref_mut_trait()
});
let inner_impl = target
.def_id_full(c)
.or_else(|| {
target.primitive_type().and_then(|prim| c.primitive_locations.get(&prim).cloned())
})
.and_then(|did| c.impls.get(&did));
if let Some(impls) = inner_impl {
debug!("found inner_impl: {:?}", impls);
let mut used_links = FxHashSet::default();
let mut ret = impls
.iter()
.filter(|i| i.inner_impl().trait_.is_none())
.flat_map(|i| {
get_methods(i.inner_impl(), true, &mut used_links, deref_mut, cx.tcx())
})
.collect::<Vec<_>>();
if !ret.is_empty() {
write!(
out,
"<h3 class=\"sidebar-title\"><a href=\"#deref-methods\">Methods from {}<Target={}></a></h3>",
Escape(&format!("{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print(cx))),
Escape(&format!("{:#}", real_target.print(cx))),
);
// We want links' order to be reproducible so we don't use unstable sort.
ret.sort();
out.push_str("<div class=\"sidebar-links\">");
for link in ret {
out.push_str(&link);
}
out.push_str("</div>");
}
}
}
}
fn sidebar_struct(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) {
let mut sidebar = Buffer::new();
let fields = get_struct_fields_name(&s.fields);
if !fields.is_empty() {
if let CtorKind::Fictive = s.struct_type {
sidebar.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#fields\">Fields</a></h3>\
<div class=\"sidebar-links\">",
);
for field in fields {
sidebar.push_str(&field);
}
sidebar.push_str("</div>");
} else if let CtorKind::Fn = s.struct_type {
sidebar
.push_str("<h3 class=\"sidebar-title\"><a href=\"#fields\">Tuple Fields</a></h3>");
}
}
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
fn get_id_for_impl_on_foreign_type(
for_: &clean::Type,
trait_: &clean::Type,
cx: &Context<'_>,
) -> String {
small_url_encode(format!("impl-{:#}-for-{:#}", trait_.print(cx), for_.print(cx),))
}
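// e.g. an `impl Display for str` block gets an id along the lines of "impl-Display-for-str",
// with any generic punctuation percent-encoded by `small_url_encode`.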
fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> {
match *item.kind {
clean::ItemKind::ImplItem(ref i) => {
if let Some(ref trait_) = i.trait_ {
// Alternative format produces no URLs,
// so this parameter does nothing.
Some((
format!("{:#}", i.for_.print(cx)),
get_id_for_impl_on_foreign_type(&i.for_, trait_, cx),
))
} else {
None
}
}
_ => None,
}
}
fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) {
buf.write_str("<div class=\"block items\">");
fn print_sidebar_section(
out: &mut Buffer,
items: &[clean::Item],
before: &str,
filter: impl Fn(&clean::Item) -> bool,
write: impl Fn(&mut Buffer, &str),
after: &str,
) {
let mut items = items
.iter()
.filter_map(|m| match m.name {
Some(ref name) if filter(m) => Some(name.as_str()),
_ => None,
})
.collect::<Vec<_>>();
if !items.is_empty() {
items.sort_unstable();
out.push_str(before);
for item in items.into_iter() {
write(out, &item);
}
out.push_str(after);
}
}
print_sidebar_section(
buf,
&t.items,
"<h3 class=\"sidebar-title\"><a href=\"#associated-types\">\
Associated Types</a></h3><div class=\"sidebar-links\">",
|m| m.is_associated_type(),
|out, sym| write!(out, "<a href=\"#associatedtype.{0}\">{0}</a>", sym),
"</div>",
);
print_sidebar_section(
buf,
&t.items,
"<h3 class=\"sidebar-title\"><a href=\"#associated-const\">\
Associated Constants</a></h3><div class=\"sidebar-links\">",
|m| m.is_associated_const(),
|out, sym| write!(out, "<a href=\"#associatedconstant.{0}\">{0}</a>", sym),
"</div>",
);
print_sidebar_section(
buf,
&t.items,
"<h3 class=\"sidebar-title\"><a href=\"#required-methods\">\
Required Methods</a></h3><div class=\"sidebar-links\">",
|m| m.is_ty_method(),
|out, sym| write!(out, "<a href=\"#tymethod.{0}\">{0}</a>", sym),
"</div>",
);
print_sidebar_section(
buf,
&t.items,
"<h3 class=\"sidebar-title\"><a href=\"#provided-methods\">\
Provided Methods</a></h3><div class=\"sidebar-links\">",
|m| m.is_method(),
|out, sym| write!(out, "<a href=\"#method.{0}\">{0}</a>", sym),
"</div>",
);
let cache = cx.cache();
if let Some(implementors) = cache.implementors.get(&it.def_id.expect_def_id()) {
let mut res = implementors
.iter()
.filter(|i| {
i.inner_impl()
.for_
.def_id_full(cache)
.map_or(false, |d| !cache.paths.contains_key(&d))
})
.filter_map(|i| extract_for_impl_name(&i.impl_item, cx))
.collect::<Vec<_>>();
if !res.is_empty() {
res.sort();
buf.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#foreign-impls\">\
Implementations on Foreign Types</a></h3>\
<div class=\"sidebar-links\">",
);
for (name, id) in res.into_iter() {
write!(buf, "<a href=\"#{}\">{}</a>", id, Escape(&name));
}
buf.push_str("</div>");
}
}
sidebar_assoc_items(cx, buf, it);
buf.push_str("<h3 class=\"sidebar-title\"><a href=\"#implementors\">Implementors</a></h3>");
if t.is_auto {
buf.push_str(
"<h3 class=\"sidebar-title\"><a \
href=\"#synthetic-implementors\">Auto Implementors</a></h3>",
);
}
buf.push_str("</div>")
}
fn sidebar_primitive(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
let mut sidebar = Buffer::new();
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
fn sidebar_typedef(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
let mut sidebar = Buffer::new();
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
fn get_struct_fields_name(fields: &[clean::Item]) -> Vec<String> {
let mut fields = fields
.iter()
.filter(|f| matches!(*f.kind, clean::StructFieldItem(..)))
.filter_map(|f| {
f.name.map(|name| format!("<a href=\"#structfield.{name}\">{name}</a>", name = name))
})
.collect::<Vec<_>>();
fields.sort();
fields
}
fn sidebar_union(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, u: &clean::Union) {
let mut sidebar = Buffer::new();
let fields = get_struct_fields_name(&u.fields);
if !fields.is_empty() {
sidebar.push_str(
"<h3 class=\"sidebar-title\"><a href=\"#fields\">Fields</a></h3>\
<div class=\"sidebar-links\">",
);
for field in fields {
sidebar.push_str(&field);
}
sidebar.push_str("</div>");
}
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean::Enum) {
let mut sidebar = Buffer::new();
let mut variants = e
.variants
.iter()
.filter_map(|v| match v.name {
Some(ref name) => Some(format!("<a href=\"#variant.{name}\">{name}</a>", name = name)),
_ => None,
})
.collect::<Vec<_>>();
if !variants.is_empty() {
variants.sort_unstable();
sidebar.push_str(&format!(
"<h3 class=\"sidebar-title\"><a href=\"#variants\">Variants</a></h3>\
<div class=\"sidebar-links\">{}</div>",
variants.join(""),
));
}
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
fn item_ty_to_strs(ty: ItemType) -> (&'static str, &'static str) {
match ty {
ItemType::ExternCrate | ItemType::Import => ("reexports", "Re-exports"),
ItemType::Module => ("modules", "Modules"),
ItemType::Struct => ("structs", "Structs"),
ItemType::Union => ("unions", "Unions"),
ItemType::Enum => ("enums", "Enums"),
ItemType::Function => ("functions", "Functions"),
ItemType::Typedef => ("types", "Type Definitions"),
ItemType::Static => ("statics", "Statics"),
ItemType::Constant => ("constants", "Constants"),
ItemType::Trait => ("traits", "Traits"),
ItemType::Impl => ("impls", "Implementations"),
ItemType::TyMethod => ("tymethods", "Type Methods"),
ItemType::Method => ("methods", "Methods"),
ItemType::StructField => ("fields", "Struct Fields"),
ItemType::Variant => ("variants", "Variants"),
ItemType::Macro => ("macros", "Macros"),
ItemType::Primitive => ("primitives", "Primitive Types"),
ItemType::AssocType => ("associated-types", "Associated Types"),
ItemType::AssocConst => ("associated-consts", "Associated Constants"),
ItemType::ForeignType => ("foreign-types", "Foreign Types"),
ItemType::Keyword => ("keywords", "Keywords"),
ItemType::OpaqueTy => ("opaque-types", "Opaque Types"),
ItemType::ProcAttribute => ("attributes", "Attribute Macros"),
ItemType::ProcDerive => ("derives", "Derive Macros"),
ItemType::TraitAlias => ("trait-aliases", "Trait aliases"),
}
}
fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
let mut sidebar = String::new();
// Re-exports are handled a bit differently because they can be extern crates or imports.
if items.iter().any(|it| {
it.name.is_some()
&& (it.type_() == ItemType::ExternCrate
|| (it.type_() == ItemType::Import && !it.is_stripped()))
}) {
let (id, name) = item_ty_to_strs(ItemType::Import);
sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", id, name));
}
    // Ordering taken from `item_module`'s `reorder`, which prioritizes item types in the
    // order used to print the module's section headings.
for &myty in &[
ItemType::Primitive,
ItemType::Module,
ItemType::Macro,
ItemType::Struct,
ItemType::Enum,
ItemType::Constant,
ItemType::Static,
ItemType::Trait,
ItemType::Function,
ItemType::Typedef,
ItemType::Union,
ItemType::Impl,
ItemType::TyMethod,
ItemType::Method,
ItemType::StructField,
ItemType::Variant,
ItemType::AssocType,
ItemType::AssocConst,
ItemType::ForeignType,
ItemType::Keyword,
] {
if items.iter().any(|it| !it.is_stripped() && it.type_() == myty && it.name.is_some()) {
let (id, name) = item_ty_to_strs(myty);
sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", id, name));
}
}
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\"><ul>{}</ul></div>", sidebar);
}
}
fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
let mut sidebar = Buffer::new();
sidebar_assoc_items(cx, &mut sidebar, it);
if !sidebar.is_empty() {
write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
}
}
crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
/// Returns a list of all paths used in the type.
/// This is used to help deduplicate imported impls
/// for reexported types. If any of the contained
/// types are re-exported, we don't use the corresponding
/// entry from the js file, as inlining will have already
/// picked up the impl.
fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
let mut out = Vec::new();
let mut visited = FxHashSet::default();
let mut work = VecDeque::new();
work.push_back(first_ty);
while let Some(ty) = work.pop_front() {
if !visited.insert(ty.clone()) {
continue;
}
match ty {
clean::Type::ResolvedPath { did, .. } => {
let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone());
let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern);
if let Some(path) = fqp {
out.push(path.join("::"));
}
}
clean::Type::Tuple(tys) => {
work.extend(tys.into_iter());
}
clean::Type::Slice(ty) => {
work.push_back(*ty);
}
clean::Type::Array(ty, _) => {
work.push_back(*ty);
}
clean::Type::RawPointer(_, ty) => {
work.push_back(*ty);
}
clean::Type::BorrowedRef { type_, .. } => {
work.push_back(*type_);
}
clean::Type::QPath { self_type, trait_, .. } => {
work.push_back(*self_type);
work.push_back(*trait_);
}
_ => {}
}
}
out
}
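// e.g. a type like `&Vec<u8>` contributes the fully-qualified path of `Vec`
// (something like "alloc::vec::Vec"), while primitives and generic parameters fall
// through the `_ => {}` arm and contribute nothing.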
| 35.136717 | 140 | 0.504666 |
f4dd294669c4118b1e6b2c68f3cb054447e37b9f | 9,989 | use std::{ptr, mem, cmp};
use std::sync::atomic::{AtomicUsize, AtomicBool};
use std::sync::atomic::Ordering::{SeqCst};
use std::sync::{Mutex, Condvar};
use alloc::heap::{allocate, deallocate};
use std::cell::{Cell};
use select::{_Selectable, WaitQueue, Payload};
use alloc::{oom};
use {Error, Sendable};
const CACHE_LINE_SIZE: usize = 64;
struct CacheLinePad([u8; CACHE_LINE_SIZE]);
impl CacheLinePad {
fn new() -> CacheLinePad {
unsafe { mem::uninitialized() }
}
}
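// Padding helper sized to one cache line; presumably meant to keep producer- and
// consumer-side fields on separate cache lines (it is not referenced in this file).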
struct Node<T: Sendable> {
val: T,
pos: AtomicUsize,
}
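// `pos` acts as a per-slot sequence number, as in the familiar bounded ring-buffer queue
// scheme: a slot at position `p` is free for writing when `pos == p`, holds a readable
// value when `pos == p + 1`, and consuming it bumps `pos` by the capacity so the slot can
// be reused on the next lap.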
#[repr(C)]
pub struct Packet<'a, T: Sendable+'a> {
// The id of this channel. The address of the `arc::Inner` that contains this channel.
id: Cell<usize>,
// The buffer we store the massages in.
buf: *mut Node<T>,
// One less than the capacity of the channel. Note that the capacity is a power of
// two.
cap_mask: usize,
next_write: Cell<usize>,
next_read: AtomicUsize,
// Is the sender sleeping?
have_sleeping_sender: AtomicBool,
// Condvar the sender is sleeping on.
send_condvar: Condvar,
// Number of receivers that are currently sleeping.
sleeping_receivers: AtomicUsize,
    // Condvar the receivers are sleeping on.
recv_condvar: Condvar,
sender_disconnected: AtomicBool,
num_receivers: AtomicUsize,
    // Mutex paired with the two condvars above; it guards the sleeping-sender flag and the
    // sleeping-receiver count.
sleep_mutex: Mutex<()>,
// Is any one selecting on this channel?
wait_queue_used: AtomicBool,
wait_queue: Mutex<WaitQueue<'a>>,
}
impl<'a, T: Sendable+'a> Packet<'a, T> {
pub fn new(mut buf_size: usize) -> Packet<'a, T> {
buf_size = cmp::max(buf_size, 2);
let cap = buf_size.checked_next_power_of_two().unwrap_or(!0);
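        // Capacity is rounded up to a power of two (and to at least 2), so `pos & cap_mask`
        // maps any position to its slot; e.g. `buf_size = 5` gives `cap = 8`, `cap_mask = 7`.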
let size = cap.checked_mul(mem::size_of::<Node<T>>()).unwrap_or(!0);
let buf = unsafe { allocate(size, mem::align_of::<T>()) };
if buf.is_null() {
oom();
}
let packet = Packet {
id: Cell::new(0),
buf: buf as *mut Node<T>,
cap_mask: cap - 1,
next_write: Cell::new(0),
next_read: AtomicUsize::new(0),
have_sleeping_sender: AtomicBool::new(false),
send_condvar: Condvar::new(),
sleeping_receivers: AtomicUsize::new(0),
recv_condvar: Condvar::new(),
sender_disconnected: AtomicBool::new(false),
num_receivers: AtomicUsize::new(1),
sleep_mutex: Mutex::new(()),
wait_queue_used: AtomicBool::new(false),
wait_queue: Mutex::new(WaitQueue::new()),
};
for i in 0..cap {
packet.get_node(i).pos.store(i, SeqCst);
}
packet
}
/// Call this function before any other.
pub fn set_id(&self, id: usize) {
self.id.set(id);
self.wait_queue.lock().unwrap().set_id(id);
}
/// Call this function when the receiver is cloned.
pub fn add_receiver(&self) {
self.num_receivers.fetch_add(1, SeqCst);
}
/// Call this function when a receiver is dropped.
pub fn remove_receiver(&self) {
if self.num_receivers.fetch_sub(1, SeqCst) == 1 {
let _guard = self.sleep_mutex.lock().unwrap();
if self.have_sleeping_sender.load(SeqCst) {
self.send_condvar.notify_one();
}
}
}
    /// Call this function when the sender is dropped.
pub fn remove_sender(&self) {
self.sender_disconnected.store(true, SeqCst);
let _guard = self.sleep_mutex.lock().unwrap();
if self.sleeping_receivers.load(SeqCst) > 0 {
self.recv_condvar.notify_all();
}
self.notify_wait_queue();
}
fn notify_wait_queue(&self) {
if self.wait_queue_used.load(SeqCst) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.notify() == 0 {
self.wait_queue_used.store(false, SeqCst);
}
}
}
fn get_node(&self, pos: usize) -> &mut Node<T> {
unsafe { &mut *self.buf.offset((pos & self.cap_mask) as isize) }
}
/// Get a position to write to if the queue isn't full
fn get_write_pos(&self) -> Option<usize> {
let next_write = self.next_write.get();
let node = self.get_node(next_write);
let diff = node.pos.load(SeqCst) as isize - next_write as isize;
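        // `diff == 0` means the slot is free for this position; `diff < 0` means a receiver
        // has not yet consumed it, i.e. the queue is currently full.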
if diff < 0 {
None
} else {
assert!(diff == 0);
self.next_write.set(next_write + 1);
Some(next_write)
}
}
pub fn send_async(&self, val: T, have_lock: bool) -> Result<(), (T, Error)> {
if self.num_receivers.load(SeqCst) == 0 {
return Err((val, Error::Disconnected))
}
let write_pos = if let Some(w) = self.get_write_pos() {
w
} else {
return if self.num_receivers.load(SeqCst) == 0 {
Err((val, Error::Disconnected))
} else {
Err((val, Error::Full))
};
};
{
let node = self.get_node(write_pos);
unsafe { ptr::write(&mut node.val, val); }
node.pos.store(write_pos + 1, SeqCst);
}
if self.sleeping_receivers.load(SeqCst) > 0 {
if have_lock {
self.recv_condvar.notify_one();
} else {
let _guard = self.sleep_mutex.lock().unwrap();
self.recv_condvar.notify_one();
}
}
self.notify_wait_queue();
Ok(())
}
pub fn send_sync(&self, mut val: T) -> Result<(), (T, Error)> {
val = match self.send_async(val, false) {
Err((v, Error::Full)) => v,
e @ Err(_) => return e,
Ok(_) => return Ok(()),
};
let mut rv = Ok(());
let mut guard = self.sleep_mutex.lock().unwrap();
self.have_sleeping_sender.store(true, SeqCst);
loop {
val = match self.send_async(val, true) {
Err((v, Error::Full)) => v,
e @ Err(_) => { rv = e; break; },
Ok(_) => break,
};
guard = self.send_condvar.wait(guard).unwrap();
}
self.have_sleeping_sender.store(false, SeqCst);
rv
}
/// Get a position to read from if the queue isn't empty
fn get_read_pos(&self) -> Option<usize> {
let mut next_read = self.next_read.load(SeqCst);
loop {
let node = self.get_node(next_read);
let diff = node.pos.load(SeqCst) as isize - 1 - next_read as isize;
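            // `diff == 0` means this slot holds an unread value; `diff < 0` means the queue
            // is empty at `next_read`; `diff > 0` means another receiver already claimed the
            // slot, so reload `next_read` and retry.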
if diff < 0 {
return None;
} else if diff > 0 {
next_read = self.next_read.load(SeqCst);
} else {
let next_read_old = next_read;
next_read = self.next_read.compare_and_swap(next_read, next_read + 1,
SeqCst);
if next_read_old == next_read {
return Some(next_read);
}
}
}
}
pub fn recv_async(&self, have_lock: bool) -> Result<T, Error> {
let read_pos = if let Some(r) = self.get_read_pos() {
r
} else {
return if self.sender_disconnected.load(SeqCst) {
Err(Error::Disconnected)
} else {
Err(Error::Empty)
};
};
let val;
{
let node = self.get_node(read_pos);
val = unsafe { ptr::read(&node.val) };
node.pos.store(read_pos + self.cap_mask + 1, SeqCst);
}
if self.have_sleeping_sender.load(SeqCst) {
if have_lock {
self.send_condvar.notify_one();
} else {
let _guard = self.sleep_mutex.lock().unwrap();
self.send_condvar.notify_one();
}
}
Ok(val)
}
pub fn recv_sync(&self) -> Result<T, Error> {
match self.recv_async(false) {
Err(Error::Empty) => { },
e @ Err(_) => return e,
v @ Ok(_) => return v,
}
let rv;
let mut guard = self.sleep_mutex.lock().unwrap();
self.sleeping_receivers.fetch_add(1, SeqCst);
loop {
match self.recv_async(true) {
Err(Error::Empty) => { },
e @ Err(_) => { rv = e; break; },
v @ Ok(_) => { rv = v; break; },
}
guard = self.recv_condvar.wait(guard).unwrap();
}
self.sleeping_receivers.fetch_sub(1, SeqCst);
rv
}
}
unsafe impl<'a, T: Sendable+'a> Send for Packet<'a, T> { }
unsafe impl<'a, T: Sendable+'a> Sync for Packet<'a, T> { }
impl<'a, T: Sendable+'a> Drop for Packet<'a, T> {
fn drop(&mut self) {
while self.recv_async(false).is_ok() { }
unsafe {
deallocate(self.buf as *mut u8,
(self.cap_mask as usize + 1) * mem::size_of::<Node<T>>(),
mem::align_of::<Node<T>>());
}
}
}
unsafe impl<'a, T: Sendable+'a> _Selectable<'a> for Packet<'a, T> {
fn ready(&self) -> bool {
if self.sender_disconnected.load(SeqCst) {
return true;
}
let next_read = self.next_read.load(SeqCst);
let node = self.get_node(next_read);
node.pos.load(SeqCst) as isize - 1 - next_read as isize >= 0
}
fn register(&self, load: Payload<'a>) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.add(load) > 0 {
self.wait_queue_used.store(true, SeqCst);
}
}
fn unregister(&self, id: usize) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.remove(id) == 0 {
self.wait_queue_used.store(false, SeqCst);
}
}
}
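// A minimal usage sketch (illustrative only; in practice the channel constructors wrap
// `Packet` in a shared allocation and call `set_id` with that allocation's address):
//
//     let p = Packet::<u32>::new(8);
//     p.set_id(&p as *const _ as usize);
//     p.send_sync(1).unwrap();
//     assert_eq!(p.recv_sync().unwrap(), 1);
//     p.remove_sender();
//     p.remove_receiver();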
| 30.454268 | 90 | 0.530584 |
ef0ea4e730ae098736a75b9c157ef720c061cdc9 | 63,744 | /*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// -----------------------------------------------
// This file is generated. Please do not edit it manually.
// Run the following in the root of the repo to regenerate:
//
// cargo make generate-api
// -----------------------------------------------
//! Watcher (Alerting) APIs
//!
//! Enable [watching for changes or anomalies in data and perform the necessary actions in response](https://www.elastic.co/guide/en/elasticsearch/reference/master/xpack-alerting.html),
//! by creating and managing watches that take action based on a met condition.
#![allow(unused_imports)]
use crate::{
client::Elasticsearch,
error::Error,
http::{
headers::{HeaderMap, HeaderName, HeaderValue, ACCEPT, CONTENT_TYPE},
request::{Body, JsonBody, NdBody, PARTS_ENCODED},
response::Response,
transport::Transport,
Method,
},
params::*,
};
use percent_encoding::percent_encode;
use serde::Serialize;
use std::{borrow::Cow, time::Duration};
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Ack Watch API"]
pub enum WatcherAckWatchParts<'b> {
#[doc = "WatchId"]
WatchId(&'b str),
#[doc = "WatchId and ActionId"]
WatchIdActionId(&'b str, &'b [&'b str]),
}
impl<'b> WatcherAckWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Ack Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherAckWatchParts::WatchId(ref watch_id) => {
let encoded_watch_id: Cow<str> =
percent_encode(watch_id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(21usize + encoded_watch_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_watch_id.as_ref());
p.push_str("/_ack");
p.into()
}
WatcherAckWatchParts::WatchIdActionId(ref watch_id, ref action_id) => {
let action_id_str = action_id.join(",");
let encoded_watch_id: Cow<str> =
percent_encode(watch_id.as_bytes(), PARTS_ENCODED).into();
let encoded_action_id: Cow<str> =
percent_encode(action_id_str.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(
22usize + encoded_watch_id.len() + encoded_action_id.len(),
);
p.push_str("/_watcher/watch/");
p.push_str(encoded_watch_id.as_ref());
p.push_str("/_ack/");
p.push_str(encoded_action_id.as_ref());
p.into()
}
}
}
}
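// e.g. `WatcherAckWatchParts::WatchId("my-watch").url()` yields
// "/_watcher/watch/my-watch/_ack"; the `WatchIdActionId` variant appends the comma-joined
// action ids after "/_ack/".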
#[doc = "Builder for the [Watcher Ack Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-ack-watch.html)\n\nAcknowledges a watch, manually throttling the execution of the watch's actions."]
#[derive(Clone, Debug)]
pub struct WatcherAckWatch<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherAckWatchParts<'b>,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherAckWatch<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherAckWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherAckWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherAckWatch {
transport,
parts,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherAckWatch<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherAckWatch {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Ack Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
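// A minimal usage sketch (assumes an `Elasticsearch` client value named `client`; the
// `watcher()` namespace accessor and its `ack_watch` helper are generated further down in
// this file):
//
//     let response = client
//         .watcher()
//         .ack_watch(WatcherAckWatchParts::WatchId("my-watch"))
//         .send()
//         .await?;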
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Activate Watch API"]
pub enum WatcherActivateWatchParts<'b> {
#[doc = "WatchId"]
WatchId(&'b str),
}
impl<'b> WatcherActivateWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Activate Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherActivateWatchParts::WatchId(ref watch_id) => {
let encoded_watch_id: Cow<str> =
percent_encode(watch_id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(26usize + encoded_watch_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_watch_id.as_ref());
p.push_str("/_activate");
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Activate Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-activate-watch.html)\n\nActivates a currently inactive watch."]
#[derive(Clone, Debug)]
pub struct WatcherActivateWatch<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherActivateWatchParts<'b>,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherActivateWatch<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherActivateWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherActivateWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherActivateWatch {
transport,
parts,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherActivateWatch<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherActivateWatch {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Activate Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Deactivate Watch API"]
pub enum WatcherDeactivateWatchParts<'b> {
#[doc = "WatchId"]
WatchId(&'b str),
}
impl<'b> WatcherDeactivateWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Deactivate Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherDeactivateWatchParts::WatchId(ref watch_id) => {
let encoded_watch_id: Cow<str> =
percent_encode(watch_id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(28usize + encoded_watch_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_watch_id.as_ref());
p.push_str("/_deactivate");
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Deactivate Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-deactivate-watch.html)\n\nDeactivates a currently active watch."]
#[derive(Clone, Debug)]
pub struct WatcherDeactivateWatch<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherDeactivateWatchParts<'b>,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherDeactivateWatch<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherDeactivateWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherDeactivateWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherDeactivateWatch {
transport,
parts,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherDeactivateWatch<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherDeactivateWatch {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Deactivate Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Delete Watch API"]
pub enum WatcherDeleteWatchParts<'b> {
#[doc = "Id"]
Id(&'b str),
}
impl<'b> WatcherDeleteWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Delete Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherDeleteWatchParts::Id(ref id) => {
let encoded_id: Cow<str> = percent_encode(id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(16usize + encoded_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_id.as_ref());
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Delete Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-delete-watch.html)\n\nRemoves a watch from Watcher."]
#[derive(Clone, Debug)]
pub struct WatcherDeleteWatch<'a, 'b> {
transport: &'a Transport,
parts: WatcherDeleteWatchParts<'b>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b> WatcherDeleteWatch<'a, 'b> {
#[doc = "Creates a new instance of [WatcherDeleteWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherDeleteWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherDeleteWatch {
transport,
parts,
headers,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Delete Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Delete;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = Option::<()>::None;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Execute Watch API"]
pub enum WatcherExecuteWatchParts<'b> {
#[doc = "Id"]
Id(&'b str),
#[doc = "No parts"]
None,
}
impl<'b> WatcherExecuteWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Execute Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherExecuteWatchParts::Id(ref id) => {
let encoded_id: Cow<str> = percent_encode(id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(25usize + encoded_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_id.as_ref());
p.push_str("/_execute");
p.into()
}
WatcherExecuteWatchParts::None => "/_watcher/watch/_execute".into(),
}
}
}
#[doc = "Builder for the [Watcher Execute Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-execute-watch.html)\n\nForces the execution of a stored watch."]
#[derive(Clone, Debug)]
pub struct WatcherExecuteWatch<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherExecuteWatchParts<'b>,
body: Option<B>,
debug: Option<bool>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherExecuteWatch<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherExecuteWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherExecuteWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherExecuteWatch {
transport,
parts,
headers,
body: None,
debug: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherExecuteWatch<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherExecuteWatch {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
debug: self.debug,
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "indicates whether the watch should execute in debug mode"]
pub fn debug(mut self, debug: bool) -> Self {
self.debug = Some(debug);
self
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Execute Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
debug: Option<bool>,
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
debug: self.debug,
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Get Watch API"]
pub enum WatcherGetWatchParts<'b> {
#[doc = "Id"]
Id(&'b str),
}
impl<'b> WatcherGetWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Get Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherGetWatchParts::Id(ref id) => {
let encoded_id: Cow<str> = percent_encode(id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(16usize + encoded_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_id.as_ref());
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Get Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-get-watch.html)\n\nRetrieves a watch by its ID."]
#[derive(Clone, Debug)]
pub struct WatcherGetWatch<'a, 'b> {
transport: &'a Transport,
parts: WatcherGetWatchParts<'b>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b> WatcherGetWatch<'a, 'b> {
#[doc = "Creates a new instance of [WatcherGetWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherGetWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherGetWatch {
transport,
parts,
headers,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Get Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Get;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = Option::<()>::None;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Put Watch API"]
pub enum WatcherPutWatchParts<'b> {
#[doc = "Id"]
Id(&'b str),
}
impl<'b> WatcherPutWatchParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Put Watch API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherPutWatchParts::Id(ref id) => {
let encoded_id: Cow<str> = percent_encode(id.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(16usize + encoded_id.len());
p.push_str("/_watcher/watch/");
p.push_str(encoded_id.as_ref());
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Put Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-put-watch.html)\n\nCreates a new watch, or updates an existing one."]
#[derive(Clone, Debug)]
pub struct WatcherPutWatch<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherPutWatchParts<'b>,
active: Option<bool>,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
if_primary_term: Option<i64>,
if_seq_no: Option<i64>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
version: Option<i64>,
}
impl<'a, 'b, B> WatcherPutWatch<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherPutWatch] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherPutWatchParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherPutWatch {
transport,
parts,
headers,
active: None,
body: None,
error_trace: None,
filter_path: None,
human: None,
if_primary_term: None,
if_seq_no: None,
pretty: None,
request_timeout: None,
source: None,
version: None,
}
}
#[doc = "Specify whether the watch is in/active by default"]
pub fn active(mut self, active: bool) -> Self {
self.active = Some(active);
self
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherPutWatch<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherPutWatch {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
active: self.active,
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
if_primary_term: self.if_primary_term,
if_seq_no: self.if_seq_no,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
version: self.version,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "only update the watch if the last operation that has changed the watch has the specified primary term"]
pub fn if_primary_term(mut self, if_primary_term: i64) -> Self {
self.if_primary_term = Some(if_primary_term);
self
}
#[doc = "only update the watch if the last operation that has changed the watch has the specified sequence number"]
pub fn if_seq_no(mut self, if_seq_no: i64) -> Self {
self.if_seq_no = Some(if_seq_no);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Explicit version number for concurrency control"]
pub fn version(mut self, version: i64) -> Self {
self.version = Some(version);
self
}
#[doc = "Creates an asynchronous call to the Watcher Put Watch API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Put;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
active: Option<bool>,
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
if_primary_term: Option<i64>,
if_seq_no: Option<i64>,
pretty: Option<bool>,
source: Option<&'b str>,
version: Option<i64>,
}
let query_params = QueryParams {
active: self.active,
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
if_primary_term: self.if_primary_term,
if_seq_no: self.if_seq_no,
pretty: self.pretty,
source: self.source,
version: self.version,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Query Watches API"]
pub enum WatcherQueryWatchesParts {
#[doc = "No parts"]
None,
}
impl WatcherQueryWatchesParts {
#[doc = "Builds a relative URL path to the Watcher Query Watches API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherQueryWatchesParts::None => "/_watcher/_query/watches".into(),
}
}
}
#[doc = "Builder for the [Watcher Query Watches API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-query-watches.html)\n\nRetrieves stored watches."]
#[derive(Clone, Debug)]
pub struct WatcherQueryWatches<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherQueryWatchesParts,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherQueryWatches<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherQueryWatches]"]
pub fn new(transport: &'a Transport) -> Self {
let headers = HeaderMap::new();
WatcherQueryWatches {
transport,
parts: WatcherQueryWatchesParts::None,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherQueryWatches<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherQueryWatches {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Query Watches API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = match self.body {
Some(_) => Method::Post,
None => Method::Get,
};
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Start API"]
pub enum WatcherStartParts {
#[doc = "No parts"]
None,
}
impl WatcherStartParts {
#[doc = "Builds a relative URL path to the Watcher Start API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherStartParts::None => "/_watcher/_start".into(),
}
}
}
#[doc = "Builder for the [Watcher Start API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-start.html)\n\nStarts Watcher if it is not already running."]
#[derive(Clone, Debug)]
pub struct WatcherStart<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherStartParts,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherStart<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherStart]"]
pub fn new(transport: &'a Transport) -> Self {
let headers = HeaderMap::new();
WatcherStart {
transport,
parts: WatcherStartParts::None,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherStart<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherStart {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Start API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Stats API"]
pub enum WatcherStatsParts<'b> {
#[doc = "No parts"]
None,
#[doc = "Metric"]
Metric(&'b [&'b str]),
}
impl<'b> WatcherStatsParts<'b> {
#[doc = "Builds a relative URL path to the Watcher Stats API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherStatsParts::None => "/_watcher/stats".into(),
WatcherStatsParts::Metric(ref metric) => {
let metric_str = metric.join(",");
let encoded_metric: Cow<str> =
percent_encode(metric_str.as_bytes(), PARTS_ENCODED).into();
let mut p = String::with_capacity(16usize + encoded_metric.len());
p.push_str("/_watcher/stats/");
p.push_str(encoded_metric.as_ref());
p.into()
}
}
}
}
#[doc = "Builder for the [Watcher Stats API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stats.html)\n\nRetrieves the current Watcher metrics."]
#[derive(Clone, Debug)]
pub struct WatcherStats<'a, 'b> {
transport: &'a Transport,
parts: WatcherStatsParts<'b>,
emit_stacktraces: Option<bool>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
metric: Option<&'b [&'b str]>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b> WatcherStats<'a, 'b> {
#[doc = "Creates a new instance of [WatcherStats] with the specified API parts"]
pub fn new(transport: &'a Transport, parts: WatcherStatsParts<'b>) -> Self {
let headers = HeaderMap::new();
WatcherStats {
transport,
parts,
headers,
emit_stacktraces: None,
error_trace: None,
filter_path: None,
human: None,
metric: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "Emits stack traces of currently running watches"]
pub fn emit_stacktraces(mut self, emit_stacktraces: bool) -> Self {
self.emit_stacktraces = Some(emit_stacktraces);
self
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Controls what additional stat metrics should be include in the response"]
pub fn metric(mut self, metric: &'b [&'b str]) -> Self {
self.metric = Some(metric);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Stats API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Get;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
emit_stacktraces: Option<bool>,
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
metric: Option<&'b [&'b str]>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
emit_stacktraces: self.emit_stacktraces,
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
metric: self.metric,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = Option::<()>::None;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[derive(Debug, Clone, PartialEq)]
#[doc = "API parts for the Watcher Stop API"]
pub enum WatcherStopParts {
#[doc = "No parts"]
None,
}
impl WatcherStopParts {
#[doc = "Builds a relative URL path to the Watcher Stop API"]
pub fn url(self) -> Cow<'static, str> {
match self {
WatcherStopParts::None => "/_watcher/_stop".into(),
}
}
}
#[doc = "Builder for the [Watcher Stop API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stop.html)\n\nStops Watcher if it is running."]
#[derive(Clone, Debug)]
pub struct WatcherStop<'a, 'b, B> {
transport: &'a Transport,
parts: WatcherStopParts,
body: Option<B>,
error_trace: Option<bool>,
filter_path: Option<&'b [&'b str]>,
headers: HeaderMap,
human: Option<bool>,
pretty: Option<bool>,
request_timeout: Option<Duration>,
source: Option<&'b str>,
}
impl<'a, 'b, B> WatcherStop<'a, 'b, B>
where
B: Body,
{
#[doc = "Creates a new instance of [WatcherStop]"]
pub fn new(transport: &'a Transport) -> Self {
let headers = HeaderMap::new();
WatcherStop {
transport,
parts: WatcherStopParts::None,
headers,
body: None,
error_trace: None,
filter_path: None,
human: None,
pretty: None,
request_timeout: None,
source: None,
}
}
#[doc = "The body for the API call"]
pub fn body<T>(self, body: T) -> WatcherStop<'a, 'b, JsonBody<T>>
where
T: Serialize,
{
WatcherStop {
transport: self.transport,
parts: self.parts,
body: Some(body.into()),
error_trace: self.error_trace,
filter_path: self.filter_path,
headers: self.headers,
human: self.human,
pretty: self.pretty,
request_timeout: self.request_timeout,
source: self.source,
}
}
#[doc = "Include the stack trace of returned errors."]
pub fn error_trace(mut self, error_trace: bool) -> Self {
self.error_trace = Some(error_trace);
self
}
#[doc = "A comma-separated list of filters used to reduce the response."]
pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
self.filter_path = Some(filter_path);
self
}
#[doc = "Adds a HTTP header"]
pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
self.headers.insert(key, value);
self
}
#[doc = "Return human readable values for statistics."]
pub fn human(mut self, human: bool) -> Self {
self.human = Some(human);
self
}
#[doc = "Pretty format the returned JSON response."]
pub fn pretty(mut self, pretty: bool) -> Self {
self.pretty = Some(pretty);
self
}
#[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
pub fn request_timeout(mut self, timeout: Duration) -> Self {
self.request_timeout = Some(timeout);
self
}
#[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
pub fn source(mut self, source: &'b str) -> Self {
self.source = Some(source);
self
}
#[doc = "Creates an asynchronous call to the Watcher Stop API that can be awaited"]
pub async fn send(self) -> Result<Response, Error> {
let path = self.parts.url();
let method = Method::Post;
let headers = self.headers;
let timeout = self.request_timeout;
let query_string = {
#[serde_with::skip_serializing_none]
#[derive(Serialize)]
struct QueryParams<'b> {
error_trace: Option<bool>,
#[serde(serialize_with = "crate::client::serialize_coll_qs")]
filter_path: Option<&'b [&'b str]>,
human: Option<bool>,
pretty: Option<bool>,
source: Option<&'b str>,
}
let query_params = QueryParams {
error_trace: self.error_trace,
filter_path: self.filter_path,
human: self.human,
pretty: self.pretty,
source: self.source,
};
Some(query_params)
};
let body = self.body;
let response = self
.transport
.send(method, &path, headers, query_string.as_ref(), body, timeout)
.await?;
Ok(response)
}
}
#[doc = "Namespace client for Watcher APIs"]
pub struct Watcher<'a> {
transport: &'a Transport,
}
impl<'a> Watcher<'a> {
#[doc = "Creates a new instance of [Watcher]"]
pub fn new(transport: &'a Transport) -> Self {
Self { transport }
}
pub fn transport(&self) -> &Transport {
self.transport
}
#[doc = "[Watcher Ack Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-ack-watch.html)\n\nAcknowledges a watch, manually throttling the execution of the watch's actions."]
pub fn ack_watch<'b>(&'a self, parts: WatcherAckWatchParts<'b>) -> WatcherAckWatch<'a, 'b, ()> {
WatcherAckWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Activate Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-activate-watch.html)\n\nActivates a currently inactive watch."]
pub fn activate_watch<'b>(
&'a self,
parts: WatcherActivateWatchParts<'b>,
) -> WatcherActivateWatch<'a, 'b, ()> {
WatcherActivateWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Deactivate Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-deactivate-watch.html)\n\nDeactivates a currently active watch."]
pub fn deactivate_watch<'b>(
&'a self,
parts: WatcherDeactivateWatchParts<'b>,
) -> WatcherDeactivateWatch<'a, 'b, ()> {
WatcherDeactivateWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Delete Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-delete-watch.html)\n\nRemoves a watch from Watcher."]
pub fn delete_watch<'b>(
&'a self,
parts: WatcherDeleteWatchParts<'b>,
) -> WatcherDeleteWatch<'a, 'b> {
WatcherDeleteWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Execute Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-execute-watch.html)\n\nForces the execution of a stored watch."]
pub fn execute_watch<'b>(
&'a self,
parts: WatcherExecuteWatchParts<'b>,
) -> WatcherExecuteWatch<'a, 'b, ()> {
WatcherExecuteWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Get Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-get-watch.html)\n\nRetrieves a watch by its ID."]
pub fn get_watch<'b>(&'a self, parts: WatcherGetWatchParts<'b>) -> WatcherGetWatch<'a, 'b> {
WatcherGetWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Put Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-put-watch.html)\n\nCreates a new watch, or updates an existing one."]
pub fn put_watch<'b>(&'a self, parts: WatcherPutWatchParts<'b>) -> WatcherPutWatch<'a, 'b, ()> {
WatcherPutWatch::new(self.transport(), parts)
}
#[doc = "[Watcher Query Watches API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-query-watches.html)\n\nRetrieves stored watches."]
pub fn query_watches<'b>(&'a self) -> WatcherQueryWatches<'a, 'b, ()> {
WatcherQueryWatches::new(self.transport())
}
#[doc = "[Watcher Start API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-start.html)\n\nStarts Watcher if it is not already running."]
pub fn start<'b>(&'a self) -> WatcherStart<'a, 'b, ()> {
WatcherStart::new(self.transport())
}
#[doc = "[Watcher Stats API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stats.html)\n\nRetrieves the current Watcher metrics."]
pub fn stats<'b>(&'a self, parts: WatcherStatsParts<'b>) -> WatcherStats<'a, 'b> {
WatcherStats::new(self.transport(), parts)
}
#[doc = "[Watcher Stop API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stop.html)\n\nStops Watcher if it is running."]
pub fn stop<'b>(&'a self) -> WatcherStop<'a, 'b, ()> {
WatcherStop::new(self.transport())
}
}
impl Elasticsearch {
#[doc = "Creates a namespace client for Watcher APIs"]
pub fn watcher(&self) -> Watcher {
Watcher::new(self.transport())
}
}
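// A minimal usage sketch (not part of the generated client; `Elasticsearch::default()`,
// `serde_json::json!`, and the watch id are illustrative assumptions): storing and then
// fetching a watch through the namespace client above might look like:
//
//     let client = Elasticsearch::default();
//     client
//         .watcher()
//         .put_watch(WatcherPutWatchParts::Id("error_alert"))
//         .body(json!({ "trigger": { "schedule": { "interval": "10s" } } }))
//         .send()
//         .await?;
//     let response = client
//         .watcher()
//         .get_watch(WatcherGetWatchParts::Id("error_alert"))
//         .send()
//         .await?;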
| 37.696038 | 221 | 0.580792 |
72dcea0bd9c9365142117a1dda89f60d07d9e756 | 6,696 | use syscall::{self, O_CLOEXEC, O_STAT, O_CREAT, O_EXCL, O_DIRECTORY, O_WRONLY, O_NOFOLLOW, TimeSpec};
use core::slice;
use libc::{c_int, c_char, off_t, mode_t, size_t, ssize_t};
use ::types::{utimbuf, timeval};
pub const PATH_MAX: usize = 4096;
libc_fn!(unsafe access(path: *mut c_char, _amode: c_int) -> Result<c_int> {
// XXX amode
::RawFile::open(::cstr_to_slice(path), O_CLOEXEC | O_STAT)?;
Ok(0)
});
libc_fn!(unsafe _close(file: c_int) -> Result<c_int> {
Ok(syscall::close(file as usize)? as c_int)
});
libc_fn!(unsafe dup(file: c_int) -> Result<c_int> {
Ok(syscall::dup(file as usize, &[])? as c_int)
});
libc_fn!(unsafe dup2(file: c_int, newfile: c_int) -> Result<c_int> {
Ok(syscall::dup2(file as usize, newfile as usize, &[])? as c_int)
});
libc_fn!(unsafe _fstat(file: c_int, st: *mut syscall::Stat) -> Result<c_int> {
Ok(syscall::fstat(file as usize, &mut *st)? as c_int)
});
libc_fn!(unsafe _fsync(file: c_int) -> Result<c_int> {
Ok(syscall::fsync(file as usize)? as c_int)
});
libc_fn!(unsafe ftruncate(file: c_int, len: off_t) -> Result<c_int> {
Ok(syscall::ftruncate(file as usize, len as usize)? as c_int)
});
libc_fn!(unsafe _lseek(file: c_int, ptr: off_t, dir: c_int) -> Result<off_t> {
Ok(syscall::lseek(file as usize, ptr as isize, dir as usize)? as off_t)
});
libc_fn!(unsafe mkdir(path: *mut c_char, mode: mode_t) -> Result<c_int> {
let flags = O_CREAT | O_EXCL | O_CLOEXEC | O_DIRECTORY | (mode as usize & 0o777);
::RawFile::open(::cstr_to_slice(path), flags)?;
Ok(0)
});
libc_fn!(unsafe _open(path: *mut c_char, flags: c_int, mode: mode_t) -> Result<c_int> {
let mut path = ::cstr_to_slice(path);
// XXX hack; use better method if possible
if path == b"/dev/null" {
path = b"null:"
}
Ok(syscall::open(path, flags as usize | (mode as usize & 0o777))? as c_int)
});
libc_fn!(unsafe pipe(pipefd: *mut [c_int; 2]) -> c_int {
pipe2(pipefd, 0)
});
libc_fn!(unsafe pipe2(pipefd: *mut [c_int; 2], flags: c_int) -> Result<c_int> {
let mut syspipefd = [(*pipefd)[0] as usize, (*pipefd)[1] as usize];
syscall::pipe2(&mut syspipefd, flags as usize)?;
(*pipefd)[0] = syspipefd[0] as c_int;
(*pipefd)[1] = syspipefd[1] as c_int;
Ok(0)
});
libc_fn!(unsafe _read(file: c_int, buf: *mut c_char, len: c_int) -> Result<c_int> {
let buf = slice::from_raw_parts_mut(buf as *mut u8, len as usize);
Ok(syscall::read(file as usize, buf)? as c_int)
});
libc_fn!(unsafe rmdir(path: *mut c_char) -> Result<c_int> {
Ok(syscall::rmdir(::cstr_to_slice(path))? as c_int)
});
libc_fn!(unsafe _stat(path: *const c_char, st: *mut syscall::Stat) -> Result<c_int> {
let fd = ::RawFile::open(::cstr_to_slice(path), O_CLOEXEC | O_STAT)?;
Ok(syscall::fstat(*fd, &mut *st)? as c_int)
});
libc_fn!(unsafe lstat(path: *const c_char, st: *mut syscall::Stat) -> Result<c_int> {
let fd = ::RawFile::open(::cstr_to_slice(path), O_CLOEXEC | O_STAT | O_NOFOLLOW)?;
Ok(syscall::fstat(*fd, &mut *st)? as c_int)
});
libc_fn!(unsafe _unlink(path: *mut c_char) -> Result<c_int> {
Ok(syscall::unlink(::cstr_to_slice(path))? as c_int)
});
libc_fn!(unsafe _write(file: c_int, buf: *const c_char, len: c_int) -> Result<c_int> {
let buf = slice::from_raw_parts(buf as *const u8, len as usize);
Ok(syscall::write(file as usize, buf)? as c_int)
});
libc_fn!(unsafe chmod(path: *mut c_char, mode: mode_t) -> Result<c_int> {
Ok(syscall::chmod(::cstr_to_slice(path), mode as usize)? as c_int)
});
libc_fn!(unsafe realpath(path: *const c_char, resolved_path: *mut c_char) -> Result<*mut c_char> {
let fd = ::RawFile::open(::cstr_to_slice(path), O_STAT)?;
let resolved_path = ::MallocNull::new(resolved_path, PATH_MAX);
let buf = slice::from_raw_parts_mut(resolved_path.as_mut_ptr() as *mut u8, PATH_MAX-1);
let length = syscall::fpath(*fd, buf)?;
buf[length] = b'\0';
Ok(resolved_path.into_raw())
});
libc_fn!(unsafe _rename(old: *const c_char, new: *const c_char) -> Result<c_int> {
// XXX fix this horror when the kernel provides rename() or link()
let old = ::cstr_to_slice(old);
let new = ::cstr_to_slice(new);
let buf = ::file_read_all(old)?;
let mut stat = syscall::Stat::default();
let fd = ::RawFile::open(old, syscall::O_STAT)?;
syscall::fstat(*fd, &mut stat)?;
drop(fd);
let mode = (stat.st_mode & 0o777) as usize;
let fd = ::RawFile::open(new, O_CREAT | O_WRONLY | mode)?;
syscall::write(*fd, &buf)?;
syscall::unlink(old)?;
Ok(0)
});
libc_fn!(fsync(fd: c_int) -> Result<c_int> {
Ok(syscall::fsync(fd as usize)? as c_int)
});
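// These wrappers model a symlink as a file opened with O_SYMLINK: symlink() creates the
// link file and writes the target path into it, and readlink() opens it read-only and
// reads the target path back out.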
libc_fn!(unsafe symlink(path1: *const c_char, path2: *const c_char) -> Result<c_int> {
let fd = ::RawFile::open(::cstr_to_slice(path2), syscall::O_SYMLINK | syscall::O_CREAT | syscall::O_WRONLY | 0o777)?;
syscall::write(*fd, ::cstr_to_slice(path1))?;
Ok(0)
});
libc_fn!(unsafe readlink(path: *const c_char, buf: *mut c_char, bufsize: size_t) -> Result<ssize_t> {
let fd = ::RawFile::open(::cstr_to_slice(path), syscall::O_SYMLINK | syscall::O_RDONLY)?;
let count = syscall::read(*fd, slice::from_raw_parts_mut(buf as *mut u8, bufsize))?;
Ok(count as ssize_t)
});
libc_fn!(unsafe utime(path: *mut c_char, times: *const utimbuf) -> Result<c_int> {
let times = if times.is_null() {
let mut tp = TimeSpec::default();
syscall::clock_gettime(syscall::flag::CLOCK_REALTIME, &mut tp)?;
[tp, tp]
} else {
[TimeSpec { tv_sec: (*times).actime, tv_nsec: 0 },
TimeSpec { tv_sec: (*times).modtime, tv_nsec: 0 }]
};
let fd = ::RawFile::open(::cstr_to_slice(path), 0)?;
syscall::futimens(*fd, ×)?;
Ok(0)
});
libc_fn!(unsafe utimes(path: *mut c_char, times: *const [timeval; 2]) -> Result<c_int> {
    let times = [TimeSpec { tv_sec: (*times)[0].tv_sec, tv_nsec: (*times)[0].tv_usec as i32 * 1000 },
                 TimeSpec { tv_sec: (*times)[1].tv_sec, tv_nsec: (*times)[1].tv_usec as i32 * 1000 }];
let fd = ::RawFile::open(::cstr_to_slice(path), 0)?;
syscall::futimens(*fd, ×)?;
Ok(0)
});
libc_fn!(unsafe futimens(fd: c_int, times: *const [TimeSpec; 2]) -> Result<c_int> {
// XXX UTIME_NOW and UTIME_OMIT (in redoxfs?)
syscall::futimens(fd as usize, &*times)?;
Ok(0)
});
// XXX variadic
libc_fn!(_fcntl(file: c_int, cmd: c_int, arg: c_int) -> Result<c_int> {
Ok(syscall::fcntl(file as usize, cmd as usize, arg as usize)? as c_int)
});
libc_fn!(_isatty(file: c_int) -> c_int {
if let Ok(fd) = syscall::dup(file as usize, b"termios") {
let _ = syscall::close(fd);
1
} else {
0
}
});
| 35.428571 | 121 | 0.635454 |
effc861e0fd48720692f6a97e37537ee15515198 | 2,023 | use std::io::{Result, Write};
use std::fs::{File, OpenOptions, read_dir};
use chrono::{DateTime, Utc};
fn main() {
println!("cargo:rerun-if-changed=./src/");
    update_version_number().unwrap();
insert_app_data().unwrap();
}
fn update_version_number() -> Result<()> {
let now: DateTime<Utc> = Utc::now();
let mut fo = OpenOptions::new()
.write(true)
.create(true)
.open("src/version.rs")
.unwrap();
let ni = format!(r#"//! This is a uname constant, and will be update automatically on building.
/// NOTE: following line will be found and modified by build.rs. ***DONT CHANGE THIS LINE MANUALLY!!!!***
pub const VERSION : &[u8] = b"{}\0";"#, now.to_rfc2822());
writeln!(fo, "{}", ni)?;
Ok(())
}
static TARGET_PATH: &str = "built_in_elfs/";
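/// Generates `src/link_app.asm`: a data section recording the number of built-in apps,
/// per-app `app_N_start`/`app_N_end` symbols, an `_app_names` string table, and `.incbin`
/// directives that embed each ELF from `TARGET_PATH` into the binary.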
fn insert_app_data() -> Result<()> {
let mut f = File::create("src/link_app.asm").unwrap();
    let mut apps: Vec<_> = read_dir(TARGET_PATH)
        .unwrap()
        .map(|dir_entry| dir_entry.unwrap().file_name().into_string().unwrap())
        .filter(|name| name == "proc0")
        .collect();
apps.sort();
writeln!(f, r#"
.align 3
.section .data
.global _num_app
_num_app:
.quad {}"#, apps.len())?;
for i in 0..apps.len() {
writeln!(f, r#" .quad app_{}_start"#, i)?;
}
    if !apps.is_empty() {
writeln!(f, r#" .quad app_{}_end"#, apps.len() - 1)?;
}
writeln!(f, r#"
.global _app_names
_app_names:"#)?;
for app in apps.iter() {
writeln!(f, r#" .string "{}""#, app)?;
}
for (idx, app) in apps.iter().enumerate() {
println!("app_{}: {}", idx, app);
writeln!(f, r#"
.section .data
.global app_{0}_start
.global app_{0}_end
.align 3
app_{0}_start:
.incbin "{2}{1}"
app_{0}_end:"#, idx, app, TARGET_PATH)?;
}
writeln!(f, "# Try to make cargo happy: last compiled @ {}", Utc::now().to_rfc2822()).unwrap();
Ok(())
} | 26.973333 | 105 | 0.544241 |
161431ca56347a1320c86df71c026053cb9ca20a | 325 | // Copyright 2020-2021 The FuseQuery Authors.
//
// SPDX-License-Identifier: Apache-2.0.
use warp::Filter;
use crate::configs::Config;
pub fn hello_handler(
cfg: Config,
) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
warp::path!("v1" / "hello").map(move || format!("{:?}", cfg))
}
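// A minimal usage sketch (assumed, not part of this file; `Config::default()` and the
// address are illustrative): the filter can be mounted on a warp server with
//
//     let cfg = Config::default();
//     warp::serve(hello_handler(cfg)).run(([127, 0, 0, 1], 8080)).await;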
| 23.214286 | 79 | 0.646154 |
69038d348e25ab4223bdb40cf666ff8794c64972 | 11,948 | //! Protocol logic specific to processing ICS3 messages of type `MsgConnectionOpenAck`.
use crate::core::ics03_connection::connection::{ConnectionEnd, Counterparty, State};
use crate::core::ics03_connection::context::ConnectionReader;
use crate::core::ics03_connection::error::Error;
use crate::core::ics03_connection::events::Attributes;
use crate::core::ics03_connection::handler::verify::{
check_client_consensus_height, verify_proofs,
};
use crate::core::ics03_connection::handler::{ConnectionIdState, ConnectionResult};
use crate::core::ics03_connection::msgs::conn_open_ack::MsgConnectionOpenAck;
use crate::events::IbcEvent;
use crate::handler::{HandlerOutput, HandlerResult};
use crate::prelude::*;
pub(crate) fn process(
ctx: &dyn ConnectionReader,
msg: MsgConnectionOpenAck,
) -> HandlerResult<ConnectionResult, Error> {
let mut output = HandlerOutput::builder();
// Check the client's (consensus state) proof height.
check_client_consensus_height(ctx, msg.consensus_height())?;
// Validate the connection end.
let mut conn_end = ctx.connection_end(msg.connection_id())?;
// A connection end must be Init or TryOpen; otherwise we return an error.
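    // `&&` binds tighter than `||`, so this accepts either an Init end whose version list
    // contains the acknowledged version, or a TryOpen end whose single chosen version
    // equals it.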
let state_is_consistent = conn_end.state_matches(&State::Init)
&& conn_end.versions().contains(msg.version())
|| conn_end.state_matches(&State::TryOpen)
&& conn_end.versions().get(0).eq(&Some(msg.version()));
if !state_is_consistent {
// Old connection end is in incorrect state, propagate the error.
return Err(Error::connection_mismatch(msg.connection_id().clone()));
}
// Set the connection ID of the counterparty
let prev_counterparty = conn_end.counterparty();
let counterparty = Counterparty::new(
prev_counterparty.client_id().clone(),
Some(msg.connection_id().clone()),
prev_counterparty.prefix().clone(),
);
conn_end.set_state(State::Open);
conn_end.set_version(msg.version().clone());
conn_end.set_counterparty(counterparty);
// The counterparty is the local chain.
let counterparty = Counterparty::new(
conn_end.client_id().clone(), // The local client identifier.
Some(msg.counterparty_connection_id().clone()), // This chain's connection id as known on counterparty.
ctx.commitment_prefix(), // Local commitment prefix.
);
    // Proof verification.
    // 1. Build the ConnectionEnd we expect the counterparty to have stored for this connection.
let expected_conn = ConnectionEnd::new(
State::TryOpen,
conn_end.counterparty().client_id().clone(),
counterparty,
vec![msg.version().clone()],
conn_end.delay_period(),
);
// 2. Pass the details to the verification function.
verify_proofs(
ctx,
msg.client_state(),
msg.proofs().height(),
&conn_end,
&expected_conn,
msg.proofs(),
)?;
output.log("success: connection verification passed");
let result = ConnectionResult {
connection_id: msg.connection_id().clone(),
connection_id_state: ConnectionIdState::Reused,
connection_end: conn_end,
};
let event_attributes = Attributes {
connection_id: Some(result.connection_id.clone()),
..Default::default()
};
output.emit(IbcEvent::OpenAckConnection(event_attributes.into()));
Ok(output.with_result(result))
}
#[cfg(test)]
mod tests {
use crate::prelude::*;
use core::str::FromStr;
use test_log::test;
use crate::core::ics03_connection::connection::{ConnectionEnd, Counterparty, State};
use crate::core::ics03_connection::error;
use crate::core::ics03_connection::handler::{dispatch, ConnectionResult};
use crate::core::ics03_connection::msgs::conn_open_ack::test_util::get_dummy_raw_msg_conn_open_ack;
use crate::core::ics03_connection::msgs::conn_open_ack::MsgConnectionOpenAck;
use crate::core::ics03_connection::msgs::ConnectionMsg;
use crate::core::ics23_commitment::commitment::CommitmentPrefix;
use crate::core::ics24_host::identifier::{ChainId, ClientId};
use crate::events::IbcEvent;
use crate::mock::context::MockContext;
use crate::mock::host::HostType;
use crate::timestamp::ZERO_DURATION;
#[test]
fn conn_open_ack_msg_processing() {
struct Test {
name: String,
ctx: MockContext,
msg: ConnectionMsg,
want_pass: bool,
match_error: Box<dyn FnOnce(error::Error)>,
}
let msg_ack =
MsgConnectionOpenAck::try_from(get_dummy_raw_msg_conn_open_ack(10, 10)).unwrap();
let conn_id = msg_ack.connection_id.clone();
// Client parameters -- identifier and correct height (matching the proof height)
let client_id = ClientId::from_str("mock_clientid").unwrap();
let proof_height = msg_ack.proofs.height();
        // Parametrize the host chain to have a height at least as recent as
        // the height of the proofs in the Ack msg.
let latest_height = proof_height.increment();
let max_history_size = 5;
let default_context = MockContext::new(
ChainId::new("mockgaia".to_string(), latest_height.revision_number),
HostType::Mock,
max_history_size,
latest_height,
);
// A connection end that will exercise the successful path.
let default_conn_end = ConnectionEnd::new(
State::Init,
client_id.clone(),
Counterparty::new(
client_id.clone(),
Some(msg_ack.counterparty_connection_id().clone()),
CommitmentPrefix::from(b"ibc".to_vec()),
),
vec![msg_ack.version().clone()],
ZERO_DURATION,
);
// A connection end with incorrect state `Open`; will be part of the context.
let mut conn_end_open = default_conn_end.clone();
conn_end_open.set_state(State::Open); // incorrect field
// A connection end with correct state, but incorrect prefix for the
// counterparty; will be part of the context to exercise unsuccessful path.
let mut conn_end_prefix = conn_end_open.clone();
conn_end_prefix.set_state(State::Init);
conn_end_prefix.set_counterparty(Counterparty::new(
client_id.clone(),
Some(msg_ack.counterparty_connection_id().clone()),
CommitmentPrefix::from(Vec::new()), // incorrect field
));
let tests: Vec<Test> = vec![
Test {
name: "Successful processing of an Ack message".to_string(),
ctx: default_context
.clone()
.with_client(&client_id, proof_height)
.with_connection(conn_id.clone(), default_conn_end),
msg: ConnectionMsg::ConnectionOpenAck(Box::new(msg_ack.clone())),
want_pass: true,
match_error: Box::new(|_| {
panic!("should not have error")
}),
},
Test {
name: "Processing fails because the connection does not exist in the context".to_string(),
ctx: default_context.clone(),
msg: ConnectionMsg::ConnectionOpenAck(Box::new(msg_ack.clone())),
want_pass: false,
match_error: {
let connection_id = conn_id.clone();
Box::new(move |e| {
match e.detail() {
error::ErrorDetail::ConnectionNotFound(e) => {
assert_eq!(e.connection_id, connection_id)
}
_ => {
panic!("Expected ConnectionNotFound error");
}
}
})
},
},
Test {
name: "Processing fails due to connections mismatch (incorrect 'open' state)".to_string(),
ctx: default_context
.clone()
.with_client(&client_id, proof_height)
.with_connection(conn_id.clone(), conn_end_open),
msg: ConnectionMsg::ConnectionOpenAck(Box::new(msg_ack.clone())),
want_pass: false,
match_error: {
let connection_id = conn_id.clone();
Box::new(move |e| {
match e.detail() {
error::ErrorDetail::ConnectionMismatch(e) => {
assert_eq!(e.connection_id, connection_id);
}
_ => {
panic!("Expected ConnectionMismatch error");
}
}
})
},
},
Test {
name: "Processing fails: ConsensusStateVerificationFailure due to empty counterparty prefix".to_string(),
ctx: default_context
.with_client(&client_id, proof_height)
.with_connection(conn_id, conn_end_prefix),
msg: ConnectionMsg::ConnectionOpenAck(Box::new(msg_ack)),
want_pass: false,
match_error:
Box::new(move |e| {
match e.detail() {
error::ErrorDetail::ConsensusStateVerificationFailure(e) => {
assert_eq!(e.height, proof_height)
}
_ => {
panic!("Expected ConsensusStateVerificationFailure error");
}
}
}),
},
/*
Test {
name: "Processing fails due to MissingLocalConsensusState".to_string(),
ctx: MockContext::default()
.with_client(&client_id, proof_height)
.with_connection(conn_id, default_conn_end),
msg: ConnectionMsg::ConnectionOpenAck(Box::new(msg_ack)),
want_pass: false,
error_kind: Some(Kind::MissingLocalConsensusState)
},
*/
];
for test in tests {
let res = dispatch(&test.ctx, test.msg.clone());
// Additionally check the events and the output objects in the result.
match res {
Ok(proto_output) => {
assert!(
test.want_pass,
"conn_open_ack: test passed but was supposed to fail for test: {}, \nparams {:?} {:?}",
test.name,
test.msg.clone(),
test.ctx.clone()
);
assert!(!proto_output.events.is_empty()); // Some events must exist.
// The object in the output is a ConnectionEnd, should have OPEN state.
let res: ConnectionResult = proto_output.result;
assert_eq!(res.connection_end.state().clone(), State::Open);
for e in proto_output.events.iter() {
assert!(matches!(e, &IbcEvent::OpenAckConnection(_)));
}
}
Err(e) => {
assert!(
!test.want_pass,
"conn_open_ack: failed for test: {}, \nparams {:?} {:?} error: {:?}",
test.name,
test.msg,
test.ctx.clone(),
e,
);
// Verify that the error kind matches
(test.match_error)(e);
}
}
}
}
}
| 40.639456 | 121 | 0.551892 |
338c29235436d0a230472600ecc31b66504882db | 594 | //! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_1355.hrx"
#[allow(unused)]
fn runner() -> crate::TestRunner {
super::runner()
}
#[test]
#[ignore] // wrong error
fn test() {
assert_eq!(
runner().err(
"@function test() {\
\n @return;\
\n}\n\
\ndiv {\
\n x: type-of(test());\
\n}"
),
"Error: Expected expression.\
\n ,\
\n2 | @return;\
\n | ^\
\n \'\
\n input.scss 2:10 root stylesheet",
);
}
| 21.214286 | 83 | 0.427609 |