hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5d05e1d25a67576b82ce9b7b4f3bd9f474d2feea | 1,028 | use serde::Serialize;
use crate::{
net,
requests::{RequestOld, ResponseResult},
types::StickerSet,
Bot,
};
/// Use this method to get a sticker set.
///
/// [The official docs](https://core.telegram.org/bots/api#getstickerset).
#[serde_with_macros::skip_serializing_none]
#[derive(Debug, Clone, Serialize)]
pub struct GetStickerSet {
    // Only used to perform the HTTP request; never serialized into the
    // Telegram API payload.
    #[serde(skip_serializing)]
    bot: Bot,
    /// Name of the sticker set to fetch.
    pub name: String,
}
#[async_trait::async_trait]
impl RequestOld for GetStickerSet {
    type Output = StickerSet;

    /// Sends the `getStickerSet` method call, serializing `self` as the
    /// JSON request body.
    async fn send(&self) -> ResponseResult<StickerSet> {
        net::request_json(self.bot.client(), self.bot.token(), "getStickerSet", &self).await
    }
}
impl GetStickerSet {
    /// Builds the request from a bot handle and the name of the sticker set.
    pub(crate) fn new<N>(bot: Bot, name: N) -> Self
    where
        N: Into<String>,
    {
        Self {
            bot,
            name: name.into(),
        }
    }

    /// Name of the sticker set.
    pub fn name<T>(mut self, val: T) -> Self
    where
        T: Into<String>,
    {
        self.name = val.into();
        self
    }
}
| 21.416667 | 92 | 0.609922 |
14489bc14a830cbe82285788b3ab8c4af4f7ac1d | 10,891 | // MIT/Apache2 License
use crate::{
bitmap::Bitmap, color::Color, directive::Directive, gdiobj::{AsGdiObject, GdiObject}, server::SendsDirective,
task::Task, Point,
};
use breadthread::key_type;
use std::{borrow::Cow, mem};
use winapi::{
ctypes::c_int,
shared::{
minwindef::{BYTE, DWORD},
windef::{COLORREF, HDC__},
},
um::wingdi,
};
key_type! {
    /// A drawing context. This either points to a window, to a location in memory, to a screen, or some other
    /// exotic location.
    // Wraps a raw `HDC__` handle via breadthread's `key_type!` macro.
    // NOTE(review): 0x991 appears to be the unique type tag used by
    // breadthread to distinguish key types — confirm against the macro docs.
    pub struct Dc(HDC__) : [DcType, 0x991];
}
/// A pixel-format description, mirroring the fields of the Win32
/// `PIXELFORMATDESCRIPTOR` (see the `From` impl below for the mapping).
#[derive(Debug, Clone, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct PixelFormat {
    /// Capability flags (maps to `dwFlags`).
    pub flags: PixelFormatFlags,
    /// RGBA vs. color-index pixels (maps to `iPixelType`).
    pub ty: PixelType,
    pub color_bits: BYTE,
    pub red_bits: BYTE,
    pub red_shift: BYTE,
    pub green_bits: BYTE,
    pub green_shift: BYTE,
    pub blue_bits: BYTE,
    pub blue_shift: BYTE,
    pub alpha_bits: BYTE,
    pub alpha_shift: BYTE,
    pub accum_bits: BYTE,
    pub accum_red_bits: BYTE,
    pub accum_green_bits: BYTE,
    pub accum_blue_bits: BYTE,
    pub accum_alpha_bits: BYTE,
    pub depth_bits: BYTE,
    pub stencil_bits: BYTE,
    pub aux_buffers: BYTE,
    /// Maps to `dwVisibleMask`.
    pub visible_mask: DWORD,
}
impl From<PixelFormat> for wingdi::PIXELFORMATDESCRIPTOR {
    /// Converts the high-level `PixelFormat` into the raw Win32 descriptor.
    ///
    /// `nSize` and `nVersion` are filled in as the API requires; the layer,
    /// reserved and damage fields are always zeroed, as in the original code.
    #[inline]
    fn from(pf: PixelFormat) -> wingdi::PIXELFORMATDESCRIPTOR {
        // Destructure exhaustively so that adding a field to `PixelFormat`
        // produces a compile error here instead of a silently dropped value.
        let PixelFormat {
            flags,
            ty,
            color_bits,
            red_bits,
            red_shift,
            green_bits,
            green_shift,
            blue_bits,
            blue_shift,
            alpha_bits,
            alpha_shift,
            accum_bits,
            accum_red_bits,
            accum_green_bits,
            accum_blue_bits,
            accum_alpha_bits,
            depth_bits,
            stencil_bits,
            aux_buffers,
            visible_mask,
        } = pf;
        wingdi::PIXELFORMATDESCRIPTOR {
            nSize: mem::size_of::<wingdi::PIXELFORMATDESCRIPTOR>() as _,
            nVersion: 1,
            dwFlags: flags.bits(),
            // Reuse the `From<PixelType> for BYTE` impl instead of
            // duplicating the match on `PixelType` here.
            iPixelType: ty.into(),
            cColorBits: color_bits,
            cRedBits: red_bits,
            cRedShift: red_shift,
            cGreenBits: green_bits,
            cGreenShift: green_shift,
            cBlueBits: blue_bits,
            cBlueShift: blue_shift,
            cAlphaBits: alpha_bits,
            cAlphaShift: alpha_shift,
            cAccumBits: accum_bits,
            cAccumRedBits: accum_red_bits,
            cAccumGreenBits: accum_green_bits,
            cAccumBlueBits: accum_blue_bits,
            cAccumAlphaBits: accum_alpha_bits,
            cDepthBits: depth_bits,
            cStencilBits: stencil_bits,
            cAuxBuffers: aux_buffers,
            iLayerType: 0,
            bReserved: 0,
            dwLayerMask: 0,
            dwVisibleMask: visible_mask,
            dwDamageMask: 0,
        }
    }
}
bitflags::bitflags! {
    /// Capability flags for a pixel format; each constant mirrors the
    /// corresponding `wingdi::PFD_*` flag value.
    pub struct PixelFormatFlags: DWORD {
        const DRAW_TO_WINDOW = wingdi::PFD_DRAW_TO_WINDOW;
        const DRAW_TO_BITMAP = wingdi::PFD_DRAW_TO_BITMAP;
        const SUPPORT_GDI = wingdi::PFD_SUPPORT_GDI;
        const SUPPORT_OPENGL = wingdi::PFD_SUPPORT_OPENGL;
        const GENERIC_ACCELERATED = wingdi::PFD_GENERIC_ACCELERATED;
        const GENERIC_FORMAT = wingdi::PFD_GENERIC_FORMAT;
        const NEED_PALETTE = wingdi::PFD_NEED_PALETTE;
        const NEED_SYSTEM_PALETTE = wingdi::PFD_NEED_SYSTEM_PALETTE;
        const DOUBLEBUFFER = wingdi::PFD_DOUBLEBUFFER;
        const STEREO = wingdi::PFD_STEREO;
        const SWAP_LAYER_BUFFERS = wingdi::PFD_SWAP_LAYER_BUFFERS;
        const DEPTH_DONT_CARE = wingdi::PFD_DEPTH_DONTCARE;
        const DOUBLEBUFFER_DONT_CARE = wingdi::PFD_DOUBLEBUFFER_DONTCARE;
        const STEREO_DONT_CARE = wingdi::PFD_STEREO_DONTCARE;
        const SWAP_COPY = wingdi::PFD_SWAP_COPY;
        const SWAP_EXCHANGE = wingdi::PFD_SWAP_EXCHANGE;
    }
}
impl Default for PixelFormatFlags {
    /// The default flag set is empty (no capabilities requested).
    #[inline]
    fn default() -> Self {
        Self::empty()
    }
}

/// How pixel values are interpreted: direct RGBA or palette indices.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PixelType {
    Rgba,
    ColorIndex,
}

impl Default for PixelType {
    /// RGBA is the default pixel interpretation.
    #[inline]
    fn default() -> Self {
        Self::Rgba
    }
}

impl From<PixelType> for BYTE {
    /// Maps to the raw `PFD_TYPE_*` value used in `PIXELFORMATDESCRIPTOR`.
    #[inline]
    fn from(pt: PixelType) -> BYTE {
        match pt {
            PixelType::Rgba => wingdi::PFD_TYPE_RGBA,
            PixelType::ColorIndex => wingdi::PFD_TYPE_COLORINDEX,
        }
    }
}
/// Raster operation selector for [`Dc::bit_blt`]; each variant names one of
/// the standard Win32 BitBlt ternary raster operations.
#[derive(Debug, Copy, Clone)]
pub enum BitBltOp {
    Blackness,
    CaptureBlt,
    DstInvert,
    MergeCopy,
    MergePaint,
    NoMirrorBitmap,
    NotSrcCopy,
    NotSrcErase,
    PatCopy,
    PatInvert,
    PatPaint,
    SrcAnd,
    SrcCopy,
    SrcErase,
    SrcInvert,
    SrcPaint,
    Whiteness,
}
impl Dc {
    // All methods below are thin asynchronous wrappers: they package their
    // arguments into a `Directive` and submit it via `SendsDirective::send`,
    // returning a `Task` that resolves to the operation's result.

    /// Creates a memory DC compatible with this one (wraps `CreateCompatibleDC`).
    #[inline]
    pub fn create_compatible_dc<S: SendsDirective>(
        self,
        gt: &S,
    ) -> crate::Result<Task<crate::Result<Dc>>> {
        gt.send(Directive::CreateCompatibleDc(self))
    }

    /// Deletes this DC (wraps `DeleteDC`).
    #[inline]
    pub fn delete<S: SendsDirective>(self, gt: &S) -> crate::Result<Task<()>> {
        gt.send(Directive::DeleteDc(self))
    }

    /// Creates a bitmap compatible with this DC, of the given size.
    #[inline]
    pub fn create_compatible_bitmap<S: SendsDirective>(
        self,
        gt: &S,
        width: i32,
        height: i32,
    ) -> crate::Result<Task<crate::Result<Bitmap>>> {
        gt.send(Directive::CreateCompatibleBitmap {
            dc: self,
            width,
            height,
        })
    }

    /// Writes a rectangular block of pixels starting at `(origin_x, origin_y)`,
    /// `width` pixels per row; the iterator is collected eagerly.
    #[inline]
    pub fn draw_pixels<S: SendsDirective, Pixels: IntoIterator<Item = COLORREF>>(
        self,
        gt: &S,
        origin_x: i32,
        origin_y: i32,
        width: i32,
        pixels: Pixels,
    ) -> crate::Result<Task<crate::Result<()>>> {
        gt.send(Directive::SetPixels {
            dc: self,
            origin_x,
            origin_y,
            width,
            pixels: pixels.into_iter().collect(),
        })
    }

    /// Selects a GDI object (pen, brush, bitmap, …) into this DC and yields
    /// the previously selected object.
    #[inline]
    pub fn select_object<S: SendsDirective, O: AsGdiObject>(
        self,
        gt: &S,
        obj: O,
    ) -> crate::Result<Task<crate::Result<GdiObject>>> {
        gt.send(Directive::SelectObject { dc: self, obj: obj.into_gdi_object() })
    }

    /// Swaps front/back buffers for this DC (wraps `SwapBuffers`).
    #[inline]
    pub fn swap_buffers<S: SendsDirective>(self, gt: &S) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::SwapBuffers(self))
    }

    /// Sets a single pixel to `color`.
    #[inline]
    pub fn set_pixel<S: SendsDirective>(
        self,
        gt: &S,
        x: c_int,
        y: c_int,
        color: Color,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::SetPixel {
            dc: self,
            x,
            y,
            color,
        })
    }

    /// Moves the current drawing position to `(x, y)`.
    #[inline]
    pub fn move_to<S: SendsDirective>(
        self,
        gt: &S,
        x: c_int,
        y: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::MoveTo { dc: self, x, y })
    }

    /// Draws a line from the current position to `(x, y)`.
    #[inline]
    pub fn line_to<S: SendsDirective>(
        self,
        gt: &S,
        x: c_int,
        y: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::LineTo { dc: self, x, y })
    }

    /// Draws a rectangle with the given edges.
    #[inline]
    pub fn rectangle<S: SendsDirective>(
        self,
        gt: &S,
        left: c_int,
        top: c_int,
        right: c_int,
        bottom: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Rectangle {
            dc: self,
            left,
            top,
            right,
            bottom,
        })
    }

    /// Draws a rectangle with rounded corners; `width`/`height` size the
    /// corner ellipse.
    #[inline]
    pub fn round_rect<S: SendsDirective>(
        self,
        gt: &S,
        left: c_int,
        top: c_int,
        right: c_int,
        bottom: c_int,
        width: c_int,
        height: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::RoundRect {
            dc: self,
            left,
            top,
            right,
            bottom,
            width,
            height,
        })
    }

    /// Draws an elliptical arc bounded by the given rectangle, between the
    /// radials through the start and end points.
    #[inline]
    pub fn arc<S: SendsDirective>(
        self,
        gt: &S,
        rect_left: c_int,
        rect_top: c_int,
        rect_right: c_int,
        rect_bottom: c_int,
        arc_start_x: c_int,
        arc_start_y: c_int,
        arc_end_x: c_int,
        arc_end_y: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Arc {
            dc: self,
            rect_left,
            rect_top,
            rect_right,
            rect_bottom,
            arc_start_x,
            arc_start_y,
            arc_end_x,
            arc_end_y,
        })
    }

    /// Draws an ellipse inscribed in the given bounding rectangle.
    #[inline]
    pub fn ellipse<S: SendsDirective>(
        self,
        gt: &S,
        left: c_int,
        top: c_int,
        right: c_int,
        bottom: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Ellipse {
            dc: self,
            left,
            top,
            right,
            bottom,
        })
    }

    /// Draws a chord: an ellipse region cut by the line `(line_x1, line_y1)`
    /// to `(line_x2, line_y2)`.
    #[inline]
    pub fn chord<S: SendsDirective>(
        self,
        gt: &S,
        rect_left: c_int,
        rect_top: c_int,
        rect_right: c_int,
        rect_bottom: c_int,
        line_x1: c_int,
        line_y1: c_int,
        line_x2: c_int,
        line_y2: c_int,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Chord {
            dc: self,
            rect_left,
            rect_top,
            rect_right,
            rect_bottom,
            line_x1,
            line_y1,
            line_x2,
            line_y2,
        })
    }

    /// Draws a Bézier curve through the given control points.
    #[inline]
    pub fn bezier_curve<S: SendsDirective, Pts: Into<Cow<'static, [Point]>>>(
        self,
        gt: &S,
        points: Pts,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::BezierCurve {
            dc: self,
            points: points.into(),
        })
    }

    /// Draws a filled polygon through the given vertices.
    #[inline]
    pub fn polygon<S: SendsDirective, Pts: Into<Cow<'static, [Point]>>>(
        self,
        gt: &S,
        points: Pts,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Polygon {
            dc: self,
            points: points.into(),
        })
    }

    /// Draws connected line segments through the given points (not closed).
    #[inline]
    pub fn polyline<S: SendsDirective, Pts: Into<Cow<'static, [Point]>>>(
        self,
        gt: &S,
        points: Pts,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::Polyline {
            dc: self,
            points: points.into(),
        })
    }

    /// Block-transfers a `width`×`height` region from this DC (the source)
    /// into `dst`, combining pixels according to `op`.
    #[inline]
    pub fn bit_blt<S: SendsDirective>(
        self,
        gt: &S,
        src_x: c_int,
        src_y: c_int,
        width: c_int,
        height: c_int,
        dst: Dc,
        dst_x: c_int,
        dst_y: c_int,
        op: BitBltOp,
    ) -> crate::Result<Task<crate::Result>> {
        gt.send(Directive::BitBlt { src: self, dst, src_x, src_y, width, height, dst_x, dst_y, op })
    }
}
| 25.505855 | 199 | 0.538518 |
d79d7deba90c9b0a6d2f71f074e758677f1fe992 | 11,941 | // Copyright 2019 Intel Corporation. All Rights Reserved.
//
// Portions Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Portions Copyright 2017 The Chromium OS Authors. All rights reserved.
//
// SPDX-License-Identifier: (Apache-2.0 AND BSD-3-Clause)
extern crate log;
extern crate net_util;
extern crate vhost_rs;
extern crate vhost_user_backend;
use libc::{self, EFD_NONBLOCK};
use log::*;
use net_util::{
open_tap, MacAddr, NetCounters, NetQueuePair, OpenTapError, RxVirtio, Tap, TxVirtio,
};
use option_parser::{OptionParser, OptionParserError};
use std::fmt;
use std::io::{self};
use std::net::Ipv4Addr;
use std::os::unix::io::AsRawFd;
use std::process;
use std::sync::{Arc, Mutex, RwLock};
use std::vec::Vec;
use vhost_rs::vhost_user::message::*;
use vhost_rs::vhost_user::{Error as VhostUserError, Listener};
use vhost_user_backend::{VhostUserBackend, VhostUserDaemon, Vring, VringWorker};
use virtio_bindings::bindings::virtio_net::*;
use virtio_bindings::bindings::virtio_ring::VIRTIO_RING_F_EVENT_IDX;
use vm_memory::{GuestMemoryAtomic, GuestMemoryMmap};
use vmm_sys_util::eventfd::EventFd;
pub type VhostUserResult<T> = std::result::Result<T, VhostUserError>;
pub type Result<T> = std::result::Result<T, Error>;
pub type VhostUserBackendResult<T> = std::result::Result<T, std::io::Error>;
/// Errors produced by the vhost-user-net backend.
#[derive(Debug)]
pub enum Error {
    /// Failed to activate device.
    BadActivate,
    /// Failed to create the kill eventfd.
    CreateKillEventFd(io::Error),
    /// Failed to add an event to epoll.
    EpollCtl(io::Error),
    /// Failed to wait for an epoll event.
    EpollWait(io::Error),
    /// Failed to create an epoll fd.
    EpollCreateFd,
    /// Failed to read from the tap device.
    FailedReadTap,
    /// Failed to parse the configuration string.
    FailedConfigParse(OptionParserError),
    /// Failed to signal the used queue.
    FailedSignalingUsedQueue(io::Error),
    /// Received an event other than EPOLLIN.
    HandleEventNotEpollIn,
    /// Received an event with an unknown device-event index.
    HandleEventUnknownEvent,
    /// Invalid vring address.
    InvalidVringAddr,
    /// No vring call fd to notify.
    NoVringCallFdNotify,
    /// No guest memory configured.
    NoMemoryConfigured,
    /// Opening the tap device failed.
    OpenTap(OpenTapError),
    /// No socket path was provided.
    SocketParameterMissing,
    /// Underlying queue-pair error.
    NetQueuePair(net_util::NetQueuePairError),
}
/// Usage string printed when the backend parameters are malformed.
pub const SYNTAX: &str = "vhost-user-net backend parameters \
\"ip=<ip_addr>,mask=<net_mask>,socket=<socket_path>,\
num_queues=<number_of_queues>,queue_size=<size_of_each_queue>,tap=<if_name>\"";

impl fmt::Display for Error {
    // Display just delegates to Debug, prefixed for grep-ability.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "vhost_user_net_error: {:?}", self)
    }
}

impl std::error::Error for Error {}

// Allow `?` to convert backend errors into `std::io::Error` where the
// vhost-user-backend traits require it.
impl std::convert::From<Error> for std::io::Error {
    fn from(e: Error) -> Self {
        std::io::Error::new(io::ErrorKind::Other, e)
    }
}
/// Per-thread state: one rx/tx queue pair bound to one tap device.
struct VhostUserNetThread {
    // The virtio net queue pair backed by the tap fd.
    net: NetQueuePair,
    // Worker driving this thread's vrings; set after daemon creation.
    vring_worker: Option<Arc<VringWorker>>,
    // Written to request this worker thread to shut down.
    kill_evt: EventFd,
}
impl VhostUserNetThread {
    /// Create a new virtio network device with the given TAP interface.
    fn new(tap: Tap) -> Result<Self> {
        Ok(VhostUserNetThread {
            vring_worker: None,
            kill_evt: EventFd::new(EFD_NONBLOCK).map_err(Error::CreateKillEventFd)?,
            net: NetQueuePair {
                mem: None,
                tap,
                rx: RxVirtio::new(),
                tx: TxVirtio::new(),
                rx_tap_listening: false,
                epoll_fd: None,
                counters: NetCounters::default(),
                // The tap fd is registered as event index 2 — after the rx/tx
                // queue events at indices 0 and 1 (see `handle_event`).
                tap_event_id: 2,
            },
        })
    }

    /// Attach (or detach) the vring worker driving this thread's queues and
    /// forward its epoll fd to the queue pair.
    pub fn set_vring_worker(&mut self, vring_worker: Option<Arc<VringWorker>>) {
        // `map` instead of `unwrap`: passing `None` now clears the epoll fd
        // rather than panicking.
        self.net.epoll_fd = vring_worker.as_ref().map(|w| w.as_raw_fd());
        self.vring_worker = vring_worker;
    }
}
/// The vhost-user-net backend: one worker thread per tap queue pair.
pub struct VhostUserNetBackend {
    // One entry per tap device / queue pair.
    threads: Vec<Mutex<VhostUserNetThread>>,
    num_queues: usize,
    queue_size: u16,
    // Bitmask per thread of which queues it owns (two queues per thread).
    queues_per_thread: Vec<u64>,
}
impl VhostUserNetBackend {
    /// Opens `num_queues / 2` tap interfaces and spawns one backend thread
    /// state per tap (each thread serves one rx queue and one tx queue).
    fn new(
        ip_addr: Ipv4Addr,
        host_mac: MacAddr,
        netmask: Ipv4Addr,
        num_queues: usize,
        queue_size: u16,
        ifname: Option<&str>,
    ) -> Result<Self> {
        let mut taps = open_tap(
            ifname,
            Some(ip_addr),
            Some(netmask),
            &mut Some(host_mac),
            num_queues / 2,
            None,
        )
        .map_err(Error::OpenTap)?;
        let mut queues_per_thread = Vec::new();
        let mut threads = Vec::new();
        for (i, tap) in taps.drain(..).enumerate() {
            let thread = Mutex::new(VhostUserNetThread::new(tap)?);
            threads.push(thread);
            // Thread i owns queues 2i (rx) and 2i+1 (tx).
            queues_per_thread.push(0b11 << (i * 2));
        }
        Ok(VhostUserNetBackend {
            threads,
            num_queues,
            queue_size,
            queues_per_thread,
        })
    }
}
impl VhostUserBackend for VhostUserNetBackend {
    fn num_queues(&self) -> usize {
        self.num_queues
    }

    fn max_queue_size(&self) -> usize {
        self.queue_size as usize
    }

    /// Virtio feature bits offered to the frontend (checksum/TSO/UFO
    /// offloads, virtio 1.0, event-idx, plus the vhost-user protocol bit).
    fn features(&self) -> u64 {
        1 << VIRTIO_NET_F_GUEST_CSUM
            | 1 << VIRTIO_NET_F_CSUM
            | 1 << VIRTIO_NET_F_GUEST_TSO4
            | 1 << VIRTIO_NET_F_GUEST_UFO
            | 1 << VIRTIO_NET_F_HOST_TSO4
            | 1 << VIRTIO_NET_F_HOST_UFO
            | 1 << VIRTIO_F_VERSION_1
            | 1 << VIRTIO_RING_F_EVENT_IDX
            | VhostUserVirtioFeatures::PROTOCOL_FEATURES.bits()
    }

    fn protocol_features(&self) -> VhostUserProtocolFeatures {
        VhostUserProtocolFeatures::MQ | VhostUserProtocolFeatures::REPLY_ACK
    }

    fn set_event_idx(&mut self, _enabled: bool) {}

    /// Propagates a new guest memory map to every worker thread.
    fn update_memory(&mut self, mem: GuestMemoryMmap) -> VhostUserBackendResult<()> {
        for thread in self.threads.iter() {
            thread.lock().unwrap().net.mem = Some(GuestMemoryAtomic::new(mem.clone()));
        }
        Ok(())
    }

    /// Dispatches one epoll event for `thread_id`:
    /// index 0 = rx queue kick, 1 = tx queue kick, 2 = tap fd readable.
    /// Returns `Ok(false)` to keep the worker loop running.
    fn handle_event(
        &self,
        device_event: u16,
        evset: epoll::Events,
        vrings: &[Arc<RwLock<Vring>>],
        thread_id: usize,
    ) -> VhostUserBackendResult<bool> {
        if evset != epoll::Events::EPOLLIN {
            return Err(Error::HandleEventNotEpollIn.into());
        }
        let mut thread = self.threads[thread_id].lock().unwrap();
        match device_event {
            0 => {
                let mut vring = vrings[0].write().unwrap();
                if thread
                    .net
                    .resume_rx(&mut vring.mut_queue())
                    .map_err(Error::NetQueuePair)?
                {
                    vring
                        .signal_used_queue()
                        .map_err(Error::FailedSignalingUsedQueue)?
                }
            }
            1 => {
                let mut vring = vrings[1].write().unwrap();
                if thread
                    .net
                    .process_tx(&mut vring.mut_queue())
                    .map_err(Error::NetQueuePair)?
                {
                    vring
                        .signal_used_queue()
                        .map_err(Error::FailedSignalingUsedQueue)?
                }
            }
            2 => {
                // Tap readable: pull packets into the rx vring.
                let mut vring = vrings[0].write().unwrap();
                if thread
                    .net
                    .process_rx_tap(&mut vring.mut_queue())
                    .map_err(Error::NetQueuePair)?
                {
                    vring
                        .signal_used_queue()
                        .map_err(Error::FailedSignalingUsedQueue)?
                }
            }
            _ => return Err(Error::HandleEventUnknownEvent.into()),
        }
        Ok(false)
    }

    fn exit_event(&self, thread_index: usize) -> Option<(EventFd, Option<u16>)> {
        // The exit event is placed after the queues and the tap event, which
        // is event index 3.
        Some((
            self.threads[thread_index]
                .lock()
                .unwrap()
                .kill_evt
                .try_clone()
                .unwrap(),
            Some(3),
        ))
    }

    fn queues_per_thread(&self) -> Vec<u64> {
        self.queues_per_thread.clone()
    }
}
/// Parsed backend configuration; see [`SYNTAX`] for the accepted string form.
pub struct VhostUserNetBackendConfig {
    pub ip: Ipv4Addr,
    pub host_mac: MacAddr,
    pub mask: Ipv4Addr,
    /// Path of the vhost-user Unix socket to listen on.
    pub socket: String,
    pub num_queues: usize,
    pub queue_size: u16,
    /// Optional tap interface name; a tap is created when absent.
    pub tap: Option<String>,
}
impl VhostUserNetBackendConfig {
    /// Parses a `key=value,...` backend string (see [`SYNTAX`]).
    ///
    /// All keys except `socket` are optional; defaults: ip 192.168.100.1,
    /// mask 255.255.255.0, random local MAC, queue_size 256, num_queues 2.
    pub fn parse(backend: &str) -> Result<Self> {
        let mut parser = OptionParser::new();
        parser
            .add("tap")
            .add("ip")
            .add("host_mac")
            .add("mask")
            .add("queue_size")
            .add("num_queues")
            .add("socket");
        parser.parse(backend).map_err(Error::FailedConfigParse)?;
        let tap = parser.get("tap");
        let ip = parser
            .convert("ip")
            .map_err(Error::FailedConfigParse)?
            .unwrap_or_else(|| Ipv4Addr::new(192, 168, 100, 1));
        let host_mac = parser
            .convert("host_mac")
            .map_err(Error::FailedConfigParse)?
            .unwrap_or_else(MacAddr::local_random);
        let mask = parser
            .convert("mask")
            .map_err(Error::FailedConfigParse)?
            .unwrap_or_else(|| Ipv4Addr::new(255, 255, 255, 0));
        let queue_size = parser
            .convert("queue_size")
            .map_err(Error::FailedConfigParse)?
            .unwrap_or(256);
        let num_queues = parser
            .convert("num_queues")
            .map_err(Error::FailedConfigParse)?
            .unwrap_or(2);
        // `socket` is the only mandatory parameter.
        let socket = parser.get("socket").ok_or(Error::SocketParameterMissing)?;
        Ok(VhostUserNetBackendConfig {
            ip,
            host_mac,
            mask,
            socket,
            num_queues,
            queue_size,
            tap,
        })
    }
}
/// Parses `backend_command`, builds the vhost-user-net backend, attaches one
/// vring worker per backend thread, and runs the daemon until it exits; all
/// worker threads are then told to shut down via their kill eventfds.
///
/// Exits the process with status 1 on configuration, worker-count or startup
/// errors.
pub fn start_net_backend(backend_command: &str) {
    let backend_config = match VhostUserNetBackendConfig::parse(backend_command) {
        Ok(config) => config,
        Err(e) => {
            eprintln!("Failed parsing parameters {:?}", e);
            process::exit(1);
        }
    };
    // `as_deref` borrows the optional tap name as `Option<&str>`, replacing
    // the manual `if let Some(..) { Some(tap.as_str()) } else { None }`.
    let tap = backend_config.tap.as_deref();
    let net_backend = Arc::new(RwLock::new(
        VhostUserNetBackend::new(
            backend_config.ip,
            backend_config.host_mac,
            backend_config.mask,
            backend_config.num_queues,
            backend_config.queue_size,
            tap,
        )
        .unwrap(),
    ));
    let listener = Listener::new(&backend_config.socket, true).unwrap();
    let mut net_daemon =
        VhostUserDaemon::new("vhost-user-net-backend".to_string(), net_backend.clone()).unwrap();
    let mut vring_workers = net_daemon.get_vring_workers();
    // Each backend thread must get exactly one vring worker.
    if vring_workers.len() != net_backend.read().unwrap().threads.len() {
        error!("Number of vring workers must be identical to the number of backend threads");
        process::exit(1);
    }
    for thread in net_backend.read().unwrap().threads.iter() {
        thread
            .lock()
            .unwrap()
            .set_vring_worker(Some(vring_workers.remove(0)));
    }
    if let Err(e) = net_daemon.start(listener) {
        error!(
            "failed to start daemon for vhost-user-net with error: {:?}",
            e
        );
        process::exit(1);
    }
    if let Err(e) = net_daemon.wait() {
        error!("Error from the main thread: {:?}", e);
    }
    // Request shutdown of every worker thread.
    for thread in net_backend.read().unwrap().threads.iter() {
        if let Err(e) = thread.lock().unwrap().kill_evt.write(1) {
            error!("Error shutting down worker thread: {:?}", e)
        }
    }
}
| 29.927318 | 97 | 0.565949 |
1c2bcc9236fad2782203c0de72eb0337316e55a6 | 4,096 | // Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
//TODO FIXME for fuzzing Arbitrary;
#![allow(clippy::unit_arg)]
mod language_storage_ext;

pub mod account_address;

/// Re-exports of Move gas-schedule items plus the chain's native-function
/// cost table indices.
pub mod gas_schedule {
    pub use move_core_types::gas_schedule::*;
    pub use move_vm_types::gas_schedule::{
        calculate_intrinsic_gas, new_from_instructions, zero_cost_schedule, GasStatus,
    };

    /// Index of each native function in the on-chain gas cost table.
    /// Discriminants are stable on-chain identifiers — never renumber.
    #[allow(non_camel_case_types)]
    #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
    #[repr(u8)]
    pub enum NativeCostIndex {
        SHA2_256 = 0,
        SHA3_256 = 1,
        ED25519_VERIFY = 2,
        ED25519_THRESHOLD_VERIFY = 3,
        BCS_TO_BYTES = 4,
        LENGTH = 5,
        EMPTY = 6,
        BORROW = 7,
        BORROW_MUT = 8,
        PUSH_BACK = 9,
        POP_BACK = 10,
        DESTROY_EMPTY = 11,
        SWAP = 12,
        ED25519_VALIDATE_KEY = 13,
        SIGNER_BORROW = 14,
        CREATE_SIGNER = 15,
        DESTROY_SIGNER = 16,
        EMIT_EVENT = 17,
        BCS_TO_ADDRESS = 18,
        TOKEN_NAME_OF = 19,
        KECCAK_256 = 20,
        RIPEMD160 = 21,
        ECRECOVER = 22,
        U256_FROM_BYTES = 23,
        U256_ADD = 24,
        U256_SUB = 25,
        U256_MUL = 26,
        U256_DIV = 27,
        U256_REM = 28,
        U256_POW = 29,
        VEC_APPEND = 30,
        VEC_REMOVE = 31,
        VEC_REVERSE = 32,
    }

    impl NativeCostIndex {
        //note: should change this value when add new native function.
        // Currently variants 0..=32, so 33 in total.
        pub const NUMBER_OF_NATIVE_FUNCTIONS: usize = 33;
    }
}
// The modules below are thin facades re-exporting items from the Move
// crates under this crate's namespace.

pub mod location {
    pub use move_ir_types::location::Loc;
}

pub mod identifier {
    pub use move_core_types::identifier::{IdentStr, Identifier};
}

pub mod language_storage {
    pub use crate::language_storage_ext::FunctionId;
    pub use move_core_types::language_storage::{
        ModuleId, ResourceKey, StructTag, TypeTag, CODE_TAG, CORE_CODE_ADDRESS, RESOURCE_TAG,
    };
}

pub mod move_resource;

pub mod transaction_argument {
    pub use move_core_types::transaction_argument::*;
}
/// Parsing helpers re-exported from `move_core_types`, plus a struct-tag
/// specific wrapper.
pub mod parser {
    use crate::language_storage::TypeTag;
    use anyhow::{bail, Result};
    use move_core_types::language_storage::StructTag;
    pub use move_core_types::parser::{
        parse_transaction_argument, parse_type_tag, parse_type_tags,
    };

    /// Parses `s` as a type tag and requires that it denotes a struct,
    /// failing for any other kind of type tag.
    pub fn parse_struct_tag(s: &str) -> Result<StructTag> {
        match parse_type_tag(s)? {
            TypeTag::Struct(st) => Ok(st),
            t => bail!("expect a struct tag, found: {:?}", t),
        }
    }
}
#[cfg(any(test, feature = "fuzzing"))]
pub mod proptest_types;

pub mod transaction_metadata;

// More facade modules re-exporting Move VM / bytecode items.
pub mod value {
    pub use move_core_types::value::*;
}

pub mod values {
    pub use move_vm_types::values::*;
}

pub mod loaded_data {
    pub mod runtime_types {
        pub use move_vm_types::loaded_data::runtime_types::{StructType, Type};
    }
}

pub mod data_store {
    pub use move_vm_types::data_store::DataStore;
}

pub mod file_format {
    pub use vm::file_format::*;
}

pub mod normalized {
    pub use vm::normalized::*;
}

pub mod compatibility {
    pub use vm::compatibility::*;
}

pub mod views {
    pub use vm::views::*;
}

pub mod data_cache {}

pub mod access {
    pub use vm::access::{ModuleAccess, ScriptAccess};
}

pub mod errors {
    pub use vm::errors::*;
    pub use vm::IndexKind;
}

pub mod write_set;

pub mod state_view;

pub mod transaction;

pub mod contract_event;

pub mod vm_status {
    pub use move_core_types::vm_status::*;
    pub mod sub_status {
        pub use move_core_types::vm_status::sub_status::*;
    }
}

pub mod effects {
    pub use move_core_types::effects::*;
}

pub mod bytecode_verifier {
    pub use bytecode_verifier::{dependencies, script_signature, verify_module, verify_script};
}

pub mod access_path;
pub mod account_config;
pub mod block_metadata;
pub mod event;
pub mod genesis_config;
pub mod on_chain_config;
pub mod on_chain_resource;
pub mod serde_helper;
pub mod sign_message;
pub mod sips;
pub mod time;
pub mod token;

#[cfg(test)]
mod unit_tests;
| 22.629834 | 94 | 0.650391 |
e87181c03a85dc71aff904a1a1b14eb92923da29 | 3,106 | use clap_sys::audio_buffer::clap_audio_buffer;
/// One audio port: its per-channel sample buffers plus a cached list of raw
/// channel pointers (`buffer_list[i]` points into `channel_buffers[i]`).
struct HostAudioPortBuffer<B, S> {
    channel_buffers: Vec<B>,
    // Raw pointers into `channel_buffers`; rebuilt in `new`.
    buffer_list: Vec<*const S>,
    // Length of the shortest channel buffer (0 when there are none).
    min_buffer_length: usize,
}
impl<B: Sized + AsRef<[S]>, S> HostAudioPortBuffer<B, S> {
    /// Wraps the given per-channel buffers, caching one raw pointer per
    /// channel and the shortest channel length.
    pub fn new(channel_buffers: Vec<B>) -> Self {
        let buffer_list = channel_buffers
            .iter()
            .map(|channel| channel.as_ref().as_ptr())
            .collect::<Vec<_>>();
        let mut port = Self {
            buffer_list,
            channel_buffers,
            min_buffer_length: 0,
        };
        port.update_lengths();
        port
    }

    /// Recomputes the cached minimum channel length (0 with no channels).
    fn update_lengths(&mut self) {
        let shortest = self
            .channel_buffers
            .iter()
            .map(|channel| channel.as_ref().len())
            .min();
        self.min_buffer_length = shortest.unwrap_or(0);
    }
}
impl<B: Sized + AsRef<[f32]>> HostAudioPortBuffer<B, f32> {
    // TODO: maybe unsafe?
    /// Builds the raw CLAP descriptor for this port. The returned struct
    /// borrows `buffer_list`'s pointers, so it must not outlive `self`.
    pub fn as_raw(&self) -> clap_audio_buffer {
        clap_audio_buffer {
            data32: self.buffer_list.as_ptr(),
            // 64-bit sample data unused for f32 ports.
            data64: ::core::ptr::null(),
            channel_count: self.buffer_list.len() as u32,
            latency: 0, // TODO
            constant_mask: 0, // TODO
        }
    }
}
/// A set of audio ports plus the pre-built raw CLAP descriptors
/// (`raw_ports[i]` is derived from `ports[i]` via `as_raw`).
pub struct HostAudioBufferCollection<B, S> {
    ports: Vec<HostAudioPortBuffer<B, S>>,
    raw_ports: Vec<clap_audio_buffer>,
    // Shortest channel length across all ports.
    min_buffer_length: usize,
}
impl<B, S> HostAudioBufferCollection<B, S> {
    /// Pointer to the first raw CLAP buffer descriptor; valid for
    /// `port_count()` elements while `self` is alive.
    #[inline]
    pub(crate) fn raw_buffers(&self) -> *const clap_audio_buffer {
        self.raw_ports.as_ptr()
    }

    /// Number of audio ports.
    #[inline]
    pub(crate) fn port_count(&self) -> usize {
        self.raw_ports.len()
    }

    /// Shortest channel buffer length across all ports.
    #[inline]
    pub(crate) fn min_buffer_length(&self) -> usize {
        self.min_buffer_length
    }
}
impl<B: Sized + AsRef<[S]>, S> HostAudioBufferCollection<B, S> {
    /// Returns the samples of one channel of one port, or `None` when either
    /// index is out of range.
    pub fn get_channel_buffer(&self, port_index: usize, channel_index: usize) -> Option<&[S]> {
        let port = self.ports.get(port_index)?;
        let channel = port.channel_buffers.get(channel_index)?;
        Some(channel.as_ref())
    }
}
impl<B: Sized + AsRef<[f32]>> HostAudioBufferCollection<B, f32> {
    /// Builds a collection of `port_count` ports with `channel_count`
    /// channels each, each channel allocated by calling `buffer()`.
    #[inline]
    pub fn for_ports_and_channels<F>(port_count: usize, channel_count: usize, buffer: F) -> Self
    where
        F: Fn() -> B,
    {
        Self::from_buffers((0..port_count).map(|_| (0..channel_count).map(|_| buffer())))
    }

    /// Builds a collection from nested iterators: outer = ports,
    /// inner = that port's channel buffers.
    #[inline]
    pub fn from_buffers<IPorts, IChannels>(ports: IPorts) -> Self
    where
        IPorts: IntoIterator<Item = IChannels>,
        IChannels: IntoIterator<Item = B>,
    {
        let buffers = ports
            .into_iter()
            .map(|channels| HostAudioPortBuffer::new(channels.into_iter().collect()))
            .collect();
        Self::from_vecs(buffers)
    }

    // Finalizes construction: derives the raw descriptors and the global
    // minimum buffer length from the per-port values.
    fn from_vecs(ports: Vec<HostAudioPortBuffer<B, f32>>) -> Self {
        let raw_ports = ports.iter().map(|p| p.as_raw()).collect();
        Self {
            min_buffer_length: ports.iter().map(|p| p.min_buffer_length).min().unwrap_or(0),
            ports,
            raw_ports,
        }
    }
}
| 26.775862 | 96 | 0.556665 |
62db2eff839fb4757f2cee6ca72bcdd5372ae0dd | 16,561 | // This file is part of Substrate.
// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Some utilities for helping access storage with arbitrary key types.
use sp_std::prelude::*;
use codec::{Encode, Decode};
use crate::{StorageHasher, Twox128, storage::unhashed};
use crate::hash::ReversibleStorageHasher;
use super::PrefixIterator;
/// Utility to iterate through raw items in storage.
pub struct StorageIterator<T> {
    // Hashed storage prefix; only keys starting with it are yielded.
    prefix: Vec<u8>,
    // Cursor: the last key visited, used with `next_key`.
    previous_key: Vec<u8>,
    // When true, each visited entry is deleted after being decoded.
    drain: bool,
    _phantom: ::sp_std::marker::PhantomData<T>,
}
impl<T> StorageIterator<T> {
    /// Construct iterator to iterate over map items in `module` for the map called `item`.
    #[deprecated(note="Please use the storage_iter or storage_iter_with_suffix functions instead")]
    pub fn new(module: &[u8], item: &[u8]) -> Self {
        #[allow(deprecated)]
        Self::with_suffix(module, item, &[][..])
    }

    /// Construct iterator to iterate over map items in `module` for the map called `item`,
    /// restricted to keys that additionally start with `suffix`.
    #[deprecated(note="Please use the storage_iter or storage_iter_with_suffix functions instead")]
    pub fn with_suffix(module: &[u8], item: &[u8], suffix: &[u8]) -> Self {
        // Storage keys are twox128(module) ++ twox128(item) ++ suffix ++ ...
        let mut prefix = Vec::new();
        prefix.extend_from_slice(&Twox128::hash(module));
        prefix.extend_from_slice(&Twox128::hash(item));
        prefix.extend_from_slice(suffix);
        let previous_key = prefix.clone();
        Self { prefix, previous_key, drain: false, _phantom: Default::default() }
    }

    /// Mutate this iterator into a draining iterator; items iterated are removed from storage.
    pub fn drain(mut self) -> Self {
        self.drain = true;
        self
    }
}
impl<T: Decode + Sized> Iterator for StorageIterator<T> {
    // Yields (raw key suffix after the prefix, decoded value).
    type Item = (Vec<u8>, T);

    fn next(&mut self) -> Option<(Vec<u8>, T)> {
        loop {
            // Walk the trie key-by-key; stop once we leave the prefix.
            let maybe_next = sp_io::storage::next_key(&self.previous_key)
                .filter(|n| n.starts_with(&self.prefix));
            break match maybe_next {
                Some(next) => {
                    self.previous_key = next.clone();
                    let maybe_value = frame_support::storage::unhashed::get::<T>(&next);
                    match maybe_value {
                        Some(value) => {
                            if self.drain {
                                frame_support::storage::unhashed::kill(&next);
                            }
                            Some((self.previous_key[self.prefix.len()..].to_vec(), value))
                        }
                        // Undecodable entries are skipped, not returned.
                        None => continue,
                    }
                }
                None => None,
            }
        }
    }
}
/// Utility to iterate through raw items in storage, decoding the key via the
/// reversible hasher `H` as well as the value.
pub struct StorageKeyIterator<K, T, H: ReversibleStorageHasher> {
    // Hashed storage prefix; only keys starting with it are yielded.
    prefix: Vec<u8>,
    // Cursor: the last key visited, used with `next_key`.
    previous_key: Vec<u8>,
    // When true, each visited entry is deleted after being decoded.
    drain: bool,
    _phantom: ::sp_std::marker::PhantomData<(K, T, H)>,
}
impl<K, T, H: ReversibleStorageHasher> StorageKeyIterator<K, T, H> {
    /// Construct iterator to iterate over map items in `module` for the map called `item`.
    #[deprecated(note="Please use the storage_key_iter or storage_key_iter_with_suffix functions instead")]
    pub fn new(module: &[u8], item: &[u8]) -> Self {
        #[allow(deprecated)]
        Self::with_suffix(module, item, &[][..])
    }

    /// Construct iterator to iterate over map items in `module` for the map called `item`,
    /// restricted to keys that additionally start with `suffix`.
    #[deprecated(note="Please use the storage_key_iter or storage_key_iter_with_suffix functions instead")]
    pub fn with_suffix(module: &[u8], item: &[u8], suffix: &[u8]) -> Self {
        // Storage keys are twox128(module) ++ twox128(item) ++ suffix ++ hashed key.
        let mut prefix = Vec::new();
        prefix.extend_from_slice(&Twox128::hash(module));
        prefix.extend_from_slice(&Twox128::hash(item));
        prefix.extend_from_slice(suffix);
        let previous_key = prefix.clone();
        Self { prefix, previous_key, drain: false, _phantom: Default::default() }
    }

    /// Mutate this iterator into a draining iterator; items iterated are removed from storage.
    pub fn drain(mut self) -> Self {
        self.drain = true;
        self
    }
}
impl<K: Decode + Sized, T: Decode + Sized, H: ReversibleStorageHasher> Iterator
    for StorageKeyIterator<K, T, H>
{
    type Item = (K, T);

    fn next(&mut self) -> Option<(K, T)> {
        loop {
            // Walk the trie key-by-key; stop once we leave the prefix.
            let maybe_next = sp_io::storage::next_key(&self.previous_key)
                .filter(|n| n.starts_with(&self.prefix));
            break match maybe_next {
                Some(next) => {
                    self.previous_key = next.clone();
                    // Strip the prefix, then undo the hasher to recover the
                    // SCALE-encoded key material.
                    let mut key_material = H::reverse(&next[self.prefix.len()..]);
                    match K::decode(&mut key_material) {
                        Ok(key) => {
                            let maybe_value = frame_support::storage::unhashed::get::<T>(&next);
                            match maybe_value {
                                Some(value) => {
                                    if self.drain {
                                        frame_support::storage::unhashed::kill(&next);
                                    }
                                    Some((key, value))
                                }
                                // Undecodable values are skipped.
                                None => continue,
                            }
                        }
                        // Undecodable keys are skipped.
                        Err(_) => continue,
                    }
                }
                None => None,
            }
        }
    }
}
/// Construct iterator to iterate over map items in `module` for the map called `item`.
pub fn storage_iter<T: Decode + Sized>(module: &[u8], item: &[u8]) -> PrefixIterator<(Vec<u8>, T)> {
    storage_iter_with_suffix(module, item, &[][..])
}

/// Construct iterator to iterate over map items in `module` for the map called `item`,
/// restricted to keys that additionally start with `suffix`. Yields the raw
/// key remainder (after the prefix) together with the decoded value.
pub fn storage_iter_with_suffix<T: Decode + Sized>(
    module: &[u8],
    item: &[u8],
    suffix: &[u8],
) -> PrefixIterator<(Vec<u8>, T)> {
    // Storage keys are twox128(module) ++ twox128(item) ++ suffix ++ ...
    let mut prefix = Vec::new();
    prefix.extend_from_slice(&Twox128::hash(module));
    prefix.extend_from_slice(&Twox128::hash(item));
    prefix.extend_from_slice(suffix);
    let previous_key = prefix.clone();
    let closure = |raw_key_without_prefix: &[u8], raw_value: &[u8]| {
        let value = T::decode(&mut &raw_value[..])?;
        Ok((raw_key_without_prefix.to_vec(), value))
    };
    PrefixIterator { prefix, previous_key, drain: false, closure }
}

/// Construct iterator to iterate over map items in `module` for the map called `item`,
/// decoding the key via the reversible hasher `H`.
pub fn storage_key_iter<K: Decode + Sized, T: Decode + Sized, H: ReversibleStorageHasher>(
    module: &[u8],
    item: &[u8],
) -> PrefixIterator<(K, T)> {
    storage_key_iter_with_suffix::<K, T, H>(module, item, &[][..])
}

/// Construct iterator to iterate over map items in `module` for the map called `item`,
/// restricted to keys that additionally start with `suffix`; keys are decoded
/// via the reversible hasher `H`.
pub fn storage_key_iter_with_suffix<K: Decode + Sized, T: Decode + Sized, H: ReversibleStorageHasher>(
    module: &[u8],
    item: &[u8],
    suffix: &[u8],
) -> PrefixIterator<(K, T)> {
    let mut prefix = Vec::new();
    prefix.extend_from_slice(&Twox128::hash(module));
    prefix.extend_from_slice(&Twox128::hash(item));
    prefix.extend_from_slice(suffix);
    let previous_key = prefix.clone();
    let closure = |raw_key_without_prefix: &[u8], raw_value: &[u8]| {
        // Undo the hasher to recover the SCALE-encoded key material.
        let mut key_material = H::reverse(raw_key_without_prefix);
        let key = K::decode(&mut key_material)?;
        let value = T::decode(&mut &raw_value[..])?;
        Ok((key, value))
    };
    PrefixIterator { prefix, previous_key, drain: false, closure }
}
/// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`.
pub fn have_storage_value(module: &[u8], item: &[u8], hash: &[u8]) -> bool {
get_storage_value::<()>(module, item, hash).is_some()
}
/// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`.
pub fn get_storage_value<T: Decode + Sized>(module: &[u8], item: &[u8], hash: &[u8]) -> Option<T> {
let mut key = vec![0u8; 32 + hash.len()];
key[0..16].copy_from_slice(&Twox128::hash(module));
key[16..32].copy_from_slice(&Twox128::hash(item));
key[32..].copy_from_slice(hash);
frame_support::storage::unhashed::get::<T>(&key)
}
/// Remove and return the value stored under `module`, the map's `item` name and the
/// raw key material `hash`, or `None` if absent or undecodable.
pub fn take_storage_value<T: Decode + Sized>(module: &[u8], item: &[u8], hash: &[u8]) -> Option<T> {
    // Full key: twox128(module) ++ twox128(item) ++ hash.
    let key: Vec<u8> = Twox128::hash(module)
        .iter()
        .chain(Twox128::hash(item).iter())
        .chain(hash.iter())
        .copied()
        .collect();
    frame_support::storage::unhashed::take::<T>(&key)
}
/// Encode and write `value` under the key built from `module`, the map's `item`
/// name and the raw key material `hash`.
pub fn put_storage_value<T: Encode>(module: &[u8], item: &[u8], hash: &[u8], value: T) {
    // Full key: twox128(module) ++ twox128(item) ++ hash.
    let mut key = Vec::with_capacity(32 + hash.len());
    key.extend_from_slice(&Twox128::hash(module));
    key.extend_from_slice(&Twox128::hash(item));
    key.extend_from_slice(hash);
    frame_support::storage::unhashed::put(&key, &value);
}
/// Remove every storage entry whose key starts with the prefix formed from `module`,
/// the map's `item` name and `hash`. (Note: this deletes, it does not read — the
/// previous doc comment was a copy-paste error.)
pub fn remove_storage_prefix(module: &[u8], item: &[u8], hash: &[u8]) {
    let mut key = vec![0u8; 32 + hash.len()];
    key[0..16].copy_from_slice(&Twox128::hash(module));
    key[16..32].copy_from_slice(&Twox128::hash(item));
    key[32..].copy_from_slice(hash);
    // `None` limit: remove all matching keys in one go.
    frame_support::storage::unhashed::kill_prefix(&key, None);
}
/// Remove and return the map entry for the typed key `key`, hashing it with `H`
/// before lookup. (This takes — removes — the value, it does not merely read it.)
pub fn take_storage_item<K: Encode + Sized, T: Decode + Sized, H: StorageHasher>(
    module: &[u8],
    item: &[u8],
    key: K,
) -> Option<T> {
    take_storage_value(module, item, key.using_encoded(H::hash).as_ref())
}
/// Move a storage from a pallet prefix to another pallet prefix.
///
/// Keys used in pallet storages always start with:
/// `concat(twox_128(pallet_name), twox_128(storage_name))`.
///
/// This function will remove all value for which the key start with
/// `concat(twox_128(old_pallet_name), twox_128(storage_name))` and insert them at the key with
/// the start replaced by `concat(twox_128(new_pallet_name), twox_128(storage_name))`.
///
/// # Example
///
/// If a pallet named "my_example" has 2 storages named "Foo" and "Bar" and the pallet is renamed
/// "my_new_example_name", a migration can be:
/// ```
/// # use frame_support::storage::migration::move_storage_from_pallet;
/// # sp_io::TestExternalities::new_empty().execute_with(|| {
/// move_storage_from_pallet(b"Foo", b"my_example", b"my_new_example_name");
/// move_storage_from_pallet(b"Bar", b"my_example", b"my_new_example_name");
/// # })
/// ```
pub fn move_storage_from_pallet(
    storage_name: &[u8],
    old_pallet_name: &[u8],
    new_pallet_name: &[u8]
) {
    let mut new_prefix = Vec::new();
    new_prefix.extend_from_slice(&Twox128::hash(new_pallet_name));
    new_prefix.extend_from_slice(&Twox128::hash(storage_name));
    let mut old_prefix = Vec::new();
    old_prefix.extend_from_slice(&Twox128::hash(old_pallet_name));
    old_prefix.extend_from_slice(&Twox128::hash(storage_name));
    move_prefix(&old_prefix, &new_prefix);
    // `move_prefix` does not move the value stored at the bare prefix key itself
    // (e.g. a `StorageValue`), so handle that key explicitly here.
    if let Some(value) = unhashed::get_raw(&old_prefix) {
        unhashed::put_raw(&new_prefix, &value);
        unhashed::kill(&old_prefix);
    }
}
/// Move all storages from a pallet prefix to another pallet prefix.
///
/// Keys used in pallet storages always start with:
/// `concat(twox_128(pallet_name), twox_128(storage_name))`.
///
/// This function will remove all value for which the key start with `twox_128(old_pallet_name)`
/// and insert them at the key with the start replaced by `twox_128(new_pallet_name)`.
///
/// NOTE: The value at the key `twox_128(old_pallet_name)` is not moved.
///
/// # Example
///
/// If a pallet named "my_example" has some storages and the pallet is renamed
/// "my_new_example_name", a migration can be:
/// ```
/// # use frame_support::storage::migration::move_pallet;
/// # sp_io::TestExternalities::new_empty().execute_with(|| {
/// move_pallet(b"my_example", b"my_new_example_name");
/// # })
/// ```
pub fn move_pallet(old_pallet_name: &[u8], new_pallet_name: &[u8]) {
    move_prefix(&Twox128::hash(old_pallet_name), &Twox128::hash(new_pallet_name))
}
/// Move all `(key, value)` after some prefix to the another prefix
///
/// This function will remove all value for which the key start with `from_prefix`
/// and insert them at the key with the start replaced by `to_prefix`.
///
/// NOTE: The value at the key `from_prefix` is not moved.
pub fn move_prefix(from_prefix: &[u8], to_prefix: &[u8]) {
if from_prefix == to_prefix {
return
}
let iter = PrefixIterator {
prefix: from_prefix.to_vec(),
previous_key: from_prefix.to_vec(),
drain: true,
closure: |key, value| Ok((key.to_vec(), value.to_vec())),
};
for (key, value) in iter {
let full_key = [to_prefix, &key].concat();
unhashed::put_raw(&full_key, &value);
}
}
#[cfg(test)]
mod tests {
use crate::{
pallet_prelude::{StorageValue, StorageMap, Twox64Concat, Twox128},
hash::StorageHasher,
};
use sp_io::TestExternalities;
use super::{
move_prefix,
move_pallet,
move_storage_from_pallet,
storage_iter,
storage_key_iter,
};
struct OldPalletStorageValuePrefix;
impl frame_support::traits::StorageInstance for OldPalletStorageValuePrefix {
const STORAGE_PREFIX: &'static str = "foo_value";
fn pallet_prefix() -> &'static str {
"my_old_pallet"
}
}
type OldStorageValue = StorageValue<OldPalletStorageValuePrefix, u32>;
struct OldPalletStorageMapPrefix;
impl frame_support::traits::StorageInstance for OldPalletStorageMapPrefix {
const STORAGE_PREFIX: &'static str = "foo_map";
fn pallet_prefix() -> &'static str {
"my_old_pallet"
}
}
type OldStorageMap = StorageMap<OldPalletStorageMapPrefix, Twox64Concat, u32, u32>;
struct NewPalletStorageValuePrefix;
impl frame_support::traits::StorageInstance for NewPalletStorageValuePrefix {
const STORAGE_PREFIX: &'static str = "foo_value";
fn pallet_prefix() -> &'static str {
"my_new_pallet"
}
}
type NewStorageValue = StorageValue<NewPalletStorageValuePrefix, u32>;
struct NewPalletStorageMapPrefix;
impl frame_support::traits::StorageInstance for NewPalletStorageMapPrefix {
const STORAGE_PREFIX: &'static str = "foo_map";
fn pallet_prefix() -> &'static str {
"my_new_pallet"
}
}
type NewStorageMap = StorageMap<NewPalletStorageMapPrefix, Twox64Concat, u32, u32>;
#[test]
fn test_move_prefix() {
TestExternalities::new_empty().execute_with(|| {
OldStorageValue::put(3);
OldStorageMap::insert(1, 2);
OldStorageMap::insert(3, 4);
move_prefix(&Twox128::hash(b"my_old_pallet"), &Twox128::hash(b"my_new_pallet"));
assert_eq!(OldStorageValue::get(), None);
assert_eq!(OldStorageMap::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(NewStorageValue::get(), Some(3));
assert_eq!(NewStorageMap::iter().collect::<Vec<_>>(), vec![(1, 2), (3, 4)]);
})
}
#[test]
fn test_move_storage() {
TestExternalities::new_empty().execute_with(|| {
OldStorageValue::put(3);
OldStorageMap::insert(1, 2);
OldStorageMap::insert(3, 4);
move_storage_from_pallet(b"foo_map", b"my_old_pallet", b"my_new_pallet");
assert_eq!(OldStorageValue::get(), Some(3));
assert_eq!(OldStorageMap::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(NewStorageValue::get(), None);
assert_eq!(NewStorageMap::iter().collect::<Vec<_>>(), vec![(1, 2), (3, 4)]);
move_storage_from_pallet(b"foo_value", b"my_old_pallet", b"my_new_pallet");
assert_eq!(OldStorageValue::get(), None);
assert_eq!(OldStorageMap::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(NewStorageValue::get(), Some(3));
assert_eq!(NewStorageMap::iter().collect::<Vec<_>>(), vec![(1, 2), (3, 4)]);
})
}
#[test]
fn test_move_pallet() {
TestExternalities::new_empty().execute_with(|| {
OldStorageValue::put(3);
OldStorageMap::insert(1, 2);
OldStorageMap::insert(3, 4);
move_pallet(b"my_old_pallet", b"my_new_pallet");
assert_eq!(OldStorageValue::get(), None);
assert_eq!(OldStorageMap::iter().collect::<Vec<_>>(), vec![]);
assert_eq!(NewStorageValue::get(), Some(3));
assert_eq!(NewStorageMap::iter().collect::<Vec<_>>(), vec![(1, 2), (3, 4)]);
})
}
#[test]
fn test_storage_iter() {
TestExternalities::new_empty().execute_with(|| {
OldStorageValue::put(3);
OldStorageMap::insert(1, 2);
OldStorageMap::insert(3, 4);
assert_eq!(
storage_key_iter::<i32, i32, Twox64Concat>(b"my_old_pallet", b"foo_map").collect::<Vec<_>>(),
vec![(1, 2), (3, 4)],
);
assert_eq!(
storage_iter(b"my_old_pallet", b"foo_map").drain().map(|t| t.1).collect::<Vec<i32>>(),
vec![2, 4],
);
assert_eq!(OldStorageMap::iter().collect::<Vec<_>>(), vec![]);
// Empty because storage iterator skips over the entry under the first key
assert_eq!(
storage_iter::<i32>(b"my_old_pallet", b"foo_value").drain().next(),
None
);
assert_eq!(OldStorageValue::get(), Some(3));
});
}
}
| 34.430353 | 104 | 0.683534 |
e8e06298f0cc356f60c308ee58e589ba29475c15 | 8,143 | #[doc = "Reader of register RTC_SR"]
pub type R = crate::R<u32, super::RTC_SR>;
// Generated reader plumbing for the ACKUPD bit (bit 0) of RTC_SR.
#[doc = "Acknowledge for Update\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ACKUPD_A {
    #[doc = "0: Time and calendar registers cannot be updated."]
    FREERUN,
    #[doc = "1: Time and calendar registers can be updated."]
    UPDATE,
}
impl From<ACKUPD_A> for bool {
    #[inline(always)]
    fn from(variant: ACKUPD_A) -> Self {
        match variant {
            ACKUPD_A::FREERUN => false,
            ACKUPD_A::UPDATE => true,
        }
    }
}
#[doc = "Reader of field `ACKUPD`"]
pub type ACKUPD_R = crate::R<bool, ACKUPD_A>;
impl ACKUPD_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ACKUPD_A {
        match self.bits {
            false => ACKUPD_A::FREERUN,
            true => ACKUPD_A::UPDATE,
        }
    }
    #[doc = "Checks if the value of the field is `FREERUN`"]
    #[inline(always)]
    pub fn is_freerun(&self) -> bool {
        *self == ACKUPD_A::FREERUN
    }
    #[doc = "Checks if the value of the field is `UPDATE`"]
    #[inline(always)]
    pub fn is_update(&self) -> bool {
        *self == ACKUPD_A::UPDATE
    }
}
// Generated reader plumbing for the ALARM bit (bit 1) of RTC_SR.
#[doc = "Alarm Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ALARM_A {
    #[doc = "0: No alarm matching condition occurred."]
    NO_ALARMEVENT,
    #[doc = "1: An alarm matching condition has occurred."]
    ALARMEVENT,
}
impl From<ALARM_A> for bool {
    #[inline(always)]
    fn from(variant: ALARM_A) -> Self {
        match variant {
            ALARM_A::NO_ALARMEVENT => false,
            ALARM_A::ALARMEVENT => true,
        }
    }
}
#[doc = "Reader of field `ALARM`"]
pub type ALARM_R = crate::R<bool, ALARM_A>;
impl ALARM_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ALARM_A {
        match self.bits {
            false => ALARM_A::NO_ALARMEVENT,
            true => ALARM_A::ALARMEVENT,
        }
    }
    #[doc = "Checks if the value of the field is `NO_ALARMEVENT`"]
    #[inline(always)]
    pub fn is_no_alarmevent(&self) -> bool {
        *self == ALARM_A::NO_ALARMEVENT
    }
    #[doc = "Checks if the value of the field is `ALARMEVENT`"]
    #[inline(always)]
    pub fn is_alarmevent(&self) -> bool {
        *self == ALARM_A::ALARMEVENT
    }
}
// Generated reader plumbing for the SEC bit (bit 2) of RTC_SR.
#[doc = "Second Event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEC_A {
    #[doc = "0: No second event has occurred since the last clear."]
    NO_SECEVENT,
    #[doc = "1: At least one second event has occurred since the last clear."]
    SECEVENT,
}
impl From<SEC_A> for bool {
    #[inline(always)]
    fn from(variant: SEC_A) -> Self {
        match variant {
            SEC_A::NO_SECEVENT => false,
            SEC_A::SECEVENT => true,
        }
    }
}
#[doc = "Reader of field `SEC`"]
pub type SEC_R = crate::R<bool, SEC_A>;
impl SEC_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SEC_A {
        match self.bits {
            false => SEC_A::NO_SECEVENT,
            true => SEC_A::SECEVENT,
        }
    }
    #[doc = "Checks if the value of the field is `NO_SECEVENT`"]
    #[inline(always)]
    pub fn is_no_secevent(&self) -> bool {
        *self == SEC_A::NO_SECEVENT
    }
    #[doc = "Checks if the value of the field is `SECEVENT`"]
    #[inline(always)]
    pub fn is_secevent(&self) -> bool {
        *self == SEC_A::SECEVENT
    }
}
// Generated reader plumbing for the TIMEV bit (bit 3) of RTC_SR.
#[doc = "Time Event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TIMEV_A {
    #[doc = "0: No time event has occurred since the last clear."]
    NO_TIMEVENT,
    #[doc = "1: At least one time event has occurred since the last clear."]
    TIMEVENT,
}
impl From<TIMEV_A> for bool {
    #[inline(always)]
    fn from(variant: TIMEV_A) -> Self {
        match variant {
            TIMEV_A::NO_TIMEVENT => false,
            TIMEV_A::TIMEVENT => true,
        }
    }
}
#[doc = "Reader of field `TIMEV`"]
pub type TIMEV_R = crate::R<bool, TIMEV_A>;
impl TIMEV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIMEV_A {
        match self.bits {
            false => TIMEV_A::NO_TIMEVENT,
            true => TIMEV_A::TIMEVENT,
        }
    }
    #[doc = "Checks if the value of the field is `NO_TIMEVENT`"]
    #[inline(always)]
    pub fn is_no_timevent(&self) -> bool {
        *self == TIMEV_A::NO_TIMEVENT
    }
    #[doc = "Checks if the value of the field is `TIMEVENT`"]
    #[inline(always)]
    pub fn is_timevent(&self) -> bool {
        *self == TIMEV_A::TIMEVENT
    }
}
// Generated reader plumbing for the CALEV bit (bit 4) of RTC_SR.
#[doc = "Calendar Event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CALEV_A {
    #[doc = "0: No calendar event has occurred since the last clear."]
    NO_CALEVENT,
    #[doc = "1: At least one calendar event has occurred since the last clear."]
    CALEVENT,
}
impl From<CALEV_A> for bool {
    #[inline(always)]
    fn from(variant: CALEV_A) -> Self {
        match variant {
            CALEV_A::NO_CALEVENT => false,
            CALEV_A::CALEVENT => true,
        }
    }
}
#[doc = "Reader of field `CALEV`"]
pub type CALEV_R = crate::R<bool, CALEV_A>;
impl CALEV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CALEV_A {
        match self.bits {
            false => CALEV_A::NO_CALEVENT,
            true => CALEV_A::CALEVENT,
        }
    }
    #[doc = "Checks if the value of the field is `NO_CALEVENT`"]
    #[inline(always)]
    pub fn is_no_calevent(&self) -> bool {
        *self == CALEV_A::NO_CALEVENT
    }
    #[doc = "Checks if the value of the field is `CALEVENT`"]
    #[inline(always)]
    pub fn is_calevent(&self) -> bool {
        *self == CALEV_A::CALEVENT
    }
}
// Generated reader plumbing for the TDERR bit (bit 5) of RTC_SR.
#[doc = "Time and/or Date Free Running Error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TDERR_A {
    #[doc = "0: The internal free running counters are carrying valid values since the last read of the Status Register (RTC_SR)."]
    CORRECT,
    #[doc = "1: The internal free running counters have been corrupted (invalid date or time, non-BCD values) since the last read and/or they are still invalid."]
    ERR_TIMEDATE,
}
impl From<TDERR_A> for bool {
    #[inline(always)]
    fn from(variant: TDERR_A) -> Self {
        match variant {
            TDERR_A::CORRECT => false,
            TDERR_A::ERR_TIMEDATE => true,
        }
    }
}
#[doc = "Reader of field `TDERR`"]
pub type TDERR_R = crate::R<bool, TDERR_A>;
impl TDERR_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TDERR_A {
        match self.bits {
            false => TDERR_A::CORRECT,
            true => TDERR_A::ERR_TIMEDATE,
        }
    }
    #[doc = "Checks if the value of the field is `CORRECT`"]
    #[inline(always)]
    pub fn is_correct(&self) -> bool {
        *self == TDERR_A::CORRECT
    }
    #[doc = "Checks if the value of the field is `ERR_TIMEDATE`"]
    #[inline(always)]
    pub fn is_err_timedate(&self) -> bool {
        *self == TDERR_A::ERR_TIMEDATE
    }
}
// Field accessors for RTC_SR: each extracts one status bit from the raw register value.
impl R {
    #[doc = "Bit 0 - Acknowledge for Update"]
    #[inline(always)]
    pub fn ackupd(&self) -> ACKUPD_R {
        ACKUPD_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Alarm Flag"]
    #[inline(always)]
    pub fn alarm(&self) -> ALARM_R {
        ALARM_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Second Event"]
    #[inline(always)]
    pub fn sec(&self) -> SEC_R {
        SEC_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Time Event"]
    #[inline(always)]
    pub fn timev(&self) -> TIMEV_R {
        TIMEV_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Calendar Event"]
    #[inline(always)]
    pub fn calev(&self) -> CALEV_R {
        CALEV_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Time and/or Date Free Running Error"]
    #[inline(always)]
    pub fn tderr(&self) -> TDERR_R {
        TDERR_R::new(((self.bits >> 5) & 0x01) != 0)
    }
}
| 30.271375 | 162 | 0.576569 |
ed55c1f5a7c07073ea45b5c200b852a72e216b63 | 2,493 | use clipboard::{ClipboardContext, ClipboardProvider};
use ruffle_core::backend::ui::{Error, MouseCursor, UiBackend};
use std::rc::Rc;
use tinyfiledialogs::{message_box_ok, MessageBoxIcon};
use winit::window::{Fullscreen, Window};
/// Desktop implementation of Ruffle's `UiBackend`, driving a winit window,
/// the system clipboard, and native message boxes.
pub struct DesktopUiBackend {
    // Window used for cursor control and fullscreen toggling.
    window: Rc<Window>,
    // Tracks the last visibility set via `set_mouse_visible`.
    cursor_visible: bool,
    // Handle to the system clipboard.
    clipboard: ClipboardContext,
}
impl DesktopUiBackend {
    /// Create a backend for `window` with the cursor initially visible.
    ///
    /// Panics if no system clipboard provider can be acquired
    /// (`ClipboardProvider::new().unwrap()`).
    pub fn new(window: Rc<Window>) -> Self {
        Self {
            window,
            cursor_visible: true,
            clipboard: ClipboardProvider::new().unwrap(),
        }
    }
}
// TODO: Move link to https://ruffle.rs/faq or similar
// Shown via a native message box when loaded content uses unsupported features.
const UNSUPPORTED_CONTENT_MESSAGE: &str = "\
This content is not yet supported by Ruffle and will likely not run as intended.
See the following link for more info:
https://github.com/ruffle-rs/ruffle/wiki/Frequently-Asked-Questions-For-Users";
// Shown when the root movie could not be opened or downloaded.
const DOWNLOAD_FAILED_MESSAGE: &str = "Ruffle failed to open or download this file.";
impl UiBackend for DesktopUiBackend {
    /// Whether the cursor was last set visible.
    fn mouse_visible(&self) -> bool {
        self.cursor_visible
    }

    /// Show or hide the OS cursor over the window, remembering the state.
    fn set_mouse_visible(&mut self, visible: bool) {
        self.window.set_cursor_visible(visible);
        self.cursor_visible = visible;
    }

    /// Map Ruffle's cursor kind onto the corresponding winit cursor icon.
    fn set_mouse_cursor(&mut self, cursor: MouseCursor) {
        use winit::window::CursorIcon;
        self.window.set_cursor_icon(match cursor {
            MouseCursor::Arrow => CursorIcon::Arrow,
            MouseCursor::Hand => CursorIcon::Hand,
            MouseCursor::IBeam => CursorIcon::Text,
            MouseCursor::Grab => CursorIcon::Grab,
        });
    }

    /// Place `content` on the system clipboard; panics if the clipboard call fails.
    fn set_clipboard_content(&mut self, content: String) {
        self.clipboard.set_contents(content).unwrap();
    }

    /// Enter or leave borderless fullscreen on the window.
    fn set_fullscreen(&mut self, is_full: bool) -> Result<(), Error> {
        let mode = if is_full {
            Some(Fullscreen::Borderless(None))
        } else {
            None
        };
        self.window.set_fullscreen(mode);
        Ok(())
    }

    /// Warn the user (native dialog) that the content uses unsupported features.
    fn display_unsupported_message(&self) {
        message_box_ok(
            "Ruffle - Unsupported content",
            UNSUPPORTED_CONTENT_MESSAGE,
            MessageBoxIcon::Warning,
        );
    }

    /// Warn the user (native dialog) that the root movie failed to load.
    fn display_root_movie_download_failed_message(&self) {
        message_box_ok(
            "Ruffle - Load failed",
            DOWNLOAD_FAILED_MESSAGE,
            MessageBoxIcon::Warning,
        );
    }

    /// Show an informational native dialog with the given text.
    fn message(&self, message: &str) {
        message_box_ok("Ruffle", message, MessageBoxIcon::Info)
    }
}
| 28.988372 | 85 | 0.632972 |
dd7650c321d9d2c6511474efb62ebd22c21d81a3 | 6,746 | // Copyright 2015-2020 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.
// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
//! Account state encoding and decoding
use std::collections::HashSet;
use account_db::{AccountDB, AccountDBMut};
use bytes::Bytes;
use common_types::{
basic_account::BasicAccount,
snapshot::Progress,
errors::SnapshotError as Error,
};
use ethereum_types::{H256, U256};
use ethtrie::{TrieDB, TrieDBMut};
use hash_db::HashDB;
use keccak_hash::{KECCAK_EMPTY, KECCAK_NULL_RLP};
use log::{trace, warn};
use parking_lot::RwLock;
use rlp::{RlpStream, Rlp};
use trie_db::{Trie, TrieMut};
// An empty account -- these were replaced with RLP null data for a space optimization in v1.
// All numeric fields are zero and the roots/hashes are the canonical "empty" constants.
pub const ACC_EMPTY: BasicAccount = BasicAccount {
    nonce: U256([0, 0, 0, 0]),
    balance: U256([0, 0, 0, 0]),
    storage_root: KECCAK_NULL_RLP,
    code_hash: KECCAK_EMPTY,
    code_version: U256([0, 0, 0, 0]),
};
// whether an encoded account has code and how it is referred to.
// The discriminant values are part of the snapshot wire format — do not renumber.
#[repr(u8)]
enum CodeState {
    // the account has no code.
    Empty = 0,
    // raw code is encoded.
    Inline = 1,
    // the code is referred to by hash.
    Hash = 2,
}
impl CodeState {
    // Decode a wire byte into a `CodeState`; any other value is an error.
    fn from(x: u8) -> Result<Self, Error> {
        match x {
            0 => Ok(CodeState::Empty),
            1 => Ok(CodeState::Inline),
            2 => Ok(CodeState::Hash),
            _ => Err(Error::UnrecognizedCodeState(x))
        }
    }
    // The wire byte for this state.
    fn raw(self) -> u8 {
        self as u8
    }
}
// walk the account's storage trie, returning a vector of RLP items containing the
// account address hash, account properties and the storage. Each item contains at most `max_storage_items`
// storage records split according to snapshot format definition.
//
// The first chunk is capped at `first_chunk_size` bytes, all subsequent ones at
// `max_chunk_size`. When a storage pair does not fit in the current chunk it is
// carried over (`leftover`) into the next one. Inline code is emitted only the
// first time a code hash is seen (`used_code`); later accounts refer to it by hash.
// The shared `Progress` lock allows the caller to abort the snapshot mid-walk.
pub fn to_fat_rlps(
    account_hash: &H256,
    acc: &BasicAccount,
    acct_db: &AccountDB,
    used_code: &mut HashSet<H256>,
    first_chunk_size: usize,
    max_chunk_size: usize,
    p: &RwLock<Progress>,
) -> Result<Vec<Bytes>, Error> {
    let db = &(acct_db as &dyn HashDB<_,_>);
    let db = TrieDB::new(db, &acc.storage_root)?;
    let mut chunks = Vec::new();
    let mut db_iter = db.iter()?;
    let mut target_chunk_size = first_chunk_size;
    let mut account_stream = RlpStream::new_list(2);
    // Storage pair that did not fit in the previous chunk, to be retried first.
    let mut leftover: Option<Vec<u8>> = None;
    loop {
        account_stream.append(account_hash);
        // Accounts with code_version == 0 use the shorter 5-field encoding.
        let use_short_version = acc.code_version.is_zero();
        match use_short_version {
            true => { account_stream.begin_list(5); },
            false => { account_stream.begin_list(6); },
        }
        account_stream.append(&acc.nonce)
            .append(&acc.balance);
        // [has_code, code_hash].
        if acc.code_hash == KECCAK_EMPTY {
            account_stream.append(&CodeState::Empty.raw()).append_empty_data();
        } else if used_code.contains(&acc.code_hash) {
            // Code already emitted inline in an earlier account: refer to it by hash.
            account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash);
        } else {
            match acct_db.get(&acc.code_hash, hash_db::EMPTY_PREFIX) {
                Some(c) => {
                    used_code.insert(acc.code_hash.clone());
                    account_stream.append(&CodeState::Inline.raw()).append(&&*c);
                }
                None => {
                    warn!("code lookup failed during snapshot");
                    account_stream.append(&false).append_empty_data();
                }
            }
        }
        if !use_short_version {
            account_stream.append(&acc.code_version);
        }
        account_stream.begin_unbounded_list();
        if account_stream.len() > target_chunk_size {
            // account does not fit, push an empty record to mark a new chunk
            target_chunk_size = max_chunk_size;
            chunks.push(Vec::new());
        }
        if let Some(pair) = leftover.take() {
            // A single pair must fit in a fresh chunk, otherwise the chunk size is unusable.
            if !account_stream.append_raw_checked(&pair, 1, target_chunk_size) {
                return Err(Error::ChunkTooSmall);
            }
        }
        loop {
            if p.read().abort {
                trace!(target: "snapshot", "to_fat_rlps: aborting snapshot");
                return Err(Error::SnapshotAborted);
            }
            match db_iter.next() {
                Some(Ok((k, v))) => {
                    let pair = {
                        let mut stream = RlpStream::new_list(2);
                        stream.append(&k).append(&&*v);
                        stream.drain()
                    };
                    if !account_stream.append_raw_checked(&pair, 1, target_chunk_size) {
                        // Chunk full: close it and retry this pair in the next chunk.
                        account_stream.finalize_unbounded_list();
                        let stream = ::std::mem::replace(&mut account_stream, RlpStream::new_list(2));
                        chunks.push(stream.out());
                        target_chunk_size = max_chunk_size;
                        leftover = Some(pair);
                        break;
                    }
                },
                Some(Err(e)) => {
                    return Err(e.into());
                },
                None => {
                    // Storage exhausted: close the final chunk and return everything.
                    account_stream.finalize_unbounded_list();
                    let stream = ::std::mem::replace(&mut account_stream, RlpStream::new_list(2));
                    chunks.push(stream.out());
                    return Ok(chunks);
                }
            }
        }
    }
}
// decode a fat rlp, and rebuild the storage trie as we go.
// returns the account structure along with its newly recovered code,
// if it exists.
//
// An empty RLP item is the special encoding of `ACC_EMPTY`. A 5-item list is the
// short encoding (implied code_version == 0); a 6-item list carries it explicitly.
// Storage pairs are inserted into a trie rooted at `storage_root`, which may span
// multiple fat-rlp chunks for the same account (hence the caller passes the root in).
pub fn from_fat_rlp(
    acct_db: &mut AccountDBMut,
    rlp: Rlp,
    mut storage_root: H256,
) -> Result<(BasicAccount, Option<Bytes>), Error> {
    // check for special case of empty account.
    if rlp.is_empty() {
        return Ok((ACC_EMPTY, None));
    }
    let use_short_version = match rlp.item_count()? {
        5 => true,
        6 => false,
        _ => return Err(rlp::DecoderError::RlpIncorrectListLen.into()),
    };
    let nonce = rlp.val_at(0)?;
    let balance = rlp.val_at(1)?;
    let code_state: CodeState = {
        let raw: u8 = rlp.val_at(2)?;
        CodeState::from(raw)?
    };
    // load the code if it exists.
    let (code_hash, new_code) = match code_state {
        CodeState::Empty => (KECCAK_EMPTY, None),
        CodeState::Inline => {
            // Code travels inline: store it and report it back to the caller.
            let code: Bytes = rlp.val_at(3)?;
            let code_hash = acct_db.insert(hash_db::EMPTY_PREFIX, &code);
            (code_hash, Some(code))
        }
        CodeState::Hash => {
            let code_hash = rlp.val_at(3)?;
            (code_hash, None)
        }
    };
    let code_version = if use_short_version {
        U256::zero()
    } else {
        rlp.val_at(4)?
    };
    {
        // Continue an existing trie when a previous chunk already set a root.
        let mut storage_trie = if storage_root.is_zero() {
            TrieDBMut::new(acct_db, &mut storage_root)
        } else {
            TrieDBMut::from_existing(acct_db, &mut storage_root)?
        };
        let pairs = rlp.at(if use_short_version { 4 } else { 5 })?;
        for pair_rlp in pairs.iter() {
            let k: Bytes = pair_rlp.val_at(0)?;
            let v: Bytes = pair_rlp.val_at(1)?;
            storage_trie.insert(&k, &v)?;
        }
    }
    let acc = BasicAccount {
        nonce,
        balance,
        storage_root,
        code_hash,
        code_version,
    };
    Ok((acc, new_code))
}
| 27.311741 | 107 | 0.676253 |
eb60e34df8f14e5f4259634f7d6bb5673b41fd41 | 608 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Compile-fail test for the `unreachable_code` lint: the `//~ ERROR` marker is the
// diagnostic the test harness expects on that line — do not remove or move it.
// NOTE(review): uses historical Rust syntax (`fail!`, attribute followed by `;`);
// keep as-is, this file is expected to be era-accurate, not modern.
#[deny(unreachable_code)];
fn g() -> ! { fail!(); }
fn f() -> ! {
    return g();
    g(); //~ ERROR: unreachable statement
}
fn main() { }
| 32 | 68 | 0.685855 |
d60649d7572d50097bab90ddbbd879f54c500cdb | 13,794 | use std::marker::PhantomData;
use na::{self, DVector, Dim, Dynamic, Real, U1, VectorSliceN};
// FIXME: could we just merge UnilateralConstraint and Bilateral constraint into a single structure
// without performance impact due to clamping?
use math::{SpatialDim, SPATIAL_DIM};
use solver::{BilateralConstraint, BilateralGroundConstraint, ImpulseLimits, UnilateralConstraint,
UnilateralGroundConstraint};
/// A SOR-Prox velocity-based constraints solver.
/// A SOR-Prox velocity-based constraints solver.
pub struct SORProx<N: Real> {
    // The solver holds no state; `PhantomData` only ties the scalar type `N`.
    _phantom: PhantomData<N>,
}
impl<N: Real> SORProx<N> {
/// Initialize a new velocity-based constraints solver.
pub fn new() -> Self {
SORProx {
_phantom: PhantomData,
}
}
/// Solve the given set of constraints.
pub fn solve(
&self,
unilateral_ground: &mut [UnilateralGroundConstraint<N>],
unilateral: &mut [UnilateralConstraint<N>],
bilateral_ground: &mut [BilateralGroundConstraint<N>],
bilateral: &mut [BilateralConstraint<N>],
mj_lambda: &mut DVector<N>,
jacobians: &[N],
max_iter: usize,
) {
/*
* Setup constraints.
*/
for c in unilateral.iter_mut() {
let dim1 = Dynamic::new(c.ndofs1);
let dim2 = Dynamic::new(c.ndofs2);
self.setup_unilateral(c, jacobians, mj_lambda, dim1, dim2);
}
for c in unilateral_ground.iter_mut() {
let dim = Dynamic::new(c.ndofs);
self.setup_unilateral_ground(c, jacobians, mj_lambda, dim);
}
for c in bilateral.iter_mut() {
let dim1 = Dynamic::new(c.ndofs1);
let dim2 = Dynamic::new(c.ndofs2);
self.setup_bilateral(c, jacobians, mj_lambda, dim1, dim2);
}
for c in bilateral_ground.iter_mut() {
self.setup_bilateral_ground(c, jacobians, mj_lambda, Dynamic::new(c.ndofs));
}
/*
* Solve.
*/
for _ in 0..max_iter {
self.step(
unilateral_ground,
unilateral,
bilateral_ground,
bilateral,
jacobians,
mj_lambda,
)
}
}
    /// One relaxation sweep over every constraint.
    ///
    /// For each constraint, the generalized dimensions are statically specialized
    /// to `SpatialDim` when both bodies are free rigid bodies (the common case),
    /// falling back to `Dynamic` dimensions otherwise.
    fn step(
        &self,
        unilateral_ground: &mut [UnilateralGroundConstraint<N>],
        unilateral: &mut [UnilateralConstraint<N>],
        bilateral_ground: &mut [BilateralGroundConstraint<N>],
        bilateral: &mut [BilateralConstraint<N>],
        jacobians: &[N],
        mj_lambda: &mut DVector<N>,
    ) {
        for c in unilateral.iter_mut() {
            if c.ndofs1 == SPATIAL_DIM && c.ndofs2 == SPATIAL_DIM {
                // Most common case (between two free rigid bodies).
                self.solve_unilateral(c, jacobians, mj_lambda, SpatialDim {}, SpatialDim {})
            } else {
                let dim1 = Dynamic::new(c.ndofs1);
                let dim2 = Dynamic::new(c.ndofs2);
                self.solve_unilateral(c, jacobians, mj_lambda, dim1, dim2)
            }
        }
        for c in unilateral_ground.iter_mut() {
            if c.ndofs == SPATIAL_DIM {
                // Most common case (with one free rigid body).
                // NOTE: it's weird that the compiler requires the { } even though SpatialDim is the
                // alias of a marker type.
                self.solve_unilateral_ground(c, jacobians, mj_lambda, SpatialDim {})
            } else {
                let dim = Dynamic::new(c.ndofs)
;
                self.solve_unilateral_ground(c, jacobians, mj_lambda, dim)
            }
        }
        for c in bilateral.iter_mut() {
            if c.ndofs1 == SPATIAL_DIM && c.ndofs2 == SPATIAL_DIM {
                // Most common case (between two free rigid bodies).
                self.solve_bilateral(
                    c,
                    unilateral,
                    jacobians,
                    mj_lambda,
                    SpatialDim {},
                    SpatialDim {},
                )
            } else {
                let dim1 = Dynamic::new(c.ndofs1);
                let dim2 = Dynamic::new(c.ndofs2);
                self.solve_bilateral(c, unilateral, jacobians, mj_lambda, dim1, dim2)
            }
        }
        for c in bilateral_ground.iter_mut() {
            if c.ndofs == SPATIAL_DIM {
                // Most common case (with one free rigid body).
                self.solve_bilateral_ground(
                    c,
                    unilateral_ground,
                    jacobians,
                    mj_lambda,
                    SpatialDim {},
                )
            } else {
                let dim = Dynamic::new(c.ndofs)
;
                self.solve_bilateral_ground(c, unilateral_ground, jacobians, mj_lambda, dim)
            }
        }
    }
    /// Relaxation step for one two-body unilateral constraint.
    ///
    /// The candidate impulse is projected onto the non-negative half-line
    /// (`na::sup` with zero) and the impulse delta is accumulated into both
    /// bodies' rows of `mj_lambda` through the weighted jacobians.
    fn solve_unilateral<D1: Dim, D2: Dim>(
        &self,
        c: &mut UnilateralConstraint<N>,
        jacobians: &[N],
        mj_lambda: &mut DVector<N>,
        dim1: D1,
        dim2: D2,
    ) {
        let id1 = c.assembly_id1;
        let id2 = c.assembly_id2;
        let jacobian1 = VectorSliceN::from_slice_generic(&jacobians[c.j_id1..], dim1, U1);
        let jacobian2 = VectorSliceN::from_slice_generic(&jacobians[c.j_id2..], dim2, U1);
        let weighted_jacobian1 = VectorSliceN::from_slice_generic(&jacobians[c.wj_id1..], dim1, U1);
        let weighted_jacobian2 = VectorSliceN::from_slice_generic(&jacobians[c.wj_id2..], dim2, U1);
        // Constraint-space velocity estimate: J1·λ1 + J2·λ2 + rhs
        // (mj_lambda presumably holds generalized velocity changes — confirm with caller).
        let dimpulse = jacobian1.dot(&mj_lambda.rows_generic(id1, dim1))
            + jacobian2.dot(&mj_lambda.rows_generic(id2, dim2)) + c.rhs;
        // Prox step: candidate impulse clamped to be non-negative.
        let new_impulse = na::sup(&N::zero(), &(c.impulse - c.r * dimpulse));
        let dlambda = new_impulse - c.impulse;
        c.impulse = new_impulse;
        mj_lambda
            .rows_generic_mut(id1, dim1)
            .axpy(dlambda, &weighted_jacobian1, N::one());
        mj_lambda
            .rows_generic_mut(id2, dim2)
            .axpy(dlambda, &weighted_jacobian2, N::one());
    }
    /// Relaxation step for one unilateral constraint involving a single
    /// dynamic body (the other side is "ground", i.e. not in the assembly).
    fn solve_unilateral_ground<D: Dim>(
        &self,
        c: &mut UnilateralGroundConstraint<N>,
        jacobians: &[N],
        mj_lambda: &mut DVector<N>,
        dim: D,
    ) {
        let jacobian = VectorSliceN::from_slice_generic(&jacobians[c.j_id..], dim, U1);
        let weighted_jacobian = VectorSliceN::from_slice_generic(&jacobians[c.wj_id..], dim, U1);
        let dimpulse = jacobian.dot(&mj_lambda.rows_generic_mut(c.assembly_id, dim)) + c.rhs;
        // Project the candidate impulse to be non-negative.
        let new_impulse = na::sup(&N::zero(), &(c.impulse - c.r * dimpulse));
        let dlambda = new_impulse - c.impulse;
        c.impulse = new_impulse;
        mj_lambda
            .rows_generic_mut(c.assembly_id, dim)
            .axpy(dlambda, &weighted_jacobian, N::one());
    }
    /// Relaxation step for one two-body bilateral constraint.
    ///
    /// The impulse is clamped to `[min_impulse, max_impulse]`. With
    /// `ImpulseLimits::Dependent`, the bound is `coeff` times the impulse of the
    /// referenced unilateral constraint (friction-cone-like coupling —
    /// presumably; confirm against the constraint builder). When that referenced
    /// impulse is zero, any previously accumulated impulse is subtracted back
    /// out of `mj_lambda` and reset.
    fn solve_bilateral<D1: Dim, D2: Dim>(
        &self,
        c: &mut BilateralConstraint<N>,
        unilateral: &[UnilateralConstraint<N>],
        jacobians: &[N],
        mj_lambda: &mut DVector<N>,
        dim1: D1,
        dim2: D2,
    ) {
        let id1 = c.assembly_id1;
        let id2 = c.assembly_id2;
        let min_impulse;
        let max_impulse;
        match c.limits {
            ImpulseLimits::Independent { min, max } => {
                min_impulse = min;
                max_impulse = max;
            }
            ImpulseLimits::Dependent { dependency, coeff } => {
                let impulse = unilateral[dependency].impulse;
                if impulse.is_zero() {
                    if !c.impulse.is_zero() {
                        // Roll back the previously applied impulse and zero it.
                        let wj1 =
                            VectorSliceN::from_slice_generic(&jacobians[c.wj_id1..], dim1, U1);
                        let wj2 =
                            VectorSliceN::from_slice_generic(&jacobians[c.wj_id2..], dim2, U1);
                        mj_lambda
                            .rows_generic_mut(id1, dim1)
                            .axpy(-c.impulse, &wj1, N::one());
                        mj_lambda
                            .rows_generic_mut(id2, dim2)
                            .axpy(-c.impulse, &wj2, N::one());
                        c.impulse = N::zero();
                    }
                    return;
                }
                max_impulse = coeff * impulse;
                min_impulse = -max_impulse;
            }
        }
        let jacobian1 = VectorSliceN::from_slice_generic(&jacobians[c.j_id1..], dim1, U1);
        let jacobian2 = VectorSliceN::from_slice_generic(&jacobians[c.j_id2..], dim2, U1);
        let weighted_jacobian1 = VectorSliceN::from_slice_generic(&jacobians[c.wj_id1..], dim1, U1);
        let weighted_jacobian2 = VectorSliceN::from_slice_generic(&jacobians[c.wj_id2..], dim2, U1);
        let dimpulse = jacobian1.dot(&mj_lambda.rows_generic(id1, dim1))
            + jacobian2.dot(&mj_lambda.rows_generic(id2, dim2)) + c.rhs;
        // Prox step: clamp the candidate impulse to the admissible interval.
        let new_impulse = na::clamp(c.impulse - c.r * dimpulse, min_impulse, max_impulse);
        let dlambda = new_impulse - c.impulse;
        c.impulse = new_impulse;
        mj_lambda
            .rows_generic_mut(id1, dim1)
            .axpy(dlambda, &weighted_jacobian1, N::one());
        mj_lambda
            .rows_generic_mut(id2, dim2)
            .axpy(dlambda, &weighted_jacobian2, N::one());
    }
    /// Relaxation step for one bilateral constraint involving a single dynamic
    /// body. Same limit handling as [`Self::solve_bilateral`], including the
    /// impulse rollback when a `Dependent` limit's referenced impulse is zero.
    fn solve_bilateral_ground<D: Dim>(
        &self,
        c: &mut BilateralGroundConstraint<N>,
        unilateral: &[UnilateralGroundConstraint<N>],
        jacobians: &[N],
        mj_lambda: &mut DVector<N>,
        dim: D,
    ) {
        let min_impulse;
        let max_impulse;
        match c.limits {
            ImpulseLimits::Independent { min, max } => {
                min_impulse = min;
                max_impulse = max;
            }
            ImpulseLimits::Dependent { dependency, coeff } => {
                let impulse = unilateral[dependency].impulse;
                if impulse.is_zero() {
                    if !c.impulse.is_zero() {
                        // Roll back the previously applied impulse and zero it.
                        let wj = VectorSliceN::from_slice_generic(&jacobians[c.wj_id..], dim, U1);
                        mj_lambda.rows_generic_mut(c.assembly_id, dim).axpy(
                            -c.impulse,
                            &wj,
                            N::one(),
                        );
                        c.impulse = N::zero();
                    }
                    return;
                }
                max_impulse = coeff * impulse;
                min_impulse = -max_impulse;
            }
        }
        let jacobian = VectorSliceN::from_slice_generic(&jacobians[c.j_id..], dim, U1);
        let weighted_jacobian = VectorSliceN::from_slice_generic(&jacobians[c.wj_id..], dim, U1);
        let dimpulse = jacobian.dot(&mj_lambda.rows_generic(c.assembly_id, dim)) + c.rhs;
        let new_impulse = na::clamp(c.impulse - c.r * dimpulse, min_impulse, max_impulse);
        let dlambda = new_impulse - c.impulse;
        c.impulse = new_impulse;
        mj_lambda
            .rows_generic_mut(c.assembly_id, dim)
            .axpy(dlambda, &weighted_jacobian, N::one());
    }
fn setup_unilateral<D1: Dim, D2: Dim>(
&self,
c: &UnilateralConstraint<N>,
jacobians: &[N],
mj_lambda: &mut DVector<N>,
dim1: D1,
dim2: D2,
) {
if !c.impulse.is_zero() {
let id1 = c.assembly_id1;
let id2 = c.assembly_id2;
let weighted_jacobian1 =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id1..], dim1, U1);
let weighted_jacobian2 =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id2..], dim2, U1);
mj_lambda
.rows_generic_mut(id1, dim1)
.axpy(c.impulse, &weighted_jacobian1, N::one());
mj_lambda
.rows_generic_mut(id2, dim2)
.axpy(c.impulse, &weighted_jacobian2, N::one());
}
}
fn setup_unilateral_ground<D: Dim>(
&self,
c: &UnilateralGroundConstraint<N>,
jacobians: &[N],
mj_lambda: &mut DVector<N>,
dim: D,
) {
if !c.impulse.is_zero() {
let weighted_jacobian =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id..], dim, U1);
mj_lambda.rows_generic_mut(c.assembly_id, dim).axpy(
c.impulse,
&weighted_jacobian,
N::one(),
);
}
}
fn setup_bilateral<D1: Dim, D2: Dim>(
&self,
c: &BilateralConstraint<N>,
jacobians: &[N],
mj_lambda: &mut DVector<N>,
dim1: D1,
dim2: D2,
) {
if !c.impulse.is_zero() {
let id1 = c.assembly_id1;
let id2 = c.assembly_id2;
let weighted_jacobian1 =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id1..], dim1, U1);
let weighted_jacobian2 =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id2..], dim2, U1);
mj_lambda
.rows_generic_mut(id1, dim1)
.axpy(c.impulse, &weighted_jacobian1, N::one());
mj_lambda
.rows_generic_mut(id2, dim2)
.axpy(c.impulse, &weighted_jacobian2, N::one());
}
}
fn setup_bilateral_ground<D: Dim>(
&self,
c: &BilateralGroundConstraint<N>,
jacobians: &[N],
mj_lambda: &mut DVector<N>,
dim: D,
) {
if !c.impulse.is_zero() {
let weighted_jacobian =
VectorSliceN::from_slice_generic(&jacobians[c.wj_id..], dim, U1);
mj_lambda.rows_generic_mut(c.assembly_id, dim).axpy(
c.impulse,
&weighted_jacobian,
N::one(),
);
}
}
}
| 34.745592 | 100 | 0.526171 |
50f13b19a099f0bd21ebed4ef7128d280ae1c423 | 3,821 | use crate::syntax::atom::Atom::{self, *};
use crate::syntax::set::OrderedSet as Set;
use crate::syntax::{Api, Derive, ExternType, Struct, Type};
use proc_macro2::Ident;
use quote::quote;
use std::collections::BTreeMap as Map;
use syn::{Error, Result};
/// All type information gathered while walking a bridge's `Api` items.
pub struct Types<'a> {
    /// Every distinct type mentioned anywhere (fields, arguments, returns),
    /// including types nested inside boxes, pointers and references.
    pub all: Set<'a, Type>,
    /// Shared structs declared in the bridge, keyed by their name.
    pub structs: Map<Ident, &'a Struct>,
    /// Names declared as opaque C++ types.
    pub cxx: Set<'a, Ident>,
    /// Names declared as opaque Rust types.
    pub rust: Set<'a, Ident>,
}
impl<'a> Types<'a> {
    /// Walks every bridge `Api` item, recording all mentioned types and
    /// indexing declared structs and opaque C++/Rust types by name.
    ///
    /// Returns an error when two declarations share the same type name.
    pub fn collect(apis: &'a [Api]) -> Result<Self> {
        let mut all = Set::new();
        let mut structs = Map::new();
        let mut cxx = Set::new();
        let mut rust = Set::new();
        // Records `ty` along with every type nested inside it
        // (box / unique_ptr / reference payloads).
        fn visit<'a>(all: &mut Set<'a, Type>, ty: &'a Type) {
            all.insert(ty);
            match ty {
                Type::Ident(_) | Type::Str(_) => {}
                Type::RustBox(ty) | Type::UniquePtr(ty) => visit(all, &ty.inner),
                Type::Ref(r) => visit(all, &r.inner),
            }
        }
        for api in apis {
            match api {
                Api::Include(_) => {}
                Api::Struct(strct) => {
                    let ident = &strct.ident;
                    // A struct name must not collide with any previously
                    // declared type of any kind.
                    if structs.contains_key(ident) || cxx.contains(ident) || rust.contains(ident) {
                        return Err(duplicate_struct(strct));
                    }
                    structs.insert(strct.ident.clone(), strct);
                    for field in &strct.fields {
                        visit(&mut all, &field.ty);
                    }
                }
                Api::CxxType(ety) => {
                    let ident = &ety.ident;
                    if structs.contains_key(ident) || cxx.contains(ident) || rust.contains(ident) {
                        return Err(duplicate_type(ety));
                    }
                    cxx.insert(ident);
                }
                Api::RustType(ety) => {
                    let ident = &ety.ident;
                    if structs.contains_key(ident) || cxx.contains(ident) || rust.contains(ident) {
                        return Err(duplicate_type(ety));
                    }
                    rust.insert(ident);
                }
                Api::CxxFunction(efn) | Api::RustFunction(efn) => {
                    // Function signatures contribute their argument types and
                    // (optional) return type.
                    for arg in &efn.args {
                        visit(&mut all, &arg.ty);
                    }
                    if let Some(ret) = &efn.ret {
                        visit(&mut all, ret);
                    }
                }
            }
        }
        Ok(Types {
            all,
            structs,
            cxx,
            rust,
        })
    }
    /// Whether `ty` is handled through an indirect ABI by the code generators:
    /// true for shared structs that are not POD and for Rust `String`.
    pub fn needs_indirect_abi(&self, ty: &Type) -> bool {
        match ty {
            Type::Ident(ident) => {
                if let Some(strct) = self.structs.get(ident) {
                    !self.is_pod(strct)
                } else {
                    Atom::from(ident) == Some(RustString)
                }
            }
            _ => false,
        }
    }
    /// A struct counts as plain-old-data exactly when it derives `Copy`.
    pub fn is_pod(&self, strct: &Struct) -> bool {
        // Idiomatic containment test instead of a manual search loop.
        strct.derives.contains(&Derive::Copy)
    }
}
impl<'t, 'a> IntoIterator for &'t Types<'a> {
    type Item = &'a Type;
    type IntoIter = crate::syntax::set::Iter<'t, 'a, Type>;
    // Iterating a `Types` yields every distinct type seen in the bridge.
    fn into_iter(self) -> Self::IntoIter {
        self.all.into_iter()
    }
}
fn duplicate_struct(strct: &Struct) -> Error {
let struct_token = strct.struct_token;
let ident = &strct.ident;
let range = quote!(#struct_token #ident);
Error::new_spanned(range, "duplicate type")
}
fn duplicate_type(ety: &ExternType) -> Error {
let type_token = ety.type_token;
let ident = &ety.ident;
let range = quote!(#type_token #ident);
Error::new_spanned(range, "duplicate type")
}
| 31.319672 | 99 | 0.455378 |
1d8cb076f234350e4333d50c856792f3fcd949b5 | 11,600 | // Copyright (c) The XPeer Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Suppose we have the following data structure in a smart contract:
//!
//! struct B {
//! Map<String, String> mymap;
//! }
//!
//! struct A {
//! B b;
//! int my_int;
//! }
//!
//! struct C {
//! List<int> mylist;
//! }
//!
//! A a;
//! C c;
//!
//! and the data belongs to Alice. Then an access to `a.b.mymap` would be translated to an access
//! to an entry in key-value store whose key is `<Alice>/a/b/mymap`. In the same way, the access to
//! `c.mylist` would need to query `<Alice>/c/mylist`.
//!
//! So an account stores its data in a directory structure, for example:
//! <Alice>/balance: 10
//! <Alice>/a/b/mymap: {"Bob" => "abcd", "Carol" => "efgh"}
//! <Alice>/a/myint: 20
//! <Alice>/c/mylist: [3, 5, 7, 9]
//!
//! If someone needs to query the map above and find out what value associated with "Bob" is,
//! `address` will be set to Alice and `path` will be set to "/a/b/mymap/Bob".
//!
//! On the other hand, if you want to query only <Alice>/a/*, `address` will be set to Alice and
//! `path` will be set to "/a" and use the `get_prefix()` method from statedb
// This is caused by deriving Arbitrary for AccessPath.
#![allow(clippy::unit_arg)]
use crate::{
account_address::AccountAddress,
account_config::{
account_received_event_path, account_resource_path, account_sent_event_path,
association_address,
},
language_storage::{ModuleId, ResourceKey, StructTag},
validator_set::validator_set_path,
};
use canonical_serialization::{
CanonicalDeserialize, CanonicalDeserializer, CanonicalSerialize, CanonicalSerializer,
};
use crypto::hash::{CryptoHash, HashValue};
use failure::prelude::*;
use hex;
use lazy_static::lazy_static;
use proptest_derive::Arbitrary;
use proto_conv::{FromProto, IntoProto};
use radix_trie::TrieKey;
use serde::{Deserialize, Serialize};
use std::{
fmt::{self, Formatter},
slice::Iter,
str::{self, FromStr},
};
/// A single named component of an access path (e.g. a struct field name).
#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Hash, Eq, Clone, Ord, PartialOrd)]
pub struct Field(String);
impl Field {
    /// Creates a field from a string slice, copying it into an owned name.
    pub fn new(s: &str) -> Field {
        Field(s.to_string())
    }
    /// Borrows the underlying field name.
    pub fn name(&self) -> &String {
        &self.0
    }
}
impl From<String> for Field {
    fn from(s: String) -> Self {
        Field(s)
    }
}
impl fmt::Display for Field {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// One step in an access path: either a named field or a numeric index
/// (e.g. into a list).
#[derive(Eq, Hash, Serialize, Deserialize, Debug, Clone, PartialEq, Ord, PartialOrd)]
pub enum Access {
    Field(Field),
    Index(u64),
}
impl Access {
    /// Convenience constructor for a named-field access.
    pub fn new(s: &str) -> Self {
        Access::Field(Field::new(s))
    }
}
impl FromStr for Access {
    type Err = ::std::num::ParseIntError;
    /// Parses a path segment: segments that parse as `u64` become indices,
    /// anything else becomes a named field. Note that this conversion never
    /// actually produces an error despite the declared `Err` type.
    fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> {
        if let Ok(idx) = s.parse::<u64>() {
            Ok(Access::Index(idx))
        } else {
            Ok(Access::Field(Field::new(s)))
        }
    }
}
impl fmt::Display for Access {
    // Fields render quoted, indices render bare, keeping the two variants
    // distinguishable in printed paths.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Access::Field(field) => write!(f, "\"{}\"", field),
            Access::Index(i) => write!(f, "{}", i),
        }
    }
}
/// Non-empty sequence of field accesses
///
/// NOTE(review): `Accesses::empty()` below does construct a zero-length
/// sequence, so the "non-empty" invariant is not actually enforced — methods
/// like `last()` and `split_first()` panic on such a value. Confirm intent.
#[derive(Eq, Hash, Serialize, Deserialize, Debug, Clone, PartialEq, Ord, PartialOrd)]
pub struct Accesses(Vec<Access>);
/// SEPARATOR is used as a delimiter between fields. It should not be a legal part of any identifier
/// in the language
const SEPARATOR: char = '/';
impl Accesses {
    /// Creates a zero-length access sequence.
    pub fn empty() -> Self {
        Accesses(vec![])
    }

    /// Creates a sequence containing the single field access `field`.
    pub fn new(field: Field) -> Self {
        Accesses(vec![Access::Field(field)])
    }

    /// Add a field to the end of the sequence
    pub fn add_field_to_back(&mut self, field: Field) {
        self.0.push(Access::Field(field))
    }

    /// Add an index to the end of the sequence
    pub fn add_index_to_back(&mut self, idx: u64) {
        self.0.push(Access::Index(idx))
    }

    /// Moves all accesses of `accesses` to the end of this sequence, leaving
    /// `accesses` empty.
    pub fn append(&mut self, accesses: &mut Accesses) {
        self.0.append(&mut accesses.0)
    }

    /// Returns the first field in the sequence and reference to the remaining fields
    ///
    /// Panics if the sequence is empty (e.g. one built via `empty()`).
    pub fn split_first(&self) -> (&Access, &[Access]) {
        self.0.split_first().unwrap()
    }

    /// Return the last access in the sequence
    ///
    /// Panics if the sequence is empty (e.g. one built via `empty()`).
    pub fn last(&self) -> &Access {
        self.0.last().unwrap()
    }

    /// Iterates over the accesses in order.
    pub fn iter(&self) -> Iter<'_, Access> {
        self.0.iter()
    }

    /// Number of accesses in the sequence.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Renders the sequence as `seg/seg/.../`, with a trailing `SEPARATOR`
    /// after every segment.
    pub fn as_separated_string(&self) -> String {
        let mut path = String::new();
        for access in self.0.iter() {
            match access {
                Access::Field(s) => {
                    let access_str = s.name().as_ref();
                    // An empty field name would corrupt the separator-based
                    // encoding, producing a phantom empty segment.
                    assert!(!access_str.is_empty());
                    path.push_str(access_str)
                }
                Access::Index(i) => path.push_str(i.to_string().as_ref()),
            };
            path.push(SEPARATOR);
        }
        path
    }

    /// Returns a copy of the first `new_len` accesses.
    ///
    /// Panics if `new_len` exceeds the sequence length.
    pub fn take_nth(&self, new_len: usize) -> Accesses {
        assert!(self.0.len() >= new_len);
        // Copy only the requested prefix instead of cloning the whole vector
        // and discarding the tail.
        Accesses(self.0[..new_len].to_vec())
    }
}
impl<'a> IntoIterator for &'a Accesses {
    type Item = &'a Access;
    type IntoIter = Iter<'a, Access>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
impl From<Vec<Access>> for Accesses {
    fn from(accesses: Vec<Access>) -> Accesses {
        Accesses(accesses)
    }
}
impl From<Vec<u8>> for Accesses {
    // Decodes the access suffix out of a raw resource-key byte buffer.
    // Assumes the layout produced by `AccessPath::resource_access_vec`:
    // one tag byte plus a hash of `HashValue::LENGTH` bytes, followed by a
    // UTF-8, SEPARATOR-delimited access string. Panics if the suffix is not
    // valid UTF-8 (from_utf8 unwrap below).
    fn from(mut raw_bytes: Vec<u8>) -> Accesses {
        let access_str = String::from_utf8(raw_bytes.split_off(HashValue::LENGTH + 1)).unwrap();
        let fields_str = access_str.split(SEPARATOR).collect::<Vec<&str>>();
        let mut accesses = vec![];
        for access_str in fields_str.into_iter() {
            // Skip empty segments (e.g. produced by the trailing separator).
            if access_str != "" {
                accesses.push(Access::from_str(access_str).unwrap());
            }
        }
        Accesses::from(accesses)
    }
}
impl TrieKey for Accesses {
    // Trie keys use the separator-joined textual encoding of the path.
    fn encode_bytes(&self) -> Vec<u8> {
        self.as_separated_string().into_bytes()
    }
}
lazy_static! {
    /// The access path where the Validator Set resource is stored.
    pub static ref VALIDATOR_SET_ACCESS_PATH: AccessPath =
        AccessPath::new(association_address(), validator_set_path());
}
/// A globally unique path to a piece of on-chain data: the owning account's
/// address plus a raw byte path identifying a resource or module (and,
/// optionally, a location inside it). See the module docs for the encoding.
#[derive(
    Clone,
    Eq,
    PartialEq,
    Default,
    Hash,
    Serialize,
    Deserialize,
    Ord,
    PartialOrd,
    Arbitrary,
    FromProto,
    IntoProto,
)]
#[ProtoType(crate::proto::access_path::AccessPath)]
pub struct AccessPath {
    /// Account under which the data is stored.
    pub address: AccountAddress,
    /// Raw path bytes (tag byte + type hash + access suffix for resources).
    pub path: Vec<u8>,
}
impl AccessPath {
    /// Tag byte prefixing paths that point at module (code) data.
    const CODE_TAG: u8 = 0;
    /// Tag byte prefixing paths that point at resource data.
    const RESOURCE_TAG: u8 = 1;
    /// Builds an access path from its raw parts.
    pub fn new(address: AccountAddress, path: Vec<u8>) -> Self {
        AccessPath { address, path }
    }
    /// Given an address, returns the corresponding access path that stores the Account resource.
    pub fn new_for_account(address: AccountAddress) -> Self {
        Self::new(address, account_resource_path())
    }
    /// Create an AccessPath for a ContractEvent.
    /// That is an AccessPath that uniquely identifies a given event for a published resource.
    pub fn new_for_event(address: AccountAddress, root: &[u8], key: &[u8]) -> Self {
        // Encoded as `<root>/<key>/`, with a '/' after each component.
        let mut path: Vec<u8> = Vec::new();
        path.extend_from_slice(root);
        path.push(b'/');
        path.extend_from_slice(key);
        path.push(b'/');
        Self::new(address, path)
    }
    /// Create an AccessPath to the event for the sender account in a deposit operation.
    /// The sent counter in XPeerAccount.T (XPeerAccount.T.sent_events_count) is used to generate
    /// the AccessPath.
    /// That AccessPath can be used as a key into the event storage to retrieve all sent
    /// events for a given account.
    pub fn new_for_sent_event(address: AccountAddress) -> Self {
        Self::new(address, account_sent_event_path())
    }
    /// Create an AccessPath to the event for the target account (the receiver)
    /// in a deposit operation.
    /// The received counter in XPeerAccount.T (XPeerAccount.T.received_events_count) is used to
    /// generate the AccessPath.
    /// That AccessPath can be used as a key into the event storage to retrieve all received
    /// events for a given account.
    pub fn new_for_received_event(address: AccountAddress) -> Self {
        Self::new(address, account_received_event_path())
    }
    /// Encodes a resource path: `RESOURCE_TAG` byte, then the struct tag's
    /// hash, then the separator-joined access suffix.
    pub fn resource_access_vec(tag: &StructTag, accesses: &Accesses) -> Vec<u8> {
        let mut key = vec![];
        key.push(Self::RESOURCE_TAG);
        key.append(&mut tag.hash().to_vec());
        // We don't need accesses in production right now. Accesses are appended here just for
        // passing the old tests.
        key.append(&mut accesses.as_separated_string().into_bytes());
        key
    }
    /// Convert Accesses into a byte offset which would be used by the storage layer to resolve
    /// where fields are stored.
    pub fn resource_access_path(key: &ResourceKey, accesses: &Accesses) -> AccessPath {
        let path = AccessPath::resource_access_vec(&key.type_(), accesses);
        AccessPath {
            address: key.address().to_owned(),
            path,
        }
    }
    /// Encodes a module path: `CODE_TAG` byte followed by the module id hash.
    fn code_access_path_vec(key: &ModuleId) -> Vec<u8> {
        let mut root = vec![];
        root.push(Self::CODE_TAG);
        root.append(&mut key.hash().to_vec());
        root
    }
    /// Builds the access path under which a module's code is stored.
    pub fn code_access_path(key: &ModuleId) -> AccessPath {
        let path = AccessPath::code_access_path_vec(key);
        AccessPath {
            address: *key.address(),
            path,
        }
    }
}
impl fmt::Debug for AccessPath {
    // Low-level rendering: the address plus the raw path as hex, without
    // decoding the tag/hash/suffix structure (see `Display` for that).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "AccessPath {{ address: {:x}, path: {} }}",
            self.address,
            hex::encode(&self.path)
        )
    }
}
impl fmt::Display for AccessPath {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Paths shorter than tag + hash cannot be decoded; fall back to Debug.
        if self.path.len() < 1 + HashValue::LENGTH {
            write!(f, "{:?}", self)
        } else {
            write!(f, "AccessPath {{ address: {:x}, ", self.address)?;
            // First byte is the kind tag (resource vs. module).
            match self.path[0] {
                Self::RESOURCE_TAG => write!(f, "type: Resource, ")?,
                Self::CODE_TAG => write!(f, "type: Module, ")?,
                tag => write!(f, "type: {:?}, ", tag)?,
            };
            // Next HashValue::LENGTH bytes are the type/module hash...
            write!(
                f,
                "hash: {:?}, ",
                hex::encode(&self.path[1..=HashValue::LENGTH])
            )?;
            // ...followed by the human-readable access suffix.
            write!(
                f,
                "suffix: {:?} }} ",
                String::from_utf8_lossy(&self.path[1 + HashValue::LENGTH..])
            )
        }
    }
}
impl CanonicalSerialize for AccessPath {
    // Wire format: address struct followed by the length-prefixed path bytes.
    fn serialize(&self, serializer: &mut impl CanonicalSerializer) -> Result<()> {
        serializer
            .encode_struct(&self.address)?
            .encode_variable_length_bytes(&self.path)?;
        Ok(())
    }
}
impl CanonicalDeserialize for AccessPath {
    // Must mirror `serialize` above: address first, then path bytes.
    fn deserialize(deserializer: &mut impl CanonicalDeserializer) -> Result<Self> {
        let address = deserializer.decode_struct::<AccountAddress>()?;
        let path = deserializer.decode_variable_length_bytes()?;
        Ok(Self { address, path })
    }
}
| 29.441624 | 100 | 0.592241 |
fe8ab36ebb5f4eeffc987a7fbb803b7a2f46cd9b | 104,586 | //! Activation frames
use crate::avm2::array::ArrayStorage;
use crate::avm2::class::Class;
use crate::avm2::domain::Domain;
use crate::avm2::method::{BytecodeMethod, Method, ParamConfig};
use crate::avm2::names::{Multiname, Namespace, QName};
use crate::avm2::object::{
ArrayObject, ByteArrayObject, ClassObject, FunctionObject, NamespaceObject, ScriptObject,
};
use crate::avm2::object::{Object, TObject};
use crate::avm2::scope::{Scope, ScopeChain, ScopeStack};
use crate::avm2::script::Script;
use crate::avm2::value::Value;
use crate::avm2::{value, Avm2, Error};
use crate::context::UpdateContext;
use crate::string::AvmString;
use crate::swf::extensions::ReadSwfExt;
use gc_arena::{Gc, GcCell, MutationContext};
use smallvec::SmallVec;
use std::borrow::Cow;
use std::cmp::{min, Ordering};
use swf::avm2::read::Reader;
use swf::avm2::types::{
Class as AbcClass, Index, Method as AbcMethod, Multiname as AbcMultiname,
Namespace as AbcNamespace, Op,
};
/// Represents a particular register set.
///
/// This type exists primarily because SmallVec isn't garbage-collectable.
#[derive(Clone)]
pub struct RegisterSet<'gc>(SmallVec<[Value<'gc>; 8]>);
unsafe impl<'gc> gc_arena::Collect for RegisterSet<'gc> {
    #[inline]
    fn trace(&self, cc: gc_arena::CollectionContext) {
        // Trace each register by hand, since `SmallVec` itself has no
        // `Collect` implementation.
        for register in &self.0 {
            register.trace(cc);
        }
    }
}
impl<'gc> RegisterSet<'gc> {
    /// Create a new register set with a given number of specified registers.
    ///
    /// The given registers will be set to `undefined`.
    pub fn new(num: u32) -> Self {
        Self(smallvec![Value::Undefined; num as usize])
    }
    /// Return a reference to a given register, if it exists.
    ///
    /// Returns `None` for out-of-range register numbers.
    pub fn get(&self, num: u32) -> Option<&Value<'gc>> {
        self.0.get(num as usize)
    }
    /// Return a mutable reference to a given register, if it exists.
    pub fn get_mut(&mut self, num: u32) -> Option<&mut Value<'gc>> {
        self.0.get_mut(num as usize)
    }
}
/// Internal control-flow signal used while interpreting bytecode: either
/// keep executing the current method, or unwind with a return value.
#[derive(Debug, Clone)]
enum FrameControl<'gc> {
    Continue,
    Return(Value<'gc>),
}
/// Represents a single activation of a given AVM2 function or keyframe.
pub struct Activation<'a, 'gc: 'a, 'gc_context: 'a> {
    /// The immutable value of `this`.
    #[allow(dead_code)]
    this: Option<Object<'gc>>,
    /// The arguments this function was called by.
    #[allow(dead_code)]
    arguments: Option<Object<'gc>>,
    /// Flags that the current activation frame is being executed and has a
    /// reader object copied from it. Taking out two readers on the same
    /// activation frame is a programming error.
    is_executing: bool,
    /// Amount of actions performed since the last timeout check
    actions_since_timeout_check: u16,
    /// Local registers.
    ///
    /// All activations have local registers, but it is possible for multiple
    /// activations (such as a rescope) to execute from the same register set.
    local_registers: GcCell<'gc, RegisterSet<'gc>>,
    /// What was returned from the function.
    ///
    /// A return value of `None` indicates that the called function is still
    /// executing. Functions that do not return instead return `Undefined`.
    #[allow(dead_code)]
    return_value: Option<Value<'gc>>,
    /// The current scope stack.
    scope_stack: ScopeStack<'gc>,
    /// This represents the outer scope of the method that is executing.
    ///
    /// The outer scope gives an activation access to the "outer world", including
    /// the current Domain.
    outer: ScopeChain<'gc>,
    /// The domain of the original AS3 caller.
    ///
    /// This is intended exclusively for builtin methods to access the domain of the
    /// bytecode method that called it.
    ///
    /// If this activation was not made for a builtin method, this will be the
    /// current domain instead.
    caller_domain: Domain<'gc>,
    /// The class that yielded the currently executing method.
    ///
    /// This is used to maintain continuity when multiple methods supercall
    /// into one another. For example, if a class method supercalls a
    /// grandparent class's method, then this value will be the grandparent's
    /// class object. Then, if we supercall again, we look up supermethods from
    /// the great-grandparent class, preventing us from accidentally calling
    /// the same method again.
    ///
    /// This will not be available outside of method, setter, or getter calls.
    subclass_object: Option<ClassObject<'gc>>,
    /// The class of all objects returned from `newactivation`.
    ///
    /// In method calls that call for an activation object, this will be
    /// configured as the anonymous class whose traits match the method's
    /// declared traits.
    ///
    /// If this is `None`, then the method did not ask for an activation object
    /// and we will not allocate a class for one.
    activation_class: Option<ClassObject<'gc>>,
    /// The player update context this activation runs inside; provides GC
    /// access (`gc_context`) and the shared AVM2 state (`avm2`).
    pub context: UpdateContext<'a, 'gc, 'gc_context>,
}
impl<'a, 'gc, 'gc_context> Activation<'a, 'gc, 'gc_context> {
    /// Construct an activation that does not represent any particular scope.
    ///
    /// This exists primarily for non-AVM2 related manipulations of the
    /// interpreter environment that require an activation. For example,
    /// loading traits into an object, or running tests.
    ///
    /// It is a logic error to attempt to run AVM2 code in a nothing
    /// `Activation`.
    pub fn from_nothing(context: UpdateContext<'a, 'gc, 'gc_context>) -> Self {
        // No method is running, so no registers are needed.
        let local_registers = GcCell::allocate(context.gc_context, RegisterSet::new(0));
        Self {
            this: None,
            arguments: None,
            is_executing: false,
            actions_since_timeout_check: 0,
            local_registers,
            return_value: None,
            scope_stack: ScopeStack::new(),
            // With no real caller, fall back to the globals domain.
            outer: ScopeChain::new(context.avm2.globals),
            caller_domain: context.avm2.globals,
            subclass_object: None,
            activation_class: None,
            context,
        }
    }
    /// Construct an activation for the execution of a particular script's
    /// initializer method.
    pub fn from_script(
        context: UpdateContext<'a, 'gc, 'gc_context>,
        script: Script<'gc>,
    ) -> Result<Self, Error> {
        let (method, global_object, domain) = script.init();
        // Native initializers have no bytecode body and thus no locals.
        let num_locals = match method {
            Method::Native { .. } => 0,
            Method::Bytecode(bytecode) => {
                let body: Result<_, Error> = bytecode.body().ok_or_else(|| {
                    "Cannot execute non-native method (for script) without body".into()
                });
                body?.num_locals
            }
        };
        // Register 0 is reserved for `this` — here, the script's global object.
        let local_registers =
            GcCell::allocate(context.gc_context, RegisterSet::new(num_locals + 1));
        *local_registers
            .write(context.gc_context)
            .get_mut(0)
            .unwrap() = global_object.into();
        Ok(Self {
            this: Some(global_object),
            arguments: None,
            is_executing: false,
            actions_since_timeout_check: 0,
            local_registers,
            return_value: None,
            scope_stack: ScopeStack::new(),
            outer: ScopeChain::new(domain),
            caller_domain: domain,
            subclass_object: None,
            activation_class: None,
            context,
        })
    }
    /// Finds an object on either the current or outer scope of this activation by definition.
    ///
    /// The local scope stack is searched before the outer scope chain; the
    /// boolean passed to `ScopeStack::find` tells it whether the outer chain
    /// is empty.
    pub fn find_definition(&mut self, name: &Multiname<'gc>) -> Result<Option<Object<'gc>>, Error> {
        let outer_scope = self.outer;
        if let Some(obj) = self.scope_stack.find(name, outer_scope.is_empty())? {
            Ok(Some(obj))
        } else if let Some(obj) = outer_scope.find(name, self)? {
            Ok(Some(obj))
        } else {
            Ok(None)
        }
    }
    /// Resolves a definition using either the current or outer scope of this activation.
    ///
    /// Unlike `find_definition`, this returns the property *value* rather
    /// than the object that holds it.
    pub fn resolve_definition(
        &mut self,
        name: &Multiname<'gc>,
    ) -> Result<Option<Value<'gc>>, Error> {
        let outer_scope = self.outer;
        if let Some(obj) = self.scope_stack.find(name, outer_scope.is_empty())? {
            // Found on the local stack: read the named property off it.
            Ok(Some(obj.get_property(obj, name, self)?))
        } else if let Some(result) = outer_scope.resolve(name, self)? {
            Ok(Some(result))
        } else {
            Ok(None)
        }
    }
    /// Resolve a type name to a class.
    ///
    /// This returns an error if a type is named but does not exist; or if the
    /// typed named is not a class object.
    ///
    /// Returns `Ok(None)` for the any type (`*`).
    fn resolve_type(
        &mut self,
        type_name: Multiname<'gc>,
    ) -> Result<Option<ClassObject<'gc>>, Error> {
        if type_name.is_any() {
            return Ok(None);
        }
        let class = self
            .resolve_definition(&type_name)?
            .ok_or_else(|| format!("Could not resolve parameter type {:?}", type_name))?
            .coerce_to_object(self)?;
        let class = class
            .as_class_object()
            .ok_or_else(|| format!("Resolved parameter type {:?} is not a class", type_name))?;
        // Type parameters should specialize the returned class.
        // Unresolvable parameter types are treated as Any, which is treated as
        // Object.
        if !type_name.params().is_empty() {
            let mut param_types = Vec::with_capacity(type_name.params().len());
            for param in type_name.params() {
                // Recursively resolve each parameter; `*` becomes Null here.
                param_types.push(match self.resolve_type(param.clone())? {
                    Some(o) => Value::Object(o.into()),
                    None => Value::Null,
                });
            }
            return Ok(Some(class.apply(self, &param_types[..])?));
        }
        Ok(Some(class))
    }
    /// Resolve a single parameter value.
    ///
    /// Given an individual parameter value and the associated parameter's
    /// configuration, return what value should be stored in the called
    /// function's local registers (or an error, if the parameter violates the
    /// signature of the current called method).
    fn resolve_parameter(
        &mut self,
        method_name: &str,
        value: Option<&Value<'gc>>,
        param_config: &ParamConfig<'gc>,
        index: usize,
    ) -> Result<Value<'gc>, Error> {
        // Prefer the caller-provided value, then the declared default; an
        // untyped (`*`) parameter with neither becomes `undefined`, and a
        // typed one with neither is a signature violation.
        let arg = if let Some(value) = value {
            Cow::Borrowed(value)
        } else if let Some(default) = &param_config.default_value {
            Cow::Borrowed(default)
        } else if param_config.param_type_name.is_any() {
            return Ok(Value::Undefined);
        } else {
            return Err(format!(
                "Param {} (index {}) was missing when calling {}",
                param_config.param_name, index, method_name
            )
            .into());
        };
        // Coerce to the declared type if one resolves; otherwise pass through.
        let type_name = param_config.param_type_name.clone();
        let param_type = self.resolve_type(type_name)?;
        if let Some(param_type) = param_type {
            arg.coerce_to_type(self, param_type)
        } else {
            Ok(arg.into_owned())
        }
    }
/// Statically resolve all of the parameters for a given method.
///
/// This function makes no attempt to enforce a given method's parameter
/// count limits or to package variadic arguments.
///
/// The returned list of parameters will be coerced to the stated types in
/// the signature, with missing parameters filled in with defaults.
pub fn resolve_parameters(
&mut self,
method_name: &str,
user_arguments: &[Value<'gc>],
signature: &[ParamConfig<'gc>],
) -> Result<Vec<Value<'gc>>, Error> {
let mut arguments_list = Vec::new();
for (i, (arg, param_config)) in user_arguments.iter().zip(signature.iter()).enumerate() {
arguments_list.push(self.resolve_parameter(method_name, Some(arg), param_config, i)?);
}
match user_arguments.len().cmp(&signature.len()) {
Ordering::Greater => {
//Variadic parameters exist, just push them into the list
arguments_list.extend_from_slice(&user_arguments[signature.len()..])
}
Ordering::Less => {
//Apply remaining default parameters
for (i, param_config) in signature[user_arguments.len()..].iter().enumerate() {
arguments_list.push(self.resolve_parameter(
method_name,
None,
param_config,
i + user_arguments.len(),
)?);
}
}
_ => {}
}
Ok(arguments_list)
}
    /// Construct an activation for the execution of a particular bytecode
    /// method.
    pub fn from_method(
        mut context: UpdateContext<'a, 'gc, 'gc_context>,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        outer: ScopeChain<'gc>,
        this: Option<Object<'gc>>,
        user_arguments: &[Value<'gc>],
        subclass_object: Option<ClassObject<'gc>>,
        callee: Object<'gc>,
    ) -> Result<Self, Error> {
        let body: Result<_, Error> = method
            .body()
            .ok_or_else(|| "Cannot execute non-native method without body".into());
        let body = body?;
        let num_locals = body.num_locals;
        let has_rest_or_args = method.is_variadic();
        // Variadic methods get one extra register holding the rest/arguments array.
        let arg_register = if has_rest_or_args { 1 } else { 0 };
        let signature = method.signature();
        // Non-variadic methods reject surplus arguments outright.
        if user_arguments.len() > signature.len() && !has_rest_or_args {
            return Err(format!(
                "Attempted to call {:?} with {} arguments (more than {} is prohibited)",
                method.method_name(),
                user_arguments.len(),
                signature.len()
            )
            .into());
        }
        let num_declared_arguments = signature.len() as u32;
        // Register layout: [0] = this, [1..] = declared args, then the
        // optional rest/arguments array, then the method's own locals.
        let local_registers = GcCell::allocate(
            context.gc_context,
            RegisterSet::new(num_locals + num_declared_arguments + arg_register + 1),
        );
        {
            let mut write = local_registers.write(context.gc_context);
            *write.get_mut(0).unwrap() = this.map(|t| t.into()).unwrap_or(Value::Null);
        }
        // Methods that use `newactivation` need an anonymous class whose
        // traits mirror the method's declared activation traits.
        let activation_class = if method.method().needs_activation {
            let translation_unit = method.translation_unit();
            let abc_method = method.method();
            let mut dummy_activation = Activation::from_nothing(context.reborrow());
            dummy_activation.set_outer(outer);
            let activation_class =
                Class::for_activation(&mut dummy_activation, translation_unit, abc_method, body)?;
            let activation_class_object =
                ClassObject::from_class(&mut dummy_activation, activation_class, None)?;
            drop(dummy_activation);
            Some(activation_class_object)
        } else {
            None
        };
        let mut activation = Self {
            this,
            arguments: None,
            is_executing: false,
            actions_since_timeout_check: 0,
            local_registers,
            return_value: None,
            scope_stack: ScopeStack::new(),
            outer,
            caller_domain: outer.domain(),
            subclass_object,
            activation_class,
            context,
        };
        //Statically verify all non-variadic, provided parameters.
        let arguments_list =
            activation.resolve_parameters(method.method_name(), user_arguments, signature)?;
        {
            // Copy the coerced declared arguments into registers 1..=N.
            let mut write = local_registers.write(activation.context.gc_context);
            for (i, arg) in arguments_list[0..min(signature.len(), arguments_list.len())]
                .iter()
                .enumerate()
            {
                *write.get_mut(1 + i as u32).unwrap() = arg.clone();
            }
        }
        if has_rest_or_args {
            // `arguments` captures every argument; `...rest` only the surplus
            // beyond the declared signature.
            let args_array = if method.method().needs_arguments_object {
                ArrayStorage::from_args(&arguments_list)
            } else if method.method().needs_rest {
                if let Some(rest_args) = arguments_list.get(signature.len()..) {
                    ArrayStorage::from_args(rest_args)
                } else {
                    ArrayStorage::new(0)
                }
            } else {
                unreachable!();
            };
            let mut args_object = ArrayObject::from_storage(&mut activation, args_array)?;
            if method.method().needs_arguments_object {
                // `arguments.callee` refers back to the function being called.
                args_object.set_property(
                    args_object,
                    &QName::new(Namespace::public(), "callee").into(),
                    callee.into(),
                    &mut activation,
                )?;
            }
            *local_registers
                .write(activation.context.gc_context)
                .get_mut(1 + num_declared_arguments)
                .unwrap() = args_object.into();
        }
        Ok(activation)
    }
    /// Construct an activation for the execution of a builtin method.
    ///
    /// It is a logic error to attempt to execute builtins within the same
    /// activation as the method or script that called them. You must use this
    /// function to construct a new activation for the builtin so that it can
    /// properly supercall.
    pub fn from_builtin(
        context: UpdateContext<'a, 'gc, 'gc_context>,
        this: Option<Object<'gc>>,
        subclass_object: Option<ClassObject<'gc>>,
        outer: ScopeChain<'gc>,
        caller_domain: Domain<'gc>,
    ) -> Result<Self, Error> {
        // Builtins run native code, so no bytecode registers are needed.
        let local_registers = GcCell::allocate(context.gc_context, RegisterSet::new(0));
        Ok(Self {
            this,
            arguments: None,
            is_executing: false,
            actions_since_timeout_check: 0,
            local_registers,
            return_value: None,
            scope_stack: ScopeStack::new(),
            outer,
            caller_domain,
            subclass_object,
            activation_class: None,
            context,
        })
    }
/// Execute a script initializer.
pub fn run_stack_frame_for_script(&mut self, script: Script<'gc>) -> Result<(), Error> {
let init = script.init().0.into_bytecode()?;
self.run_actions(init)?;
Ok(())
}
    /// Call the superclass's instance initializer.
    ///
    /// Errors if the currently executing method's class has no known
    /// superclass (or no subclass object is set at all).
    pub fn super_init(
        &mut self,
        receiver: Object<'gc>,
        args: &[Value<'gc>],
    ) -> Result<Value<'gc>, Error> {
        let superclass_object = self
            .subclass_object()
            .and_then(|c| c.superclass_object())
            .ok_or_else(|| {
                Error::from("Attempted to call super constructor without a superclass.")
            });
        let superclass_object = superclass_object?;
        superclass_object.call_native_init(Some(receiver), args, self, Some(superclass_object))
    }
    /// Attempts to lock the activation frame for execution.
    ///
    /// If this frame is already executing, that is an error condition.
    /// The lock is released again via `unlock_execution`.
    pub fn lock(&mut self) -> Result<(), Error> {
        if self.is_executing {
            return Err("Attempted to execute the same frame twice".into());
        }
        self.is_executing = true;
        Ok(())
    }
    /// Unlock the activation object. This allows future execution to run on it
    /// again.
    pub fn unlock_execution(&mut self) {
        self.is_executing = false;
    }
/// Retrieve a local register.
pub fn local_register(&self, id: u32) -> Result<Value<'gc>, Error> {
self.local_registers
.read()
.get(id)
.cloned()
.ok_or_else(|| format!("Out of bounds register read: {}", id).into())
}
/// Set a local register.
///
/// Returns `true` if the set was successful; `false` otherwise
pub fn set_local_register(
&mut self,
id: u32,
value: impl Into<Value<'gc>>,
mc: MutationContext<'gc, '_>,
) -> Result<(), Error> {
if let Some(r) = self.local_registers.write(mc).get_mut(id) {
*r = value.into();
Ok(())
} else {
Err(format!("Out of bounds register write: {}", id).into())
}
}
/// Sets the outer scope of this activation
pub fn set_outer(&mut self, new_outer: ScopeChain<'gc>) {
self.outer = new_outer;
}
    /// Creates a new ScopeChain by chaining the current state of this
    /// activation's scope stack with the outer scope.
    pub fn create_scopechain(&self) -> ScopeChain<'gc> {
        self.outer
            .chain(self.context.gc_context, self.scope_stack.scopes())
    }
    /// Returns the domain of the original AS3 caller.
    pub fn caller_domain(&self) -> Domain<'gc> {
        self.caller_domain
    }
/// Returns the global scope of this activation.
///
/// The global scope refers to scope at the bottom of the
/// outer scope. If the outer scope is empty, we use the bottom
/// of the current scope stack instead.
///
/// A return value of `None` implies that both the outer scope, and
/// the current scope stack were both empty.
pub fn global_scope(&self) -> Option<Object<'gc>> {
let outer_scope = self.outer;
outer_scope
.get(0)
.or_else(|| self.scope_stack.get(0))
.map(|scope| scope.values())
}
    /// Get a mutable reference to the AVM2 interpreter state for this context.
    pub fn avm2(&mut self) -> &mut Avm2<'gc> {
        self.context.avm2
    }
    /// Set the return value.
    pub fn set_return_value(&mut self, value: Value<'gc>) {
        self.return_value = Some(value);
    }
    /// Get the base prototype of the object that the currently executing
    /// method was retrieved from, if one exists.
    pub fn subclass_object(&self) -> Option<ClassObject<'gc>> {
        self.subclass_object
    }
    /// Retrieve an int from the current constant pool.
    fn pool_int(
        &self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<i32>,
    ) -> Result<i32, Error> {
        value::abc_int(method.translation_unit(), index)
    }
    /// Retrieve an unsigned int from the current constant pool.
    fn pool_uint(
        &self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<u32>,
    ) -> Result<u32, Error> {
        value::abc_uint(method.translation_unit(), index)
    }
    /// Retrieve a double from the current constant pool.
    fn pool_double(
        &self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<f64>,
    ) -> Result<f64, Error> {
        value::abc_double(method.translation_unit(), index)
    }
    /// Retrieve a string from the current constant pool.
    ///
    /// The string is interned into the GC arena as an `AvmString`.
    fn pool_string<'b>(
        &self,
        method: &'b BytecodeMethod<'gc>,
        index: Index<String>,
    ) -> Result<AvmString<'gc>, Error> {
        method
            .translation_unit()
            .pool_string(index.0, self.context.gc_context)
    }
    /// Retrieve a namespace from the current constant pool.
    fn pool_namespace(
        &self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcNamespace>,
    ) -> Result<Namespace<'gc>, Error> {
        Namespace::from_abc_namespace(method.translation_unit(), index, self.context.gc_context)
    }
    /// Retrieve a multiname from the current constant pool.
    ///
    /// Takes `&mut self` because runtime multinames may pull their name and/or
    /// namespace from the operand stack.
    fn pool_multiname(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<Multiname<'gc>, Error> {
        Multiname::from_abc_multiname(method.translation_unit(), index, self)
    }
    /// Retrieve a static, or non-runtime, multiname from the current constant
    /// pool.
    ///
    /// This version of the function treats index 0 as an error condition.
    fn pool_multiname_static(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<Multiname<'gc>, Error> {
        Multiname::from_abc_multiname_static(
            method.translation_unit(),
            index,
            self.context.gc_context,
        )
    }
/// Retrieve a static, or non-runtime, multiname from the current constant
/// pool.
///
/// This version of the function treats index 0 as the any-type `*`.
fn pool_multiname_static_any(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
index: Index<AbcMultiname>,
) -> Result<Multiname<'gc>, Error> {
if index.0 == 0 {
Ok(Multiname::any())
} else {
Multiname::from_abc_multiname_static(
method.translation_unit(),
index,
self.context.gc_context,
)
}
}
    /// Retrieve a method entry from the current ABC file's method table.
    fn table_method(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMethod>,
        is_function: bool,
    ) -> Result<Gc<'gc, BytecodeMethod<'gc>>, Error> {
        BytecodeMethod::from_method_index(method.translation_unit(), index, is_function, self)
    }
    /// Retrieve a class entry from the current ABC file's class table.
    fn table_class(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcClass>,
    ) -> Result<GcCell<'gc, Class<'gc>>, Error> {
        method.translation_unit().load_class(index.0, self)
    }
    /// Interpret the given bytecode method to completion.
    ///
    /// Executes opcodes one at a time until the method returns a value or
    /// raises an error. Errors if the method has no bytecode body.
    pub fn run_actions(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
    ) -> Result<Value<'gc>, Error> {
        let body: Result<_, Error> = method
            .body()
            .ok_or_else(|| "Cannot execute non-native method without body".into());
        let body = body?;
        let mut reader = Reader::new(&body.code);
        loop {
            // Each step either continues, returns a value, or errors out.
            let result = self.do_next_opcode(method, &mut reader, &body.code);
            match result {
                Ok(FrameControl::Return(value)) => break Ok(value),
                Ok(FrameControl::Continue) => {}
                Err(e) => break Err(e),
            }
        }
    }
    /// Run a single action from a given action reader.
    ///
    /// Decodes one opcode from `reader` and dispatches it to the matching
    /// `op_*` handler. `full_data` is the complete bytecode slice, used to
    /// compute positions for branch targets. Returns `FrameControl::Continue`
    /// to keep executing or `FrameControl::Return` when the method finishes.
    fn do_next_opcode<'b>(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        reader: &mut Reader<'b>,
        full_data: &'b [u8],
    ) -> Result<FrameControl<'gc>, Error> {
        // Periodically check the wall-clock execution budget so a runaway
        // script can be terminated; checking every opcode would be too slow.
        self.actions_since_timeout_check += 1;
        if self.actions_since_timeout_check >= 2000 {
            self.actions_since_timeout_check = 0;
            if self.context.update_start.elapsed() >= self.context.max_execution_duration {
                return Err(
                    "A script in this movie has taken too long to execute and has been terminated."
                        .into(),
                );
            }
        }
        // Remember where this instruction starts; `lookupswitch` offsets are
        // relative to the instruction, not the current read position.
        let instruction_start = reader.pos(full_data);
        let op = reader.read_op();
        if let Ok(Some(op)) = op {
            avm_debug!(self.avm2(), "Opcode: {:?}", op);
            let result = match op {
                Op::PushByte { value } => self.op_push_byte(value),
                Op::PushDouble { value } => self.op_push_double(method, value),
                Op::PushFalse => self.op_push_false(),
                Op::PushInt { value } => self.op_push_int(method, value),
                Op::PushNamespace { value } => self.op_push_namespace(method, value),
                Op::PushNaN => self.op_push_nan(),
                Op::PushNull => self.op_push_null(),
                Op::PushShort { value } => self.op_push_short(value),
                Op::PushString { value } => self.op_push_string(method, value),
                Op::PushTrue => self.op_push_true(),
                Op::PushUint { value } => self.op_push_uint(method, value),
                Op::PushUndefined => self.op_push_undefined(),
                Op::Pop => self.op_pop(),
                Op::Dup => self.op_dup(),
                Op::GetLocal { index } => self.op_get_local(index),
                Op::SetLocal { index } => self.op_set_local(index),
                Op::Kill { index } => self.op_kill(index),
                Op::Call { num_args } => self.op_call(num_args),
                Op::CallMethod { index, num_args } => self.op_call_method(index, num_args),
                Op::CallProperty { index, num_args } => {
                    self.op_call_property(method, index, num_args)
                }
                Op::CallPropLex { index, num_args } => {
                    self.op_call_prop_lex(method, index, num_args)
                }
                Op::CallPropVoid { index, num_args } => {
                    self.op_call_prop_void(method, index, num_args)
                }
                Op::CallStatic { index, num_args } => self.op_call_static(method, index, num_args),
                Op::CallSuper { index, num_args } => self.op_call_super(method, index, num_args),
                Op::CallSuperVoid { index, num_args } => {
                    self.op_call_super_void(method, index, num_args)
                }
                Op::ReturnValue => self.op_return_value(),
                Op::ReturnVoid => self.op_return_void(),
                Op::GetProperty { index } => self.op_get_property(method, index),
                Op::SetProperty { index } => self.op_set_property(method, index),
                Op::InitProperty { index } => self.op_init_property(method, index),
                Op::DeleteProperty { index } => self.op_delete_property(method, index),
                Op::GetSuper { index } => self.op_get_super(method, index),
                Op::SetSuper { index } => self.op_set_super(method, index),
                Op::In => self.op_in(),
                Op::PushScope => self.op_push_scope(),
                Op::PushWith => self.op_push_with(),
                Op::PopScope => self.op_pop_scope(),
                Op::GetOuterScope { index } => self.op_get_outer_scope(index),
                Op::GetScopeObject { index } => self.op_get_scope_object(index),
                Op::GetGlobalScope => self.op_get_global_scope(),
                Op::FindProperty { index } => self.op_find_property(method, index),
                Op::FindPropStrict { index } => self.op_find_prop_strict(method, index),
                Op::GetLex { index } => self.op_get_lex(method, index),
                Op::GetSlot { index } => self.op_get_slot(index),
                Op::SetSlot { index } => self.op_set_slot(index),
                Op::GetGlobalSlot { index } => self.op_get_global_slot(index),
                Op::SetGlobalSlot { index } => self.op_set_global_slot(index),
                Op::Construct { num_args } => self.op_construct(num_args),
                Op::ConstructProp { index, num_args } => {
                    self.op_construct_prop(method, index, num_args)
                }
                Op::ConstructSuper { num_args } => self.op_construct_super(num_args),
                Op::NewActivation => self.op_new_activation(),
                Op::NewObject { num_args } => self.op_new_object(num_args),
                Op::NewFunction { index } => self.op_new_function(method, index),
                Op::NewClass { index } => self.op_new_class(method, index),
                Op::ApplyType { num_types } => self.op_apply_type(num_types),
                Op::NewArray { num_args } => self.op_new_array(num_args),
                Op::CoerceA => self.op_coerce_a(),
                Op::CoerceB => self.op_coerce_b(),
                Op::CoerceD => self.op_coerce_d(),
                Op::CoerceI => self.op_coerce_i(),
                Op::CoerceO => self.op_coerce_o(),
                Op::CoerceS => self.op_coerce_s(),
                Op::CoerceU => self.op_coerce_u(),
                Op::ConvertB => self.op_convert_b(),
                Op::ConvertI => self.op_convert_i(),
                Op::ConvertD => self.op_convert_d(),
                Op::ConvertO => self.op_convert_o(),
                Op::ConvertU => self.op_convert_u(),
                Op::ConvertS => self.op_convert_s(),
                Op::Add => self.op_add(),
                Op::AddI => self.op_add_i(),
                Op::BitAnd => self.op_bitand(),
                Op::BitNot => self.op_bitnot(),
                Op::BitOr => self.op_bitor(),
                Op::BitXor => self.op_bitxor(),
                Op::DecLocal { index } => self.op_declocal(index),
                Op::DecLocalI { index } => self.op_declocal_i(index),
                Op::Decrement => self.op_decrement(),
                Op::DecrementI => self.op_decrement_i(),
                Op::Divide => self.op_divide(),
                Op::IncLocal { index } => self.op_inclocal(index),
                Op::IncLocalI { index } => self.op_inclocal_i(index),
                Op::Increment => self.op_increment(),
                Op::IncrementI => self.op_increment_i(),
                Op::LShift => self.op_lshift(),
                Op::Modulo => self.op_modulo(),
                Op::Multiply => self.op_multiply(),
                Op::MultiplyI => self.op_multiply_i(),
                Op::Negate => self.op_negate(),
                Op::NegateI => self.op_negate_i(),
                Op::RShift => self.op_rshift(),
                Op::Subtract => self.op_subtract(),
                Op::SubtractI => self.op_subtract_i(),
                Op::Swap => self.op_swap(),
                Op::URShift => self.op_urshift(),
                Op::Jump { offset } => self.op_jump(offset, reader, full_data),
                Op::IfTrue { offset } => self.op_if_true(offset, reader, full_data),
                Op::IfFalse { offset } => self.op_if_false(offset, reader, full_data),
                Op::IfStrictEq { offset } => self.op_if_strict_eq(offset, reader, full_data),
                Op::IfStrictNe { offset } => self.op_if_strict_ne(offset, reader, full_data),
                Op::IfEq { offset } => self.op_if_eq(offset, reader, full_data),
                Op::IfNe { offset } => self.op_if_ne(offset, reader, full_data),
                Op::IfGe { offset } => self.op_if_ge(offset, reader, full_data),
                Op::IfGt { offset } => self.op_if_gt(offset, reader, full_data),
                Op::IfLe { offset } => self.op_if_le(offset, reader, full_data),
                Op::IfLt { offset } => self.op_if_lt(offset, reader, full_data),
                Op::IfNge { offset } => self.op_if_nge(offset, reader, full_data),
                Op::IfNgt { offset } => self.op_if_ngt(offset, reader, full_data),
                Op::IfNle { offset } => self.op_if_nle(offset, reader, full_data),
                Op::IfNlt { offset } => self.op_if_nlt(offset, reader, full_data),
                Op::StrictEquals => self.op_strict_equals(),
                Op::Equals => self.op_equals(),
                Op::GreaterEquals => self.op_greater_equals(),
                Op::GreaterThan => self.op_greater_than(),
                Op::LessEquals => self.op_less_equals(),
                Op::LessThan => self.op_less_than(),
                Op::Nop => self.op_nop(),
                Op::Not => self.op_not(),
                Op::HasNext => self.op_has_next(),
                Op::HasNext2 {
                    object_register,
                    index_register,
                } => self.op_has_next_2(object_register, index_register),
                Op::NextName => self.op_next_name(),
                Op::NextValue => self.op_next_value(),
                Op::IsType { index } => self.op_is_type(method, index),
                Op::IsTypeLate => self.op_is_type_late(),
                Op::AsType { type_name } => self.op_as_type(method, type_name),
                Op::AsTypeLate => self.op_as_type_late(),
                Op::InstanceOf => self.op_instance_of(),
                Op::Label => Ok(FrameControl::Continue),
                Op::Debug {
                    is_local_register,
                    register_name,
                    register,
                } => self.op_debug(method, is_local_register, register_name, register),
                Op::DebugFile { file_name } => self.op_debug_file(method, file_name),
                Op::DebugLine { line_num } => self.op_debug_line(line_num),
                Op::Bkpt => self.op_bkpt(),
                Op::BkptLine { line_num } => self.op_bkpt_line(line_num),
                Op::Timestamp => self.op_timestamp(),
                Op::TypeOf => self.op_type_of(),
                Op::EscXAttr => self.op_esc_xattr(),
                Op::EscXElem => self.op_esc_elem(),
                Op::LookupSwitch {
                    default_offset,
                    case_offsets,
                } => self.op_lookup_switch(
                    default_offset,
                    &case_offsets,
                    instruction_start,
                    reader,
                    full_data,
                ),
                Op::Coerce { index } => self.op_coerce(method, index),
                Op::Si8 => self.op_si8(),
                Op::Si16 => self.op_si16(),
                Op::Si32 => self.op_si32(),
                Op::Sf32 => self.op_sf32(),
                Op::Sf64 => self.op_sf64(),
                Op::Li8 => self.op_li8(),
                Op::Li16 => self.op_li16(),
                Op::Li32 => self.op_li32(),
                Op::Lf32 => self.op_lf32(),
                Op::Lf64 => self.op_lf64(),
                Op::Sxi1 => self.op_sxi1(),
                Op::Sxi8 => self.op_sxi8(),
                Op::Sxi16 => self.op_sxi16(),
                _ => self.unknown_op(op),
            };
            // Log handler errors before propagating them to the caller.
            if let Err(e) = result {
                log::error!("AVM2 error: {}", e);
                return Err(e);
            }
            result
        } else if let Ok(None) = op {
            // The reader yielded no opcode (e.g. end of stream) — treat as fatal.
            log::error!("Unknown opcode!");
            Err("Unknown opcode!".into())
        } else if let Err(e) = op {
            // The opcode could not be decoded at all.
            log::error!("Parse error: {:?}", e);
            Err(e.into())
        } else {
            unreachable!();
        }
    }
    /// Fallback handler for opcodes that decoded successfully but have no
    /// implementation; logs the opcode and errors out.
    fn unknown_op(&mut self, op: swf::avm2::types::Op) -> Result<FrameControl<'gc>, Error> {
        log::error!("Unknown AVM2 opcode: {:?}", op);
        Err("Unknown op".into())
    }
    /// Implements the `pushbyte` opcode: push a byte, sign-extended to i32.
    fn op_push_byte(&mut self, value: u8) -> Result<FrameControl<'gc>, Error> {
        //TODO: Adobe Animate CC appears to generate signed byte values, and
        //JPEXS appears to take them.
        self.context.avm2.push(value as i8 as i32);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushdouble` opcode: push a double from the constant pool.
    fn op_push_double(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        value: Index<f64>,
    ) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(self.pool_double(method, value)?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushfalse` opcode: push boolean `false`.
    fn op_push_false(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(false);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushint` opcode: push an int from the constant pool.
    fn op_push_int(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        value: Index<i32>,
    ) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(self.pool_int(method, value)?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushnamespace` opcode: wrap a constant-pool namespace
    /// in a `NamespaceObject` and push it.
    fn op_push_namespace(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        value: Index<AbcNamespace>,
    ) -> Result<FrameControl<'gc>, Error> {
        let ns = self.pool_namespace(method, value)?;
        let ns_object = NamespaceObject::from_namespace(self, ns)?;
        self.context.avm2.push(ns_object);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushnan` opcode: push the floating-point NaN value.
    fn op_push_nan(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(f64::NAN);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushnull` opcode: push the `null` value.
    fn op_push_null(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(Value::Null);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushshort` opcode: push an immediate 16-bit integer.
    fn op_push_short(&mut self, value: i16) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushstring` opcode: push a string from the constant pool.
    fn op_push_string(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        value: Index<String>,
    ) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(self.pool_string(&method, value)?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushtrue` opcode: push boolean `true`.
    fn op_push_true(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(true);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushuint` opcode: push an unsigned int from the
    /// constant pool.
    fn op_push_uint(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        value: Index<u32>,
    ) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(self.pool_uint(method, value)?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushundefined` opcode: push the `undefined` value.
    fn op_push_undefined(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(Value::Undefined);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pop` opcode: discard the top of the operand stack.
    fn op_pop(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.pop();
        Ok(FrameControl::Continue)
    }
fn op_dup(&mut self) -> Result<FrameControl<'gc>, Error> {
self.context.avm2.push(
self.context
.avm2
.stack
.last()
.cloned()
.unwrap_or(Value::Undefined),
);
Ok(FrameControl::Continue)
}
    /// Implements the `getlocal` opcode: push a local register's value.
    fn op_get_local(&mut self, register_index: u32) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(self.local_register(register_index)?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `setlocal` opcode: pop a value into a local register.
    fn op_set_local(&mut self, register_index: u32) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        self.set_local_register(register_index, value, self.context.gc_context)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `kill` opcode: reset a local register to `undefined`.
    fn op_kill(&mut self, register_index: u32) -> Result<FrameControl<'gc>, Error> {
        self.set_local_register(register_index, Value::Undefined, self.context.gc_context)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `call` opcode.
    ///
    /// Stack (top first): args..., receiver, function. A receiver that fails
    /// object coercion becomes `None` rather than an error.
    fn op_call(&mut self, arg_count: u32) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let receiver = self.context.avm2.pop().coerce_to_object(self).ok();
        let function = self.context.avm2.pop().coerce_to_object(self)?;
        let superclass_object = receiver.and_then(|r| r.instance_of());
        let value = function.call(receiver, &args, self, superclass_object)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callmethod` opcode: invoke a method by dispatch index
    /// on the receiver popped from the stack, pushing the return value.
    fn op_call_method(
        &mut self,
        index: Index<AbcMethod>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let function: Result<Object<'gc>, Error> = receiver
            .get_method(index.0)
            .ok_or_else(|| format!("Object method {} does not exist", index.0).into());
        let superclass_object = receiver.instance_of();
        let value = function?.call(Some(receiver), &args, self, superclass_object)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callproperty` opcode: call a (possibly runtime-resolved)
    /// named property on the popped receiver, pushing the return value.
    fn op_call_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let multiname = self.pool_multiname(method, index)?;
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let value = receiver.call_property(&multiname, &args, self)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callproplex` opcode: look up a named function on the
    /// receiver, then call it with no receiver (`this` is unbound).
    fn op_call_prop_lex(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let multiname = self.pool_multiname(method, index)?;
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let function = receiver
            .get_property(receiver, &multiname, self)?
            .coerce_to_object(self)?;
        // Deliberately called with no receiver, per opcode semantics.
        let value = function.call(None, &args, self, None)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callpropvoid` opcode: like `callproperty`, but the
    /// return value is discarded instead of pushed.
    fn op_call_prop_void(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let multiname = self.pool_multiname(method, index)?;
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        receiver.call_property(&multiname, &args, self)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `callstatic` opcode: call a method from the ABC method
    /// table directly on the popped receiver, pushing the return value.
    fn op_call_static(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMethod>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count)
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let method = self.table_method(method, index, false)?;
        // TODO: What scope should the function be executed with?
        let scope = self.create_scopechain();
        let function = FunctionObject::from_method(self, method.into(), scope, None);
        let value = function.call(Some(receiver), &args, self, receiver.instance_of())?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callsuper` opcode: invoke the superclass's version of a
    /// named method on the popped receiver, pushing the return value.
    fn op_call_super(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count)
        let multiname = self.pool_multiname(method, index)?;
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let superclass_object = self
            .subclass_object()
            .and_then(|bc| bc.superclass_object())
            .ok_or_else(|| Error::from("Attempted to call super method without a superclass."))?;
        let value = superclass_object.call_super(&multiname, receiver, &args, self)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `callsupervoid` opcode: like `callsuper`, but the return
    /// value is discarded instead of pushed.
    fn op_call_super_void(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let multiname = self.pool_multiname(method, index)?;
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        let superclass_object = self
            .subclass_object()
            .and_then(|bc| bc.superclass_object())
            .ok_or_else(|| Error::from("Attempted to call super method without a superclass."))?;
        superclass_object.call_super(&multiname, receiver, &args, self)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `returnvalue` opcode: pop the stack top and return it
    /// from the current method.
    fn op_return_value(&mut self) -> Result<FrameControl<'gc>, Error> {
        let return_value = self.context.avm2.pop();
        Ok(FrameControl::Return(return_value))
    }
    /// Implements the `returnvoid` opcode: return `undefined` from the method.
    fn op_return_void(&mut self) -> Result<FrameControl<'gc>, Error> {
        Ok(FrameControl::Return(Value::Undefined))
    }
    /// Implements the `getproperty` opcode: read a named property from the
    /// popped object and push its value.
    ///
    /// Late-bound (`MultinameL`) names take a fast path when the key is a
    /// non-primitive on a Dictionary, looking up by object identity.
    fn op_get_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let txunit = method.translation_unit();
        let abc = txunit.abc();
        let abc_multiname = Multiname::resolve_multiname_index(&abc, index.clone())?;
        let (multiname, object) = if matches!(
            abc_multiname,
            AbcMultiname::MultinameL { .. } | AbcMultiname::MultinameLA { .. }
        ) {
            // `MultinameL` is the only form of multiname that allows fast-path
            // or alternate-path lookups based on the local name *value*,
            // rather than it's string representation.
            let name_value = self.context.avm2.pop();
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            if !name_value.is_primitive() {
                if let Some(dictionary) = object.as_dictionary_object() {
                    let value =
                        dictionary.get_property_by_object(name_value.coerce_to_object(self)?);
                    self.context.avm2.push(value);
                    return Ok(FrameControl::Continue);
                }
            }
            (
                Multiname::from_multiname_late(txunit, abc_multiname, name_value, self)?,
                object,
            )
        } else {
            // Static name: resolve from the pool, then pop the receiver.
            let multiname = self.pool_multiname(method, index)?;
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            (multiname, object)
        };
        let value = object.get_property(object, &multiname, self)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `setproperty` opcode: pop a value and store it into a
    /// named property of the popped object.
    ///
    /// Late-bound (`MultinameL`) names take a fast path when the key is a
    /// non-primitive on a Dictionary, storing by object identity.
    fn op_set_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        let txunit = method.translation_unit();
        let abc = txunit.abc();
        let abc_multiname = Multiname::resolve_multiname_index(&abc, index.clone())?;
        let (multiname, mut object) = if matches!(
            abc_multiname,
            AbcMultiname::MultinameL { .. } | AbcMultiname::MultinameLA { .. }
        ) {
            // `MultinameL` is the only form of multiname that allows fast-path
            // or alternate-path lookups based on the local name *value*,
            // rather than it's string representation.
            let name_value = self.context.avm2.pop();
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            if !name_value.is_primitive() {
                if let Some(dictionary) = object.as_dictionary_object() {
                    dictionary.set_property_by_object(
                        name_value.coerce_to_object(self)?,
                        value,
                        self.context.gc_context,
                    );
                    return Ok(FrameControl::Continue);
                }
            }
            (
                Multiname::from_multiname_late(txunit, abc_multiname, name_value, self)?,
                object,
            )
        } else {
            // Static name: resolve from the pool, then pop the receiver.
            let multiname = self.pool_multiname(method, index)?;
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            (multiname, object)
        };
        object.set_property(object, &multiname, value, self)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `initproperty` opcode: pop a value and initialize a
    /// named property of the popped object (used during object construction).
    fn op_init_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        let multiname = self.pool_multiname(method, index)?;
        let mut object = self.context.avm2.pop().coerce_to_object(self)?;
        object.init_property(object, &multiname, value, self)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `deleteproperty` opcode: delete a named property from
    /// the popped object and push whether the deletion succeeded.
    ///
    /// Late-bound (`MultinameL`) names take a fast path when the key is a
    /// non-primitive on a Dictionary, deleting by object identity.
    fn op_delete_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let txunit = method.translation_unit();
        let abc = txunit.abc();
        let abc_multiname = Multiname::resolve_multiname_index(&abc, index.clone())?;
        let (multiname, object) = if matches!(
            abc_multiname,
            AbcMultiname::MultinameL { .. } | AbcMultiname::MultinameLA { .. }
        ) {
            // `MultinameL` is the only form of multiname that allows fast-path
            // or alternate-path lookups based on the local name *value*,
            // rather than it's string representation.
            let name_value = self.context.avm2.pop();
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            if !name_value.is_primitive() {
                if let Some(dictionary) = object.as_dictionary_object() {
                    dictionary.delete_property_by_object(
                        name_value.coerce_to_object(self)?,
                        self.context.gc_context,
                    );
                    // Dictionary deletes by identity always succeed.
                    self.context.avm2.push(true);
                    return Ok(FrameControl::Continue);
                }
            }
            (
                Multiname::from_multiname_late(txunit, abc_multiname, name_value, self)?,
                object,
            )
        } else {
            // Static name: resolve from the pool, then pop the receiver.
            let multiname = self.pool_multiname(method, index)?;
            let object = self.context.avm2.pop().coerce_to_object(self)?;
            (multiname, object)
        };
        if let Some(name) = object.resolve_multiname(&multiname)? {
            self.context
                .avm2
                .push(object.delete_property(self.context.gc_context, &name))
        } else {
            // Unknown properties on a dynamic class delete successfully.
            self.context.avm2.push(
                !object
                    .instance_of_class_definition()
                    .map(|c| c.read().is_sealed())
                    .unwrap_or(false),
            )
        }
        Ok(FrameControl::Continue)
    }
    /// Implements the `getsuper` opcode: read a named property through the
    /// superclass's definition and push its value.
    fn op_get_super(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let multiname = self.pool_multiname(method, index)?;
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        // NOTE(review): error text says "call super method" even though this
        // is a property read — confirm whether the message should differ.
        let superclass_object = self
            .subclass_object()
            .and_then(|bc| bc.superclass_object())
            .ok_or_else(|| Error::from("Attempted to call super method without a superclass."))?;
        let value = superclass_object.get_super(&multiname, object, self)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `setsuper` opcode: pop a value and write a named
    /// property through the superclass's definition.
    fn op_set_super(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        let multiname = self.pool_multiname(method, index)?;
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        let superclass_object = self
            .subclass_object()
            .and_then(|bc| bc.superclass_object())
            .ok_or_else(|| Error::from("Attempted to call super method without a superclass."))?;
        superclass_object.set_super(&multiname, value, object, self)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `in` opcode: push whether the popped object has the
    /// popped name as a property.
    ///
    /// Dictionaries with non-primitive keys are checked by object identity;
    /// all other cases coerce the name to a public QName.
    fn op_in(&mut self) -> Result<FrameControl<'gc>, Error> {
        let obj = self.context.avm2.pop().coerce_to_object(self)?;
        let name_value = self.context.avm2.pop();
        if let Some(dictionary) = obj.as_dictionary_object() {
            if !name_value.is_primitive() {
                let obj_key = name_value.coerce_to_object(self)?;
                self.context
                    .avm2
                    .push(dictionary.has_property_by_object(obj_key));
                return Ok(FrameControl::Continue);
            }
        }
        let name = name_value.coerce_to_string(self)?;
        let qname = QName::new(Namespace::public(), name);
        let has_prop = obj.has_property(&qname)?;
        self.context.avm2.push(has_prop);
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushscope` opcode: push the popped object onto the
    /// scope stack as a regular scope.
    fn op_push_scope(&mut self) -> Result<FrameControl<'gc>, Error> {
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        self.scope_stack.push(Scope::new(object));
        Ok(FrameControl::Continue)
    }
    /// Implements the `pushwith` opcode: push the popped object onto the
    /// scope stack as a `with` scope.
    fn op_push_with(&mut self) -> Result<FrameControl<'gc>, Error> {
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        self.scope_stack.push(Scope::new_with(object));
        Ok(FrameControl::Continue)
    }
    /// Implements the `popscope` opcode: remove the top of the scope stack.
    fn op_pop_scope(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.scope_stack.pop();
        Ok(FrameControl::Continue)
    }
fn op_get_outer_scope(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
let scope = self.outer.get(index as usize);
if let Some(scope) = scope {
self.context.avm2.push(scope.values());
} else {
self.context.avm2.push(Value::Undefined);
};
Ok(FrameControl::Continue)
}
fn op_get_scope_object(&mut self, index: u8) -> Result<FrameControl<'gc>, Error> {
let scope = self.scope_stack.get(index as usize);
if let Some(scope) = scope {
self.context.avm2.push(scope.values());
} else {
self.context.avm2.push(Value::Undefined);
};
Ok(FrameControl::Continue)
}
    /// Implements the `getglobalscope` opcode: push the global scope object,
    /// or `null` if neither the outer chain nor the scope stack has one.
    fn op_get_global_scope(&mut self) -> Result<FrameControl<'gc>, Error> {
        self.context.avm2.push(
            self.global_scope()
                .map(|gs| gs.into())
                .unwrap_or(Value::Null),
        );
        Ok(FrameControl::Continue)
    }
    /// Implements the `findproperty` opcode: search the scope chain for a
    /// name and push the scope object holding it, falling back to the global
    /// scope (or `undefined`) when not found.
    fn op_find_property(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let multiname = self.pool_multiname(method, index)?;
        avm_debug!(self.context.avm2, "Resolving {:?}", multiname);
        let result = self
            .find_definition(&multiname)?
            .or_else(|| self.global_scope());
        self.context
            .avm2
            .push(result.map(|o| o.into()).unwrap_or(Value::Undefined));
        Ok(FrameControl::Continue)
    }
    /// Implements the `findpropstrict` opcode: like `findproperty`, but a
    /// missing name is a hard error instead of a fallback.
    fn op_find_prop_strict(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let multiname = self.pool_multiname(method, index)?;
        avm_debug!(self.context.avm2, "Resolving {:?}", multiname);
        let found: Result<Object<'gc>, Error> = self
            .find_definition(&multiname)?
            .ok_or_else(|| format!("Property does not exist: {:?}", multiname).into());
        let result: Value<'gc> = found?.into();
        self.context.avm2.push(result);
        Ok(FrameControl::Continue)
    }
    /// Implements the `getlex` opcode: resolve a static name through the
    /// scope chain and push its *value* (unlike `findpropstrict`, which
    /// pushes the holding scope object).
    fn op_get_lex(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let multiname = self.pool_multiname_static(method, index)?;
        avm_debug!(self.avm2(), "Resolving {:?}", multiname);
        let found: Result<Value<'gc>, Error> = self
            .resolve_definition(&multiname)?
            .ok_or_else(|| format!("Property does not exist: {:?}", multiname).into());
        self.context.avm2.push(found?);
        Ok(FrameControl::Continue)
    }
    /// Implements the `getslot` opcode: push the value of a numbered slot on
    /// the popped object.
    fn op_get_slot(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        let value = object.get_slot(index)?;
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `setslot` opcode: pop a value and store it into a
    /// numbered slot on the popped object.
    fn op_set_slot(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        object.set_slot(index, value, self.context.gc_context)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `getglobalslot` opcode: push a numbered slot from the
    /// global scope, or `undefined` if there is no global scope.
    fn op_get_global_slot(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
        let value = self
            .global_scope()
            .map(|global| global.get_slot(index))
            .transpose()?
            .unwrap_or(Value::Undefined);
        self.context.avm2.push(value);
        Ok(FrameControl::Continue)
    }
    /// Implements the `setglobalslot` opcode: pop a value into a numbered slot
    /// on the global scope; silently does nothing if there is no global scope.
    fn op_set_global_slot(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        self.global_scope()
            .map(|global| global.set_slot(index, value, self.context.gc_context))
            .transpose()?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `construct` opcode: pop a constructor and args, build a
    /// new instance, and push it.
    fn op_construct(&mut self, arg_count: u32) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let ctor = self.context.avm2.pop().coerce_to_object(self)?;
        let object = ctor.construct(self, &args)?;
        self.context.avm2.push(object);
        Ok(FrameControl::Continue)
    }
    /// Implements the `constructprop` opcode: construct an instance from a
    /// named constructor property of the popped object and push it.
    fn op_construct_prop(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
        arg_count: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let multiname = self.pool_multiname(method, index)?;
        let source = self.context.avm2.pop().coerce_to_object(self)?;
        let object = source.construct_prop(&multiname, &args, self)?;
        self.context.avm2.push(object);
        Ok(FrameControl::Continue)
    }
    /// Implements the `constructsuper` opcode: run the superclass constructor
    /// on the popped receiver; nothing is pushed.
    fn op_construct_super(&mut self, arg_count: u32) -> Result<FrameControl<'gc>, Error> {
        let args = self.context.avm2.pop_args(arg_count);
        let receiver = self.context.avm2.pop().coerce_to_object(self)?;
        self.super_init(receiver, &args)?;
        Ok(FrameControl::Continue)
    }
    /// Implements the `newactivation` opcode: push an activation object for
    /// this frame, instantiating its activation class when one exists or a
    /// bare script object otherwise.
    fn op_new_activation(&mut self) -> Result<FrameControl<'gc>, Error> {
        let instance = if let Some(activation_class) = self.activation_class {
            activation_class.construct(self, &[])?
        } else {
            ScriptObject::bare_object(self.context.gc_context)
        };
        self.context.avm2.push(instance);
        Ok(FrameControl::Continue)
    }
fn op_new_object(&mut self, num_args: u32) -> Result<FrameControl<'gc>, Error> {
let mut object = self.context.avm2.classes().object.construct(self, &[])?;
for _ in 0..num_args {
let value = self.context.avm2.pop();
let name = self.context.avm2.pop();
object.set_property(
object,
&QName::dynamic_name(name.coerce_to_string(self)?).into(),
value,
self,
)?;
}
self.context.avm2.push(object);
Ok(FrameControl::Continue)
}
fn op_new_function(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
index: Index<AbcMethod>,
) -> Result<FrameControl<'gc>, Error> {
let method_entry = self.table_method(method, index, true)?;
let scope = self.create_scopechain();
let new_fn = FunctionObject::from_function(self, method_entry.into(), scope)?;
self.context.avm2.push(new_fn);
Ok(FrameControl::Continue)
}
fn op_new_class(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
index: Index<AbcClass>,
) -> Result<FrameControl<'gc>, Error> {
let base_value = self.context.avm2.pop();
let base_class = match base_value {
Value::Object(o) => match o.as_class_object() {
Some(cls) => Some(cls),
None => return Err("Base class for new class is not a class.".into()),
},
Value::Null => None,
_ => return Err("Base class for new class is not Object or null.".into()),
};
let class_entry = self.table_class(method, index)?;
let new_class = ClassObject::from_class(self, class_entry, base_class)?;
self.context.avm2.push(new_class);
Ok(FrameControl::Continue)
}
fn op_apply_type(&mut self, num_types: u32) -> Result<FrameControl<'gc>, Error> {
let args = self.context.avm2.pop_args(num_types);
let base = self.context.avm2.pop().coerce_to_object(self)?;
if args.len() > 1 {
return Err(format!(
"VerifyError: Cannot specialize classes with more than one parameter, {} given",
args.len()
)
.into());
}
let applied = base.apply(self, &args[..])?;
self.context.avm2.push(applied);
Ok(FrameControl::Continue)
}
fn op_new_array(&mut self, num_args: u32) -> Result<FrameControl<'gc>, Error> {
let args = self.context.avm2.pop_args(num_args);
let array = ArrayStorage::from_args(&args[..]);
let array_obj = ArrayObject::from_storage(self, array)?;
self.context.avm2.push(array_obj);
Ok(FrameControl::Continue)
}
fn op_coerce_a(&mut self) -> Result<FrameControl<'gc>, Error> {
Ok(FrameControl::Continue)
}
fn op_coerce_b(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_boolean();
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_coerce_d(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_coerce_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_coerce_o(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop();
let coerced = match value {
Value::Undefined | Value::Null => Value::Null,
_ => value.coerce_to_object(self)?.into(),
};
self.context.avm2.push(coerced);
Ok(FrameControl::Continue)
}
fn op_coerce_s(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop();
let coerced = match value {
Value::Undefined | Value::Null => Value::Null,
_ => value.coerce_to_string(self)?.into(),
};
self.context.avm2.push(coerced);
Ok(FrameControl::Continue)
}
fn op_coerce_u(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_u32(self)?;
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_convert_b(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_boolean();
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_convert_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(Value::Number(value.into()));
Ok(FrameControl::Continue)
}
fn op_convert_d(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(Value::Number(value));
Ok(FrameControl::Continue)
}
fn op_convert_o(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_object(self)?;
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_convert_u(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_u32(self)?;
self.context.avm2.push(Value::Number(value.into()));
Ok(FrameControl::Continue)
}
fn op_convert_s(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_string(self)?;
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
    /// Implements `Op::Add`: ECMA-style addition.
    ///
    /// Pops two operands; if either operand is (or coerces to a primitive
    /// that is) a string, the result is string concatenation. Otherwise both
    /// operands are coerced to numbers and summed.
    fn op_add(&mut self) -> Result<FrameControl<'gc>, Error> {
        let value2 = self.context.avm2.pop();
        let value1 = self.context.avm2.pop();
        // TODO: Special handling required for `Date` and ECMA-357/E4X `XML`
        let sum_value = match (value1, value2) {
            // Fast path: both operands already numeric.
            (Value::Number(n1), Value::Number(n2)) => Value::Number(n1 + n2),
            // Either side already a string: concatenate, coercing the other.
            (Value::String(s), value2) => {
                let mut out_s = s.to_string();
                out_s.push_str(&value2.coerce_to_string(self)?);
                Value::String(AvmString::new(self.context.gc_context, out_s))
            }
            (value1, Value::String(s)) => {
                let mut out_s = value1.coerce_to_string(self)?.to_string();
                out_s.push_str(&s);
                Value::String(AvmString::new(self.context.gc_context, out_s))
            }
            // General case: coerce both to primitives first, then repeat the
            // string-vs-number dispatch on the primitive values.
            (value1, value2) => {
                let prim_value1 = value1.coerce_to_primitive(None, self)?;
                let prim_value2 = value2.coerce_to_primitive(None, self)?;
                match (prim_value1, prim_value2) {
                    (Value::String(s), value2) => {
                        let mut out_s = s.to_string();
                        out_s.push_str(&value2.coerce_to_string(self)?);
                        Value::String(AvmString::new(self.context.gc_context, out_s))
                    }
                    (value1, Value::String(s)) => {
                        let mut out_s = value1.coerce_to_string(self)?.to_string();
                        out_s.push_str(&s);
                        Value::String(AvmString::new(self.context.gc_context, out_s))
                    }
                    // Neither primitive is a string: numeric addition.
                    (value1, value2) => Value::Number(
                        value1.coerce_to_number(self)? + value2.coerce_to_number(self)?,
                    ),
                }
            }
        };
        self.context.avm2.push(sum_value);
        Ok(FrameControl::Continue)
    }
fn op_add_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 + value2);
Ok(FrameControl::Continue)
}
fn op_bitand(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 & value2);
Ok(FrameControl::Continue)
}
fn op_bitnot(&mut self) -> Result<FrameControl<'gc>, Error> {
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(!value1);
Ok(FrameControl::Continue)
}
fn op_bitor(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 | value2);
Ok(FrameControl::Continue)
}
fn op_bitxor(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 ^ value2);
Ok(FrameControl::Continue)
}
fn op_declocal(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
let value = self.local_register(index)?.coerce_to_number(self)?;
self.set_local_register(index, value - 1.0, self.context.gc_context)?;
Ok(FrameControl::Continue)
}
fn op_declocal_i(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
let value = self.local_register(index)?.coerce_to_i32(self)?;
self.set_local_register(index, value - 1, self.context.gc_context)?;
Ok(FrameControl::Continue)
}
fn op_decrement(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value - 1.0);
Ok(FrameControl::Continue)
}
fn op_decrement_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value - 1);
Ok(FrameControl::Continue)
}
fn op_divide(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_number(self)?;
let value1 = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value1 / value2);
Ok(FrameControl::Continue)
}
fn op_inclocal(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
let value = self.local_register(index)?.coerce_to_number(self)?;
self.set_local_register(index, value + 1.0, self.context.gc_context)?;
Ok(FrameControl::Continue)
}
fn op_inclocal_i(&mut self, index: u32) -> Result<FrameControl<'gc>, Error> {
let value = self.local_register(index)?.coerce_to_i32(self)?;
self.set_local_register(index, value + 1, self.context.gc_context)?;
Ok(FrameControl::Continue)
}
fn op_increment(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value + 1.0);
Ok(FrameControl::Continue)
}
fn op_increment_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value + 1);
Ok(FrameControl::Continue)
}
fn op_lshift(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_u32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 << (value2 & 0x1F));
Ok(FrameControl::Continue)
}
fn op_modulo(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_number(self)?;
let value1 = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value1 % value2);
Ok(FrameControl::Continue)
}
fn op_multiply(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_number(self)?;
let value1 = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value1 * value2);
Ok(FrameControl::Continue)
}
fn op_multiply_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 * value2);
Ok(FrameControl::Continue)
}
fn op_negate(&mut self) -> Result<FrameControl<'gc>, Error> {
let value1 = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(-value1);
Ok(FrameControl::Continue)
}
fn op_negate_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(-value1);
Ok(FrameControl::Continue)
}
fn op_rshift(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_u32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 >> (value2 & 0x1F));
Ok(FrameControl::Continue)
}
fn op_subtract(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_number(self)?;
let value1 = self.context.avm2.pop().coerce_to_number(self)?;
self.context.avm2.push(value1 - value2);
Ok(FrameControl::Continue)
}
fn op_subtract_i(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_i32(self)?;
let value1 = self.context.avm2.pop().coerce_to_i32(self)?;
self.context.avm2.push(value1 - value2);
Ok(FrameControl::Continue)
}
fn op_swap(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
self.context.avm2.push(value2);
self.context.avm2.push(value1);
Ok(FrameControl::Continue)
}
fn op_urshift(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop().coerce_to_u32(self)?;
let value1 = self.context.avm2.pop().coerce_to_u32(self)?;
self.context.avm2.push(value1 >> (value2 & 0x1F));
Ok(FrameControl::Continue)
}
fn op_jump<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
reader.seek(full_data, offset);
Ok(FrameControl::Continue)
}
fn op_if_true<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_boolean();
if value {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_false<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_boolean();
if !value {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_strict_eq<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1 == value2 {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_strict_ne<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1 != value2 {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_eq<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1.abstract_eq(&value2, self)? {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_ne<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if !value1.abstract_eq(&value2, self)? {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_ge<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1.abstract_lt(&value2, self)? == Some(false) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_gt<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value2.abstract_lt(&value1, self)? == Some(true) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_le<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value2.abstract_lt(&value1, self)? == Some(false) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_lt<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1.abstract_lt(&value2, self)? == Some(true) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_nge<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value1.abstract_lt(&value2, self)?.unwrap_or(true) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_ngt<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if !value2.abstract_lt(&value1, self)?.unwrap_or(false) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_nle<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if value2.abstract_lt(&value1, self)?.unwrap_or(true) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_if_nlt<'b>(
&mut self,
offset: i32,
reader: &mut Reader<'b>,
full_data: &'b [u8],
) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
if !value1.abstract_lt(&value2, self)?.unwrap_or(false) {
reader.seek(full_data, offset);
}
Ok(FrameControl::Continue)
}
fn op_strict_equals(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
self.context.avm2.push(value1 == value2);
Ok(FrameControl::Continue)
}
fn op_equals(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
let result = value1.abstract_eq(&value2, self)?;
self.context.avm2.push(result);
Ok(FrameControl::Continue)
}
fn op_greater_equals(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
let result = !value1.abstract_lt(&value2, self)?.unwrap_or(true);
self.context.avm2.push(result);
Ok(FrameControl::Continue)
}
fn op_greater_than(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
let result = value2.abstract_lt(&value1, self)?.unwrap_or(false);
self.context.avm2.push(result);
Ok(FrameControl::Continue)
}
fn op_less_equals(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
let result = !value2.abstract_lt(&value1, self)?.unwrap_or(true);
self.context.avm2.push(result);
Ok(FrameControl::Continue)
}
fn op_less_than(&mut self) -> Result<FrameControl<'gc>, Error> {
let value2 = self.context.avm2.pop();
let value1 = self.context.avm2.pop();
let result = value1.abstract_lt(&value2, self)?.unwrap_or(false);
self.context.avm2.push(result);
Ok(FrameControl::Continue)
}
fn op_nop(&mut self) -> Result<FrameControl<'gc>, Error> {
Ok(FrameControl::Continue)
}
fn op_not(&mut self) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_boolean();
self.context.avm2.push(!value);
Ok(FrameControl::Continue)
}
    /// Implements `Op::HasNext`: pops an enumeration index and an object;
    /// pushes the next enumerant index on the object, or 0 when enumeration
    /// is exhausted.
    fn op_has_next(&mut self) -> Result<FrameControl<'gc>, Error> {
        let cur_index = self.context.avm2.pop().coerce_to_u32(self)?;
        let object = self.context.avm2.pop().coerce_to_object(self)?;
        // NOTE(review): overflows (panics in debug) if cur_index == u32::MAX;
        // practically unreachable, but consider checked/wrapping add.
        let next_index = cur_index + 1;
        if object.get_enumerant_name(next_index).is_some() {
            self.context.avm2.push(next_index);
        } else {
            // Exhausted: push numeric zero (pushed as f64 here, while the
            // success branch pushes a u32 -- both coerce identically later).
            self.context.avm2.push(0.0);
        }
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::HasNext2`: advances an enumeration held in a pair of
    /// local registers (object, index), walking up the prototype chain when
    /// the current object's enumerants are exhausted.
    ///
    /// Pushes `true` if another enumerant exists, `false` otherwise, and
    /// writes the updated index and (possibly prototype) object back to the
    /// registers.
    fn op_has_next_2(
        &mut self,
        object_register: u32,
        index_register: u32,
    ) -> Result<FrameControl<'gc>, Error> {
        let cur_index = self.local_register(index_register)?.coerce_to_u32(self)?;
        let mut object = Some(
            self.local_register(object_register)?
                .coerce_to_object(self)?,
        );
        let mut next_index = cur_index + 1;
        // Walk the prototype chain until an object with an enumerant at
        // `next_index` is found; enumeration restarts at index 1 on each
        // prototype.
        while let Some(cur_object) = object {
            if cur_object.get_enumerant_name(next_index).is_none() {
                next_index = 1;
                object = cur_object.proto();
            } else {
                break;
            }
        }
        // Ran off the end of the chain: index 0 signals exhaustion.
        if object.is_none() {
            next_index = 0;
        }
        self.context.avm2.push(next_index != 0);
        // Write the advanced state back so subsequent HasNext2/NextName/
        // NextValue opcodes continue from here.
        self.set_local_register(index_register, next_index, self.context.gc_context)?;
        self.set_local_register(
            object_register,
            object.map(|v| v.into()).unwrap_or(Value::Null),
            self.context.gc_context,
        )?;
        Ok(FrameControl::Continue)
    }
fn op_next_name(&mut self) -> Result<FrameControl<'gc>, Error> {
let cur_index = self.context.avm2.pop().coerce_to_number(self)?;
let object = self.context.avm2.pop().coerce_to_object(self)?;
let name = object.get_enumerant_name(cur_index as u32);
self.context.avm2.push(name.unwrap_or(Value::Undefined));
Ok(FrameControl::Continue)
}
fn op_next_value(&mut self) -> Result<FrameControl<'gc>, Error> {
let cur_index = self.context.avm2.pop().coerce_to_number(self)?;
let object = self.context.avm2.pop().coerce_to_object(self)?;
let name = object.get_enumerant_name(cur_index as u32);
let value = if let Some(name) = name {
let name = name.coerce_to_string(self)?;
object.get_property(object, &QName::dynamic_name(name).into(), self)?
} else {
Value::Undefined
};
self.context.avm2.push(value);
Ok(FrameControl::Continue)
}
fn op_is_type(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
type_name_index: Index<AbcMultiname>,
) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop();
let multiname = self.pool_multiname_static(method, type_name_index)?;
let found: Result<Value<'gc>, Error> =
self.resolve_definition(&multiname)?.ok_or_else(|| {
format!(
"Attempted to check against nonexistent type {:?}",
multiname
)
.into()
});
let type_object = found?
.coerce_to_object(self)?
.as_class_object()
.ok_or_else(|| {
Error::from(format!(
"Attempted to check against nonexistent type {:?}",
multiname
))
})?;
let is_instance_of = value.is_of_type(self, type_object)?;
self.context.avm2.push(is_instance_of);
Ok(FrameControl::Continue)
}
fn op_is_type_late(&mut self) -> Result<FrameControl<'gc>, Error> {
let type_object = self.context.avm2.pop().coerce_to_object(self)?;
let value = self.context.avm2.pop();
let type_object = type_object
.as_class_object()
.ok_or_else(|| Error::from("Attempted to check against non-type"))?;
let is_instance_of = value.is_of_type(self, type_object)?;
self.context.avm2.push(is_instance_of);
Ok(FrameControl::Continue)
}
fn op_as_type(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
type_name_index: Index<AbcMultiname>,
) -> Result<FrameControl<'gc>, Error> {
let value = self.context.avm2.pop().coerce_to_object(self)?;
let multiname = self.pool_multiname_static(method, type_name_index)?;
let found: Result<Value<'gc>, Error> =
self.resolve_definition(&multiname)?.ok_or_else(|| {
format!(
"Attempted to check against nonexistent type {:?}",
multiname
)
.into()
});
let class = found?
.coerce_to_object(self)?
.as_class_object()
.ok_or_else(|| {
Error::from("TypeError: The right-hand side of operator must be a class.")
})?;
if value.is_of_type(class, self)? {
self.context.avm2.push(value);
} else {
self.context.avm2.push(Value::Null);
}
Ok(FrameControl::Continue)
}
fn op_as_type_late(&mut self) -> Result<FrameControl<'gc>, Error> {
let class = self.context.avm2.pop().coerce_to_object(self)?;
let value = self.context.avm2.pop().coerce_to_object(self)?;
let class = class.as_class_object().ok_or_else(|| {
Error::from("TypeError: The right-hand side of operator must be a class.")
})?;
if value.is_of_type(class, self)? {
self.context.avm2.push(value);
} else {
self.context.avm2.push(Value::Null);
}
Ok(FrameControl::Continue)
}
fn op_instance_of(&mut self) -> Result<FrameControl<'gc>, Error> {
let type_object = self.context.avm2.pop().coerce_to_object(self)?;
let value = self.context.avm2.pop().coerce_to_object(self).ok();
if let Some(value) = value {
let is_instance_of = value.is_instance_of(self, type_object)?;
self.context.avm2.push(is_instance_of);
} else {
self.context.avm2.push(false);
}
Ok(FrameControl::Continue)
}
    /// Implements `Op::TypeOf`: pops a value and pushes its ECMA `typeof`
    /// string ("undefined", "object", "boolean", "number", "function", "xml"
    /// or "string").
    fn op_type_of(&mut self) -> Result<FrameControl<'gc>, Error> {
        let value = self.context.avm2.pop();
        let type_name = match value {
            Value::Undefined => "undefined",
            // `typeof null` is "object", as in ECMAScript.
            Value::Null => "object",
            Value::Bool(_) => "boolean",
            Value::Number(_) | Value::Integer(_) | Value::Unsigned(_) => "number",
            Value::Object(o) => {
                // Subclasses always have a typeof = "object", must be a subclass if the prototype chain is > 2, or not a subclass if <=2
                let is_not_subclass = matches!(
                    o.proto().and_then(|p| p.proto()).and_then(|p| p.proto()),
                    None
                );
                match o {
                    Object::FunctionObject(_) => {
                        if is_not_subclass {
                            "function"
                        } else {
                            "object"
                        }
                    }
                    Object::XmlObject(_) => {
                        if is_not_subclass {
                            "xml"
                        } else {
                            "object"
                        }
                    }
                    _ => "object",
                }
            }
            Value::String(_) => "string",
        };
        self.context.avm2.push(Value::String(AvmString::new(
            self.context.gc_context,
            type_name,
        )));
        Ok(FrameControl::Continue)
    }
/// Implements `Op::EscXAttr`
fn op_esc_xattr(&mut self) -> Result<FrameControl<'gc>, Error> {
let s = self.context.avm2.pop().coerce_to_string(self)?;
// Implementation of `EscapeAttributeValue` from ECMA-357(10.2.1.2)
let mut r = String::new();
for c in s.chars() {
match c {
'"' => r += """,
'<' => r += "<",
'&' => r += "&",
'\u{000A}' => r += "
",
'\u{000D}' => r += "
",
'\u{0009}' => r += "	",
_ => r.push(c),
}
}
self.context
.avm2
.push(AvmString::new(self.context.gc_context, r));
Ok(FrameControl::Continue)
}
    /// Implements `Op::EscXElem`
    ///
    /// Pops a value, coerces it to a string and pushes a copy with XML
    /// element metacharacters (`<`, `>`, `&`) replaced by entity references.
    fn op_esc_elem(&mut self) -> Result<FrameControl<'gc>, Error> {
        let s = self.context.avm2.pop().coerce_to_string(self)?;
        // contrary to the avmplus documentation, this escapes the value on the top of the stack using EscapeElementValue from ECMA-357 *NOT* EscapeAttributeValue.
        // Implementation of `EscapeElementValue` from ECMA-357(10.2.1.1)
        let mut r = String::new();
        for c in s.chars() {
            match c {
                '<' => r += "&lt;",
                '>' => r += "&gt;",
                '&' => r += "&amp;",
                // All other characters pass through unescaped.
                _ => r.push(c),
            }
        }
        self.context
            .avm2
            .push(AvmString::new(self.context.gc_context, r));
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::LookupSwitch`
    ///
    /// Pops an index and jumps to the matching case offset, or to
    /// `default_offset` when the index is out of range. Offsets in the
    /// bytecode are relative to the start of the LookupSwitch instruction,
    /// so the current reader position is subtracted to get a relative seek.
    fn op_lookup_switch<'b>(
        &mut self,
        default_offset: i32,
        case_offsets: &[i32],
        instruction_start: usize,
        reader: &mut Reader<'b>,
        full_data: &'b [u8],
    ) -> Result<FrameControl<'gc>, Error> {
        let index = self.context.avm2.pop().coerce_to_i32(self)?;
        // A negative index cast to usize becomes huge, so `.get` misses and
        // the default offset is taken -- matching out-of-range behavior.
        let offset = case_offsets
            .get(index as usize)
            .copied()
            .unwrap_or(default_offset)
            + instruction_start as i32
            - reader.pos(full_data) as i32;
        reader.seek(full_data, offset);
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::Coerce`
    ///
    /// Pops a value and coerces it to the type named by the multiname at
    /// `index` in `method`'s constant pool. If the name resolves to no type
    /// (e.g. `*`), the value is pushed back unchanged.
    fn op_coerce(
        &mut self,
        method: Gc<'gc, BytecodeMethod<'gc>>,
        index: Index<AbcMultiname>,
    ) -> Result<FrameControl<'gc>, Error> {
        let val = self.context.avm2.pop();
        let type_name = self.pool_multiname_static_any(method, index)?;
        let param_type = self.resolve_type(type_name)?;
        let x = if let Some(param_type) = param_type {
            val.coerce_to_type(self, param_type)?
        } else {
            // No concrete type to coerce to: pass the value through.
            val
        };
        self.context.avm2.push(x);
        Ok(FrameControl::Continue)
    }
pub fn domain(&self) -> Domain<'gc> {
self.outer.domain()
}
fn domain_memory(&self) -> ByteArrayObject<'gc> {
self.outer.domain().domain_memory()
}
/// Implements `Op::Si8`
fn op_si8(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_i32(self)?;
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let dm = self.domain_memory();
let mut dm = dm
.as_bytearray_mut(self.context.gc_context)
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let address =
usize::try_from(address).map_err(|_| "RangeError: The specified range is invalid")?;
dm.write_at_nongrowing(&val.to_le_bytes(), address)?;
Ok(FrameControl::Continue)
}
/// Implements `Op::Si16`
fn op_si16(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_i32(self)?;
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let dm = self.domain_memory();
let mut dm = dm
.as_bytearray_mut(self.context.gc_context)
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let address =
usize::try_from(address).map_err(|_| "RangeError: The specified range is invalid")?;
dm.write_at_nongrowing(&val.to_le_bytes(), address)?;
Ok(FrameControl::Continue)
}
/// Implements `Op::Si32`
fn op_si32(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_i32(self)?;
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let dm = self.domain_memory();
let mut dm = dm
.as_bytearray_mut(self.context.gc_context)
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let address =
usize::try_from(address).map_err(|_| "RangeError: The specified range is invalid")?;
dm.write_at_nongrowing(&val.to_le_bytes(), address)?;
Ok(FrameControl::Continue)
}
/// Implements `Op::Sf32`
fn op_sf32(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_i32(self)?;
let val = self.context.avm2.pop().coerce_to_number(self)? as f32;
let dm = self.domain_memory();
let mut dm = dm
.as_bytearray_mut(self.context.gc_context)
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let address =
usize::try_from(address).map_err(|_| "RangeError: The specified range is invalid")?;
dm.write_at_nongrowing(&val.to_le_bytes(), address)?;
Ok(FrameControl::Continue)
}
/// Implements `Op::Sf64`
fn op_sf64(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_i32(self)?;
let val = self.context.avm2.pop().coerce_to_number(self)?;
let dm = self.domain_memory();
let mut dm = dm
.as_bytearray_mut(self.context.gc_context)
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let address =
usize::try_from(address).map_err(|_| "RangeError: The specified range is invalid")?;
dm.write_at_nongrowing(&val.to_le_bytes(), address)?;
Ok(FrameControl::Continue)
}
/// Implements `Op::Li8`
fn op_li8(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_u32(self)? as usize;
let dm = self.domain_memory();
let dm = dm
.as_bytearray()
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let val = dm.get(address);
if let Some(val) = val {
self.context.avm2.push(val);
} else {
return Err("RangeError: The specified range is invalid".into());
}
Ok(FrameControl::Continue)
}
/// Implements `Op::Li16`
fn op_li16(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_u32(self)? as usize;
let dm = self.domain_memory();
let dm = dm
.as_bytearray()
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let val = dm.read_at(2, address)?;
self.context
.avm2
.push(u16::from_le_bytes(val.try_into().unwrap()));
Ok(FrameControl::Continue)
}
/// Implements `Op::Li32`
fn op_li32(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_u32(self)? as usize;
let dm = self.domain_memory();
let dm = dm
.as_bytearray()
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let val = dm.read_at(4, address)?;
self.context
.avm2
.push(i32::from_le_bytes(val.try_into().unwrap()));
Ok(FrameControl::Continue)
}
/// Implements `Op::Lf32`
fn op_lf32(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_u32(self)? as usize;
let dm = self.domain_memory();
let dm = dm
.as_bytearray()
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let val = dm.read_at(4, address)?;
self.context
.avm2
.push(f32::from_le_bytes(val.try_into().unwrap()));
Ok(FrameControl::Continue)
}
/// Implements `Op::Lf64`
fn op_lf64(&mut self) -> Result<FrameControl<'gc>, Error> {
let address = self.context.avm2.pop().coerce_to_u32(self)? as usize;
let dm = self.domain_memory();
let dm = dm
.as_bytearray()
.ok_or_else(|| "Unable to get bytearray storage".to_string())?;
let val = dm.read_at(8, address)?;
self.context
.avm2
.push(f64::from_le_bytes(val.try_into().unwrap()));
Ok(FrameControl::Continue)
}
/// Implements `Op::Sxi1`
fn op_sxi1(&mut self) -> Result<FrameControl<'gc>, Error> {
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let val = val.wrapping_shl(31).wrapping_shr(31);
self.context.avm2.push(Value::Integer(val));
Ok(FrameControl::Continue)
}
/// Implements `Op::Sxi8`
fn op_sxi8(&mut self) -> Result<FrameControl<'gc>, Error> {
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let val = (val.wrapping_shl(23).wrapping_shr(23) & 0xFF) as i8 as i32;
self.context.avm2.push(Value::Integer(val));
Ok(FrameControl::Continue)
}
/// Implements `Op::Sxi16`
fn op_sxi16(&mut self) -> Result<FrameControl<'gc>, Error> {
let val = self.context.avm2.pop().coerce_to_i32(self)?;
let val = (val.wrapping_shl(15).wrapping_shr(15) & 0xFFFF) as i16 as i32;
self.context.avm2.push(Value::Integer(val));
Ok(FrameControl::Continue)
}
#[cfg(avm_debug)]
fn op_debug(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
is_local_register: bool,
register_name: Index<String>,
register: u8,
) -> Result<FrameControl<'gc>, Error> {
if is_local_register {
let register_name = self.pool_string(method, register_name)?;
let value = self.local_register(register as u32)?;
avm_debug!(self.avm2(), "Debug: {} = {:?}", register_name, value);
} else {
avm_debug!(self.avm2(), "Unknown debugging mode!");
}
Ok(FrameControl::Continue)
}
    /// Release-build stub for `Op::Debug`: debug opcodes are ignored when the
    /// `avm_debug` cfg is not enabled.
    #[cfg(not(avm_debug))]
    fn op_debug(
        &mut self,
        _method: Gc<'gc, BytecodeMethod<'gc>>,
        _is_local_register: bool,
        _register_name: Index<String>,
        _register: u8,
    ) -> Result<FrameControl<'gc>, Error> {
        Ok(FrameControl::Continue)
    }
#[cfg(avm_debug)]
fn op_debug_file(
&mut self,
method: Gc<'gc, BytecodeMethod<'gc>>,
file_name: Index<String>,
) -> Result<FrameControl<'gc>, Error> {
let file_name = self.pool_string(method, file_name)?;
avm_debug!(self.avm2(), "File: {}", file_name);
Ok(FrameControl::Continue)
}
    /// Release-build stub for `Op::DebugFile`.
    #[cfg(not(avm_debug))]
    fn op_debug_file(
        &mut self,
        _method: Gc<'gc, BytecodeMethod<'gc>>,
        _file_name: Index<String>,
    ) -> Result<FrameControl<'gc>, Error> {
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::DebugLine`: logs the current source line number
    /// (the `avm_debug!` macro compiles to nothing in release builds).
    fn op_debug_line(&mut self, line_num: u32) -> Result<FrameControl<'gc>, Error> {
        avm_debug!(self.avm2(), "Line: {}", line_num);
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::Bkpt`.
    fn op_bkpt(&mut self) -> Result<FrameControl<'gc>, Error> {
        // while a debugger is not attached, this is a no-op
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::BkptLine`.
    fn op_bkpt_line(&mut self, _line_num: u32) -> Result<FrameControl<'gc>, Error> {
        // while a debugger is not attached, this is a no-op
        Ok(FrameControl::Continue)
    }
    /// Implements `Op::Timestamp`.
    fn op_timestamp(&mut self) -> Result<FrameControl<'gc>, Error> {
        // while a debugger is not attached, this is a no-op
        Ok(FrameControl::Continue)
    }
}
| 34.223168 | 163 | 0.568633 |
8f552a76244778db31029d09f9881dd1805a76c0 | 7,242 | // Copyright 2020 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use crate::actix_web::{error::BlockingError, web, Error, HttpRequest, HttpResponse};
use crate::admin::messages::CircuitProposal;
use crate::admin::rest_api::error::ProposalRouteError;
use crate::admin::service::AdminCommands;
use crate::futures::{future::IntoFuture, Future};
use crate::rest_api::paging::{get_response_paging_info, Paging, DEFAULT_LIMIT, DEFAULT_OFFSET};
/// JSON payload of the proposal-list endpoint: one page of circuit proposals
/// plus paging metadata (offset/limit/total and navigation links).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct ListProposalsResponse {
    // The proposals on the requested page.
    data: Vec<CircuitProposal>,
    // Offsets, limits, and next/prev links for this listing.
    paging: Paging,
}
/// Handles `GET /admin/proposals/{circuit_id}`: looks up a single pending
/// circuit proposal and returns it as JSON; responds 404 when no proposal
/// with that circuit id exists, 500 on internal errors.
pub fn fetch_proposal<A: AdminCommands + Clone + 'static>(
    request: HttpRequest,
    admin_commands: web::Data<A>,
) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {
    let circuit_id = request
        .match_info()
        .get("circuit_id")
        .unwrap_or("")
        .to_string();
    Box::new(
        // The admin-service lookup is blocking, so run it on actix's
        // blocking thread pool.
        web::block(move || {
            let proposal = admin_commands
                .fetch_proposal(circuit_id.clone())
                .map_err(|err| ProposalRouteError::InternalError(err.to_string()))?;
            if let Some(proposal) = proposal {
                // Convert from the protobuf representation to the JSON model.
                let proposal = CircuitProposal::from_proto(proposal)
                    .map_err(|err| ProposalRouteError::InternalError(err.to_string()))?;
                Ok(proposal)
            } else {
                Err(ProposalRouteError::NotFound(format!(
                    "Unable to find proposal: {}",
                    circuit_id
                )))
            }
        })
        .then(|res| match res {
            Ok(proposal) => Ok(HttpResponse::Ok().json(proposal)),
            Err(err) => match err {
                BlockingError::Error(err) => match err {
                    ProposalRouteError::InternalError(_) => {
                        error!("{}", err);
                        Ok(HttpResponse::InternalServerError().into())
                    }
                    ProposalRouteError::NotFound(err) => Ok(HttpResponse::NotFound().json(err)),
                },
                // Thread-pool cancellation and similar failures become a 500.
                _ => Ok(HttpResponse::InternalServerError().into()),
            },
        }),
    )
}
/// Handles `GET /admin/proposals`: lists circuit proposals, honoring optional
/// `filter` (circuit management type), `offset`, and `limit` query
/// parameters. Responds 400 on a malformed query string or paging value.
pub fn list_proposals<A: AdminCommands + Clone + 'static>(
    req: HttpRequest,
    admin_commands: web::Data<A>,
) -> Box<dyn Future<Item = HttpResponse, Error = Error>> {
    let query: web::Query<HashMap<String, String>> =
        if let Ok(q) = web::Query::from_query(req.query_string()) {
            q
        } else {
            return Box::new(
                HttpResponse::BadRequest()
                    .json(json!({
                        "message": "Invalid query"
                    }))
                    .into_future(),
            );
        };
    // Parse both paging parameters with the shared helper; bail out with a
    // 400 response on the first malformed value.
    let offset = match parse_paging_param(&query, "offset", DEFAULT_OFFSET) {
        Ok(value) => value,
        Err(response) => return Box::new(response.into_future()),
    };
    let limit = match parse_paging_param(&query, "limit", DEFAULT_LIMIT) {
        Ok(value) => value,
        Err(response) => return Box::new(response.into_future()),
    };
    // Echo the filter back into the paging link so that next/prev URLs
    // preserve it.
    let mut link = format!("{}?", req.uri().path());
    let filters = match query.get("filter") {
        Some(value) => {
            link.push_str(&format!("filter={}&", value));
            Some(value.to_string())
        }
        None => None,
    };
    Box::new(query_list_proposals(
        admin_commands,
        link,
        filters,
        Some(offset),
        Some(limit),
    ))
}

/// Parses a numeric paging query parameter (`offset` or `limit`), falling
/// back to `default` when the key is absent. On a parse failure, returns a
/// ready-made 400 response carrying the same message the inline code
/// previously produced.
fn parse_paging_param(
    query: &HashMap<String, String>,
    name: &str,
    default: usize,
) -> Result<usize, HttpResponse> {
    match query.get(name) {
        Some(value) => value.parse::<usize>().map_err(|err| {
            HttpResponse::BadRequest().json(format!(
                "Invalid {} value passed: {}. Error: {}",
                name, value, err
            ))
        }),
        None => Ok(default),
    }
}
/// Fetches the proposal list on the blocking pool, applies the optional
/// management-type filter, slices the requested page, and renders a
/// `ListProposalsResponse` with paging links built from `link`.
fn query_list_proposals<A: AdminCommands + Clone + 'static>(
    admin_commands: web::Data<A>,
    link: String,
    filters: Option<String>,
    offset: Option<usize>,
    limit: Option<usize>,
) -> impl Future<Item = HttpResponse, Error = Error> {
    web::block(move || {
        let proposals = admin_commands
            .list_proposals()
            .map_err(|err| ProposalRouteError::InternalError(err.to_string()))?;
        // Missing offset pages from the start; missing limit returns all.
        let offset_value = offset.unwrap_or(0);
        let limit_value = limit.unwrap_or_else(|| proposals.total());
        if proposals.total() != 0 {
            if let Some(filter) = filters {
                // Filter first so total_count reflects the filtered set, then
                // take the requested page out of the filtered list.
                let filtered_proposals: Vec<CircuitProposal> = proposals
                    .filter(|(_, proposal)| proposal.circuit.circuit_management_type == filter)
                    .map(|(_, proposal)| proposal)
                    .collect();
                let total_count = filtered_proposals.len();
                let proposals_data: Vec<CircuitProposal> = filtered_proposals
                    .into_iter()
                    .skip(offset_value)
                    .take(limit_value)
                    .collect();
                Ok((proposals_data, link, limit, offset, total_count))
            } else {
                // Unfiltered: page straight off the iterator.
                let total_count = proposals.total();
                let proposals_data: Vec<CircuitProposal> = proposals
                    .skip(offset_value)
                    .take(limit_value)
                    .map(|(_, proposal)| proposal)
                    .collect();
                Ok((proposals_data, link, limit, offset, total_count))
            }
        } else {
            Ok((vec![], link, limit, offset, proposals.total()))
        }
    })
    .then(|res| match res {
        Ok((circuits, link, limit, offset, total_count)) => {
            Ok(HttpResponse::Ok().json(ListProposalsResponse {
                data: circuits,
                paging: get_response_paging_info(limit, offset, &link, total_count),
            }))
        }
        Err(err) => match err {
            BlockingError::Error(err) => match err {
                ProposalRouteError::InternalError(_) => {
                    error!("{}", err);
                    Ok(HttpResponse::InternalServerError().into())
                }
                ProposalRouteError::NotFound(err) => Ok(HttpResponse::NotFound().json(err)),
            },
            // Thread-pool cancellation and similar failures become a 500.
            _ => Ok(HttpResponse::InternalServerError().into()),
        },
    })
}
| 35.674877 | 96 | 0.51947 |
e529219b8cbd7405573fee486fcd81ed7d10418a | 144 | // ANCHOR: here
fn bar() -> ! {
    // -- partie masquée ici --
    // ANCHOR_END: here
    // (Hidden from the rendered book excerpt.) `bar` diverges: `panic!`
    // never returns, which is what satisfies the `!` ("never") return type.
    panic!();
    // ANCHOR: here
}
| 16 | 31 | 0.513889 |
ccd3bcecfe128357d6436d4a3e9dc87be1b756b6 | 14,698 | use super::{input::ParserInput, PResult, Parser};
use crate::error::{Error, ErrorKind};
use swc_atoms::js_word;
use swc_common::Span;
use swc_css_ast::*;
impl<I> Parser<I>
where
I: ParserInput,
{
    /// Parses a comma-separated selector list (e.g. `a, .b, #c`), skipping
    /// whitespace before each complex selector.
    pub(super) fn parse_selectors(&mut self) -> PResult<Vec<ComplexSelector>> {
        self.input.skip_ws()?;
        let first = self.parse_complex_selector()?;
        let mut buf = vec![first];
        loop {
            self.input.skip_ws()?;
            // Selectors are separated by commas; stop at the first non-comma.
            if !eat!(self, ",") {
                break;
            }
            let s = self.parse_complex_selector()?;
            buf.push(s);
        }
        Ok(buf)
    }
    /// Ported from `parseComplexSelector` of `esbuild`.
    ///
    /// Parses a sequence of compound selectors joined by combinators,
    /// stopping at EOF, `,`, or `{`.
    pub(super) fn parse_complex_selector(&mut self) -> PResult<ComplexSelector> {
        let start_pos = self.input.cur_span()?.lo;
        let sel = self.parse_compound_selector()?;
        let mut selectors = vec![sel];
        let mut last_pos;
        loop {
            // Remember the end of the previous compound selector so the final
            // span does not include trailing whitespace.
            last_pos = self.input.last_pos()?;
            self.input.skip_ws()?;
            if is_one_of!(self, EOF, ",", "{") {
                break;
            }
            let combinator = self.parse_combinator()?;
            if combinator.is_some() {
                self.input.skip_ws()?;
            }
            let sel = self.parse_compound_selector();
            match sel {
                Ok(mut sel) => {
                    // The combinator is stored on the selector it precedes.
                    sel.combinator = combinator;
                    selectors.push(sel);
                }
                Err(err) => return Err(err),
            }
        }
        Ok(ComplexSelector {
            span: Span::new(start_pos, last_pos, Default::default()),
            selectors,
        })
    }
pub(super) fn parse_combinator(&mut self) -> PResult<Option<SelectorCombinator>> {
if eat!(self, " ") {
return Ok(Some(SelectorCombinator::Descendant));
}
if eat!(self, "+") {
return Ok(Some(SelectorCombinator::NextSibling));
}
if eat!(self, ">") {
return Ok(Some(SelectorCombinator::Child));
}
if eat!(self, "~") {
return Ok(Some(SelectorCombinator::LaterSibling));
}
Ok(None)
}
    /// Consumes the current token if it is an identifier and returns it as a
    /// `Text` node; errors without consuming anything otherwise.
    fn parse_name_token(&mut self) -> PResult<Text> {
        let span = self.input.cur_span()?;
        match cur!(self) {
            Token::Ident(..) => {
                let token = bump!(self);
                match token {
                    Token::Ident(value) => return Ok(Text { span, value }),
                    _ => {
                        // cur!/bump! inspected the same token, so any other
                        // variant here is impossible.
                        unreachable!()
                    }
                }
            }
            _ => Err(Error::new(span, ErrorKind::Expected("Text"))),
        }
    }
    /// Parses one compound selector: an optional `&` nesting prefix
    /// (css-nesting extension), an optional (possibly namespaced) type
    /// selector, then any number of subclass selectors (`#id`, `.class`,
    /// `[attr]`, `:pseudo`, and `@name` when `allow_at_selctor` is set).
    /// Errors if the result would be completely empty.
    pub(super) fn parse_compound_selector(&mut self) -> PResult<CompoundSelector> {
        self.input.skip_ws()?;
        let span = self.input.cur_span()?;
        let start_pos = span.lo;
        let mut has_nest_prefix = false;
        // This is an extension: https://drafts.csswg.org/css-nesting-1/
        if eat!(self, "&") {
            has_nest_prefix = true;
        }
        let mut type_selector = None;
        match cur!(self) {
            tok!("|") | Token::Ident(..) | tok!("*") => {
                let mut ns_name_name;
                let mut ns_name_prefix = None;
                if !is!(self, "|") {
                    // No namespace prefix.
                    if eat!(self, "*") {
                        ns_name_name = Some(Text {
                            span,
                            value: "*".into(),
                        });
                    } else {
                        ns_name_name = Some(self.parse_name_token()?);
                    }
                } else {
                    // e.g.
                    // `|*`
                    // `|b`
                    // Leading `|`: synthesize an empty name at the start
                    // position; the real name follows after the bar.
                    ns_name_name = Some(Text {
                        span: Span::new(start_pos, start_pos, Default::default()),
                        value: js_word!(""),
                    });
                    bump!(self);
                    // TODO:
                    // nsName.Name.Kind = css_lexer.TIdent
                }
                if eat!(self, "|") {
                    // `prefix|name`: what we parsed so far becomes the
                    // namespace prefix and the element name follows.
                    if !is!(self, Ident) && !is!(self, "*") {
                        expect!(self, Ident);
                        return Err(Error::new(span, ErrorKind::InvalidTypeSelector));
                    }
                    ns_name_prefix = ns_name_name.take();
                    if eat!(self, "*") {
                        ns_name_name = Some(Text {
                            span,
                            value: "*".into(),
                        });
                    } else {
                        ns_name_name = Some(self.parse_name_token()?);
                    }
                }
                type_selector = Some(NamespacedName {
                    span: span!(self, start_pos),
                    prefix: ns_name_prefix,
                    name: ns_name_name.unwrap(),
                });
            }
            _ => {}
        }
        let mut subclass_selectors = vec![];
        'subclass_selectors: loop {
            match cur!(self) {
                Token::Hash { is_id, .. } => {
                    // A hash token that is not a valid id (e.g. starts with a
                    // digit) ends the compound selector.
                    if !*is_id {
                        break 'subclass_selectors;
                    }
                    subclass_selectors.push(self.parse_id_selector()?.into());
                }
                tok!(".") => {
                    subclass_selectors.push(self.parse_class_selector()?.into());
                }
                tok!("[") => {
                    let attr = self.parse_attr_selector()?;
                    subclass_selectors.push(attr.into());
                }
                tok!(":") => {
                    if peeked_is!(self, ":") {
                        // `::` — pseudo-elements; consume the rest of the
                        // pseudo chain and end the compound selector.
                        while is!(self, ":") {
                            let start = self.input.cur_span()?.lo;
                            let is_element = peeked_is!(self, ":");
                            if is_element {
                                bump!(self);
                            }
                            let mut pseudo = self.parse_pseudo_class_selector()?;
                            pseudo.span.lo = start;
                            pseudo.is_element = is_element;
                            subclass_selectors.push(SubclassSelector::PseudoClass(pseudo));
                        }
                        break 'subclass_selectors;
                    }
                    let pseudo = self.parse_pseudo_class_selector()?;
                    subclass_selectors.push(SubclassSelector::PseudoClass(pseudo));
                }
                Token::AtKeyword(..) if self.ctx.allow_at_selctor => {
                    let name = match bump!(self) {
                        Token::AtKeyword(kwd) => kwd,
                        _ => {
                            // Guarded by the AtKeyword match arm above.
                            unreachable!()
                        }
                    };
                    subclass_selectors.push(SubclassSelector::At(AtSelector {
                        span,
                        text: Text { span, value: name },
                    }));
                    break 'subclass_selectors;
                }
                _ => {
                    break 'subclass_selectors;
                }
            }
        }
        let span = span!(self, start_pos);
        // A compound selector must contain at least one of: nest prefix,
        // type selector, or a subclass selector.
        if !has_nest_prefix && type_selector.is_none() && subclass_selectors.len() == 0 {
            return Err(Error::new(span, ErrorKind::InvalidSelector));
        }
        Ok(CompoundSelector {
            span,
            has_nest_prefix,
            combinator: None,
            type_selector,
            subclass_selectors,
        })
    }
    /// Parses an ID selector (`#ident`); the caller guarantees the current
    /// token is a `Hash` token flagged as a valid id.
    fn parse_id_selector(&mut self) -> PResult<IdSelector> {
        let span = self.input.cur_span()?;
        let text = match bump!(self) {
            Token::Hash { value, .. } => Text { span, value },
            _ => {
                // Guarded by the caller's Hash check.
                unreachable!()
            }
        };
        Ok(IdSelector {
            span: span!(self, span.lo),
            text,
        })
    }
    /// Parses a class selector (`.ident`); the caller guarantees the current
    /// token is `.`.
    fn parse_class_selector(&mut self) -> PResult<ClassSelector> {
        let start = self.input.cur_span()?.lo;
        assert_eq!(*cur!(self), tok!("."));
        bump!(self);
        let text = self.parse_selector_text()?;
        Ok(ClassSelector {
            span: span!(self, start),
            text,
        })
    }
    /// Parses a pseudo-class selector after `:`, including functional forms
    /// like `:not(...)` whose raw argument tokens are captured via
    /// `parse_any_value`. The caller patches `span.lo`/`is_element` when this
    /// is actually a pseudo-element (`::`).
    fn parse_pseudo_class_selector(&mut self) -> PResult<PseudoSelector> {
        let start = self.input.cur_span()?.lo;
        expect!(self, ":"); // `:`
        self.input.skip_ws()?;
        if is!(self, Ident) && peeked_is!(self, "(") {
            // Functional pseudo-class: name followed by raw args in parens.
            let name = self.parse_selector_text()?;
            expect!(self, "(");
            let args = self.parse_any_value(false)?;
            expect!(self, ")");
            return Ok(PseudoSelector {
                span: span!(self, start),
                is_element: false,
                name,
                args,
            });
        }
        let name_start = self.input.cur_span()?.lo;
        let name = self.parse_selector_text()?;
        // NOTE(review): `parse_selector_text` has already consumed the name
        // identifier, so this `eat!(self, Ident)` looks at the *next* token;
        // when it is not an identifier the parsed name is discarded in favor
        // of an empty one — confirm this is the intended behavior.
        let name = if eat!(self, Ident) {
            name
        } else {
            Text {
                span: span!(self, name_start),
                value: js_word!(""),
            }
        };
        Ok(PseudoSelector {
            span: span!(self, start),
            is_element: false,
            name,
            args: Default::default(),
        })
    }
    /// Parses an attribute selector `[ns|name op value modifier]`, covering
    /// namespaced attribute names, the six match operators (`=`, `~=`, `|=`,
    /// `^=`, `$=`, `*=`), an identifier-or-string value, and the optional
    /// case-sensitivity modifier (`i`/`s`).
    fn parse_attr_selector(&mut self) -> PResult<AttrSelector> {
        let start_pos = self.input.cur_span()?.lo;
        expect!(self, "[");
        let name_start_pos = self.input.cur_span()?.lo;
        let mut ns_name_prefix = None;
        let mut ns_name_name;
        match cur!(self) {
            tok!("|") | tok!("*") => {
                // "[|x]"
                // "[*|x]"
                if eat!(self, "*") {
                    ns_name_prefix = Some(Text {
                        span: span!(self, name_start_pos),
                        value: "*".into(),
                    });
                } else {
                    // "[|attr]" is equivalent to "[attr]". From the
                    // specification: "In keeping with the
                    // Namespaces in the XML recommendation, default
                    // namespaces do not apply to attributes, therefore
                    // attribute selectors
                    // without a namespace component apply only to attributes
                    // that have no namespace (equivalent to
                    // |attr)."
                }
                expect!(self, "|");
                ns_name_name = self.parse_name_token()?;
            }
            _ => {
                // "[x]"
                // "[x|y]"
                // The peeked `=` check distinguishes the `|=` operator from a
                // namespace separator.
                ns_name_name = self.parse_name_token()?;
                if !peeked_is!(self, "=") && eat!(self, "|") {
                    ns_name_prefix = Some(ns_name_name);
                    ns_name_name = self.parse_name_token()?;
                }
            }
        }
        let name = NamespacedName {
            span: span!(self, name_start_pos),
            prefix: ns_name_prefix,
            name: ns_name_name,
        };
        self.input.skip_ws()?;
        let attr_op = if eat!(self, "=") {
            Some(AttrSelectorOp::Equals)
        } else {
            match cur!(self) {
                tok!("~") | tok!("|") | tok!("^") | tok!("$") | tok!("*") => {
                    // Two-token operators: prefix char followed by `=`.
                    let tok = bump!(self);
                    expect!(self, "=");
                    Some(match tok {
                        tok!("~") => AttrSelectorOp::Tilde,
                        tok!("|") => AttrSelectorOp::Bar,
                        tok!("^") => AttrSelectorOp::Caret,
                        tok!("$") => AttrSelectorOp::Dollar,
                        tok!("*") => AttrSelectorOp::Asterisk,
                        _ => {
                            // Guarded by the match arm above.
                            unreachable!()
                        }
                    })
                }
                _ => None,
            }
        };
        let mut matcher_value = None;
        let mut matcher_modifier = None;
        if let Some(..) = attr_op {
            // An operator requires a value: identifier or string.
            self.input.skip_ws()?;
            if !is!(self, Str) && !is!(self, Ident) {
                return Err(Error::new(
                    span!(self, start_pos),
                    ErrorKind::ExpectedIdentOrStrForAttrSelectorOp,
                ));
            }
            let _ = cur!(self);
            matcher_value = Some(self.parse_id_or_str_for_attr()?);
            self.input.skip_ws()?;
            if is!(self, Ident) {
                // Optional case-sensitivity modifier: `i` or `s` (any case).
                match self.input.cur()? {
                    Some(Token::Ident(s)) => {
                        if (&**s).eq_ignore_ascii_case("i") || (&**s).eq_ignore_ascii_case("s") {
                            matcher_modifier = s.chars().next();
                            bump!(self);
                        }
                    }
                    _ => {}
                }
            }
        }
        expect!(self, "]");
        Ok(AttrSelector {
            span: span!(self, start_pos),
            name,
            op: attr_op,
            value: matcher_value,
            modifier: matcher_modifier,
        })
    }
fn parse_selector_text(&mut self) -> PResult<Text> {
let span = self.input.cur_span()?;
match cur!(self) {
Token::Ident { .. } => {}
_ => Err(Error::new(span, ErrorKind::ExpectedSelectorText))?,
}
let value = bump!(self);
let value = match value {
Token::Ident(value) => value,
_ => unreachable!(),
};
Ok(Text { span, value })
}
    /// Parses the attribute-matcher value, which may be either a bare
    /// identifier or a quoted string; the caller has already verified the
    /// current token is one of those.
    fn parse_id_or_str_for_attr(&mut self) -> PResult<Text> {
        let span = self.input.cur_span()?;
        match cur!(self) {
            Token::Ident { .. } => {
                let value = bump!(self);
                let value = match value {
                    Token::Ident(value) => value,
                    _ => unreachable!(),
                };
                Ok(Text { span, value })
            }
            Token::Str { .. } => {
                let value = bump!(self);
                let value = match value {
                    Token::Str { value } => value,
                    _ => unreachable!(),
                };
                Ok(Text { span, value })
            }
            _ => Err(Error::new(
                span,
                ErrorKind::ExpectedIdentOrStrForAttrSelectorOp,
            ))?,
        }
    }
}
| 29.220676 | 97 | 0.409239 |
ffd3a937c854d8bb6299c4cf062c6365c46bbc35 | 1,877 | macro_rules! mif {
    // Plain form: emit the block guarded by the runtime condition.
    ($condition:expr ; $true_block:tt) => {
        if $condition {
            $true_block
        }
    };
    // Compile-time override: a literal marker forces the true block to be
    // emitted unconditionally ($condition is matched but never evaluated).
    ($condition:expr ; $always_true:literal ; $true_block:tt) => {
        $true_block
    };
    // Plain if/else form with a runtime condition.
    ($condition:expr ; $true_block:tt else $false_block:tt) => {
        if $condition {
            $true_block
        } else {
            $false_block
        }
    };
    // Compile-time selection of only the true branch.
    ($condition:expr ; $always_true:literal exec_true_branch ; $true_block:tt else $false_block:tt) => {
        $true_block
    };
    // Compile-time selection of only the false branch.
    ($condition:expr ; $always_false:literal exec_false_branch ; $true_block:tt else $false_block:tt) => {
        $false_block
    };
}
/// Branch choice made purely at expansion time: a literal marker selects the
/// true block, otherwise the false block is emitted. `$condition` is matched
/// by the macro but never evaluated at runtime.
macro_rules! exclusive_if {
    ($condition:expr ; $always_true:literal ; $true_block:tt else $false_block:tt) => {
        $true_block
    };
    ($condition:expr ; $true_block:tt else $false_block:tt) => {
        $false_block
    };
}
/// Emits its block only when a leading literal marker is present; without the
/// marker, the block is discarded entirely at expansion time.
macro_rules! show_block {
    ($show_block:literal ; $block:tt) => {
        $block
    };
    ($block:tt) => {};
}
/// Counterpart of `show_block`: passes its block through unchanged, unless a
/// leading literal marker is given, in which case the block is suppressed at
/// expansion time.
macro_rules! hide_block {
    // Marker form: `hide_block!(true ; { ... })` expands to nothing.
    ($hide_block:literal ; $block:tt) => {};
    // Plain form: the block is emitted as-is.
    ($block:tt) => {
        $block
    };
}
/// Expansion-time dispatch on a cardinality keyword: the leading ident
/// (`single` or `multiple`) selects which of the two labelled blocks is
/// emitted; the other block is discarded.
macro_rules! match_align_func {
    (single {
        single => $b1:tt
        multiple => $b2:tt
    }) => {
        $b1
    };
    (multiple {
        single => $b1:tt
        multiple => $b2:tt
    }) => {
        $b2
    };
}
/// Expansion-time dispatch on the object kind keyword (`blankobject` or
/// `idobject`): the leading ident selects which labelled block survives.
macro_rules! match_object_prop {
    (blankobject {
        blankobject => $b1:tt
        idobject => $b2:tt
    }) => {
        $b1
    };
    (idobject {
        blankobject => $b1:tt
        idobject => $b2:tt
    }) => {
        $b2
    };
}
/// Expansion-time dispatch on the subject kind keyword (`blanksubject`,
/// `internalidsubject`, or `externalidsubject`): the leading ident selects
/// which of the three labelled blocks is emitted.
macro_rules! match_subj_prop {
    (blanksubject {
        blanksubject => $b1:tt
        internalidsubject => $b2:tt
        externalidsubject => $b3:tt
    }) => {
        $b1
    };
    (internalidsubject {
        blanksubject => $b1:tt
        internalidsubject => $b2:tt
        externalidsubject => $b3:tt
    }) => {
        $b2
    };
    (externalidsubject {
        blanksubject => $b1:tt
        internalidsubject => $b2:tt
        externalidsubject => $b3:tt
    }) => {
        $b3
    };
} | 19.153061 | 104 | 0.571124 |
286f997722baee912c094479b464783cfafbff23 | 22,748 | /* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.
Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,
the dvipdfmx project team.
Copyright (C) 1998, 1999 by Mark A. Wicks <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*/
#![allow(
non_camel_case_types,
non_snake_case,
unused_mut
)]
use crate::DisplayExt;
use std::ffi::CStr;
use super::util::{spc_util_read_colorspec, spc_util_read_numbers};
use crate::dpx_dpxutil::parse_c_ident;
use crate::dpx_fontmap::{
is_pdfm_mapline, pdf_append_fontmap_record, pdf_clear_fontmap_record, pdf_init_fontmap_record,
pdf_insert_fontmap_record, pdf_load_fontmap_file, pdf_read_fontmap_line,
pdf_remove_fontmap_record,
};
use crate::dpx_mem::{new, xrealloc};
use crate::dpx_mfileio::work_buffer_u8 as WORK_BUFFER;
use crate::dpx_pdfdev::{pdf_dev_reset_color, pdf_dev_reset_fonts};
use crate::dpx_pdfdoc::{
pdf_doc_add_page_content, pdf_doc_add_page_content_ptr, pdf_doc_set_bgcolor,
};
use crate::dpx_pdfdraw::{
pdf_dev_concat, pdf_dev_get_fixed_point, pdf_dev_grestore, pdf_dev_gsave,
pdf_dev_set_fixed_point,
};
use crate::dpx_pdfparse::{parse_ident, parse_val_ident, skip_white};
use crate::shims::sprintf;
use crate::spc_warn;
use crate::streq_ptr;
use libc::{free, memcmp, strlen, strncmp, strncpy};
pub type size_t = u64;
use super::{spc_arg, spc_env};
pub type spc_handler_fn_ptr = Option<unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32>;
use super::spc_handler;
use crate::dpx_fontmap::fontmap_rec;
use crate::dpx_pdfdev::pdf_coord;
use crate::dpx_pdfdev::pdf_tmatrix;
pub use crate::dpx_pdfcolor::PdfColor;
/* tectonic/core-strutils.h: miscellaneous C string utilities
Copyright 2016-2018 the Tectonic Project
Licensed under the MIT License.
*/
/* Note that we explicitly do *not* change this on Windows. For maximum
* portability, we should probably accept *either* forward or backward slashes
* as directory separators. */
/// Concatenates the affine transform [a b c d e f] onto the current device
/// transformation matrix, with e/f re-derived so that (x_user, y_user) is the
/// pivot of the transform, then re-registers that point as the device fixed
/// point. Always returns 0.
#[no_mangle]
pub unsafe extern "C" fn spc_handler_xtx_do_transform(
    mut x_user: f64,
    mut y_user: f64,
    mut a: f64,
    mut b: f64,
    mut c: f64,
    mut d: f64,
    mut e: f64,
    mut f: f64,
) -> i32 {
    let mut M = pdf_tmatrix::new();
    /* Create transformation matrix */
    M.a = a;
    M.b = b;
    M.c = c;
    M.d = d;
    // Choose the translation so the user position maps onto itself.
    M.e = (1.0f64 - M.a) * x_user - M.c * y_user + e;
    M.f = (1.0f64 - M.d) * y_user - M.b * x_user + f;
    pdf_dev_concat(&mut M);
    let pt = pdf_dev_get_fixed_point();
    pdf_dev_set_fixed_point(x_user - pt.x, y_user - pt.y);
    0i32
}
/// Handler for `x:scale sx sy`: scales the page content about the current
/// user position. Returns -1 when fewer than two numbers are supplied.
unsafe extern "C" fn spc_handler_xtx_scale(mut spe: *mut spc_env, mut args: *mut spc_arg) -> i32 {
    let mut values: [f64; 2] = [0.; 2];
    if spc_util_read_numbers(&mut *values.as_mut_ptr().offset(0), 2i32, args) < 2i32 {
        return -1i32;
    }
    // Arguments fully consumed; apply a pure scale (b = c = e = f = 0).
    (*args).curptr = (*args).endptr;
    return spc_handler_xtx_do_transform(
        (*spe).x_user,
        (*spe).y_user,
        values[0],
        0i32 as f64,
        0i32 as f64,
        values[1],
        0i32 as f64,
        0i32 as f64,
    );
}
/* Scaling without gsave/grestore. */
static mut SCALE_FACTORS: *mut pdf_coord = 0 as *const pdf_coord as *mut pdf_coord;
static mut SCALE_FACTOR_COUNT: i32 = -1i32;
/// Handler for `x:bscale sx sy`: begins a scaled region. Pushes the
/// reciprocal factors onto the SCALE_FACTORS stack so a later `x:escale`
/// can undo the scaling, then scales about the current user position.
unsafe extern "C" fn spc_handler_xtx_bscale(mut spe: *mut spc_env, mut args: *mut spc_arg) -> i32 {
    let mut values: [f64; 2] = [0.; 2];
    // NOTE(review): the stack index is incremented before the arguments are
    // validated; a malformed bscale leaves a stale slot that a later escale
    // will pop — confirm this imbalance is acceptable upstream.
    SCALE_FACTOR_COUNT += 1;
    // Grow the factor stack in chunks of 16 entries.
    if SCALE_FACTOR_COUNT & 0xfi32 == 0 {
        SCALE_FACTORS = xrealloc(
            SCALE_FACTORS as *mut libc::c_void,
            ((SCALE_FACTOR_COUNT + 16i32) as u64)
                .wrapping_mul(::std::mem::size_of::<pdf_coord>() as u64),
        ) as *mut pdf_coord
    }
    if spc_util_read_numbers(&mut *values.as_mut_ptr().offset(0), 2i32, args) < 2i32 {
        return -1i32;
    }
    // Reject factors too close to zero: their reciprocals would blow up.
    if values[0].abs() < 1.0e-7f64 || values[1].abs() < 1.0e-7f64 {
        return -1i32;
    }
    (*SCALE_FACTORS.offset(SCALE_FACTOR_COUNT as isize)).x = 1i32 as f64 / values[0];
    (*SCALE_FACTORS.offset(SCALE_FACTOR_COUNT as isize)).y = 1i32 as f64 / values[1];
    (*args).curptr = (*args).endptr;
    return spc_handler_xtx_do_transform(
        (*spe).x_user,
        (*spe).y_user,
        values[0],
        0i32 as f64,
        0i32 as f64,
        values[1],
        0i32 as f64,
        0i32 as f64,
    );
}
/// Handler for `x:escale`: ends a scaled region by popping the reciprocal
/// factors recorded by `x:bscale` and applying them. (The `fresh0` binding is
/// a mechanical translation of C's post-decrement of the stack index.)
unsafe extern "C" fn spc_handler_xtx_escale(mut spe: *mut spc_env, mut args: *mut spc_arg) -> i32 {
    let fresh0 = SCALE_FACTOR_COUNT;
    SCALE_FACTOR_COUNT = SCALE_FACTOR_COUNT - 1;
    let mut factor: pdf_coord = *SCALE_FACTORS.offset(fresh0 as isize);
    (*args).curptr = (*args).endptr;
    return spc_handler_xtx_do_transform(
        (*spe).x_user,
        (*spe).y_user,
        factor.x,
        0i32 as f64,
        0i32 as f64,
        factor.y,
        0i32 as f64,
        0i32 as f64,
    );
}
/// Handler for `x:rotate <degrees>`: rotates the page content about the
/// current user position. Returns -1 when no number could be read.
unsafe extern "C" fn spc_handler_xtx_rotate(mut spe: *mut spc_env, mut args: *mut spc_arg) -> i32 {
    let mut value: f64 = 0.;
    if spc_util_read_numbers(&mut value, 1i32, args) < 1i32 {
        return -1i32;
    }
    (*args).curptr = (*args).endptr;
    // Degrees -> radians, then build the standard rotation matrix
    // [cos sin -sin cos 0 0].
    let (s, c) = (value * core::f64::consts::PI / 180.).sin_cos();
    spc_handler_xtx_do_transform(
        (*spe).x_user,
        (*spe).y_user,
        c,
        s,
        -s,
        c,
        0i32 as f64,
        0i32 as f64,
    )
}
/// Handler for `x:gsave`: saves the current graphics state.
#[no_mangle]
pub unsafe extern "C" fn spc_handler_xtx_gsave(
    mut _spe: *mut spc_env,
    mut _args: *mut spc_arg,
) -> i32 {
    pdf_dev_gsave();
    0i32
}
/// Handler for `x:grestore`: restores the graphics state saved by `x:gsave`
/// and forces font/color state to be re-emitted.
#[no_mangle]
pub unsafe extern "C" fn spc_handler_xtx_grestore(
    mut _spe: *mut spc_env,
    mut _args: *mut spc_arg,
) -> i32 {
    pdf_dev_grestore();
    /*
     * Unfortunately, the following line is necessary in case
     * of a font or color change inside of the save/restore pair.
     * Anything that was done there must be redone, so in effect,
     * we make no assumptions about what fonts. We act like we are
     * starting a new page.
     */
    pdf_dev_reset_fonts(0i32);
    pdf_dev_reset_color(0i32);
    0i32
}
/* Please remove this.
 * This should be handled before processing pages!
 */
/// Handler for `x:papersize`: intentionally a no-op here — paper size is
/// dealt with before page processing begins.
unsafe extern "C" fn spc_handler_xtx_papersize(
    mut _spe: *mut spc_env,
    mut _args: *mut spc_arg,
) -> i32 {
    0i32
}
/// Handler for `x:backgroundcolor <color>`: parses a color specification and
/// installs it as the document page background. Returns 1 on success, -1 when
/// no valid color could be read.
unsafe extern "C" fn spc_handler_xtx_backgroundcolor(
    mut spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    if let Ok(colorspec) = spc_util_read_colorspec(spe, args, false) {
        pdf_doc_set_bgcolor(Some(&colorspec));
        1
    } else {
        spc_warn!(spe, "No valid color specified?");
        -1
    }
}
/* FIXME: xdv2pdf's x:fontmapline and x:fontmapfile may have slightly different syntax/semantics */
/// Handler for `x:fontmapline`: removes (`-`), appends (`+`), or replaces
/// (no prefix) a single fontmap record given in fontmap-line syntax.
unsafe extern "C" fn spc_handler_xtx_fontmapline(
    mut spe: *mut spc_env,
    mut ap: *mut spc_arg,
) -> i32 {
    let mut error: i32 = 0i32;
    static mut BUFFER: [i8; 1024] = [0; 1024];
    skip_white(&mut (*ap).curptr, (*ap).endptr);
    if (*ap).curptr >= (*ap).endptr {
        spc_warn!(spe, "Empty fontmapline special?");
        return -1i32;
    }
    // Optional leading mode character: '-' removes, '+' appends.
    let opchr = *(*ap).curptr.offset(0);
    if opchr as i32 == '-' as i32 || opchr as i32 == '+' as i32 {
        (*ap).curptr = (*ap).curptr.offset(1)
    }
    skip_white(&mut (*ap).curptr, (*ap).endptr);
    match opchr as i32 {
        45 => {
            // '-' (ASCII 45): remove the record for the given TFM name.
            let map_name = parse_ident(&mut (*ap).curptr, (*ap).endptr);
            if !map_name.is_null() {
                pdf_remove_fontmap_record(map_name);
                free(map_name as *mut libc::c_void);
            } else {
                spc_warn!(spe, "Invalid fontmap line: Missing TFM name.");
                error = -1i32
            }
        }
        _ => {
            // Copy the remaining argument text into a NUL-terminated buffer
            // for the C-style fontmap line parser.
            // NOTE(review): BUFFER is 1024 bytes but this copy loop has no
            // bounds check; a longer special would overflow — confirm specials
            // are length-limited upstream.
            let mut p = (*ap).curptr;
            let mut q = BUFFER.as_mut_ptr();
            while p < (*ap).endptr {
                let fresh1 = p;
                p = p.offset(1);
                let fresh2 = q;
                q = q.offset(1);
                *fresh2 = *fresh1
            }
            *q = '\u{0}' as i32 as i8;
            let mrec = new((1_u64).wrapping_mul(::std::mem::size_of::<fontmap_rec>() as u64) as u32)
                as *mut fontmap_rec;
            pdf_init_fontmap_record(mrec);
            error = pdf_read_fontmap_line(
                mrec,
                BUFFER.as_mut_ptr(),
                (*ap).endptr.wrapping_offset_from((*ap).curptr) as i64 as i32,
                is_pdfm_mapline(BUFFER.as_mut_ptr()),
            );
            if error != 0 {
                spc_warn!(spe, "Invalid fontmap line.");
            } else if opchr as i32 == '+' as i32 {
                // '+': append to any existing record chain for this name.
                pdf_append_fontmap_record((*mrec).map_name, mrec);
            } else {
                // No mode char: insert/replace the record.
                pdf_insert_fontmap_record((*mrec).map_name, mrec);
            }
            pdf_clear_fontmap_record(mrec);
            free(mrec as *mut libc::c_void);
        }
    }
    // On success, mark the whole argument string as consumed.
    if error == 0 {
        (*ap).curptr = (*ap).endptr
    }
    0i32
}
/// Handler for `x:fontmapfile [+|-]<file>`: loads a fontmap file, with the
/// optional leading character selecting the mode passed on to
/// `pdf_load_fontmap_file`.
unsafe extern "C" fn spc_handler_xtx_fontmapfile(
    mut spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    skip_white(&mut (*args).curptr, (*args).endptr);
    if (*args).curptr >= (*args).endptr {
        return 0i32;
    }
    let mode = match *(*args).curptr.offset(0) as i32 {
        45 => {
            // '-' (ASCII 45)
            (*args).curptr = (*args).curptr.offset(1);
            '-' as i32
        }
        43 => {
            // '+' (ASCII 43)
            (*args).curptr = (*args).curptr.offset(1);
            '+' as i32
        }
        _ => 0,
    };
    let mapfile = parse_val_ident(&mut (*args).curptr, (*args).endptr);
    if mapfile.is_null() {
        spc_warn!(spe, "No fontmap file specified.");
        -1
    } else {
        pdf_load_fontmap_file(mapfile, mode)
    }
}
static mut OVERLAY_NAME: [i8; 256] = [0; 256];
/// Handler for `x:initoverlay <name>`: records the active overlay name in the
/// static OVERLAY_NAME buffer for later `x:clipoverlay` comparisons.
unsafe extern "C" fn spc_handler_xtx_initoverlay(
    mut _spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    skip_white(&mut (*args).curptr, (*args).endptr);
    if (*args).curptr >= (*args).endptr {
        return -1i32;
    }
    // NOTE(review): OVERLAY_NAME is 256 bytes but the copy length comes from
    // the argument with no bound check — confirm specials are length-limited
    // upstream, otherwise this can overflow.
    strncpy(
        OVERLAY_NAME.as_mut_ptr(),
        (*args).curptr,
        (*args).endptr.wrapping_offset_from((*args).curptr) as _,
    );
    // NUL-terminate at the copied length.
    OVERLAY_NAME[(*args).endptr.wrapping_offset_from((*args).curptr) as i64 as usize] = 0_i8;
    (*args).curptr = (*args).endptr;
    0i32
}
/// Handler for `x:clipoverlay <name>`: restores then re-saves the graphics
/// state and, when the name matches neither the active overlay nor "all",
/// emits an empty clipping path (`0 0 m W n`) so following content is
/// clipped away entirely.
unsafe extern "C" fn spc_handler_xtx_clipoverlay(
    mut _spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    skip_white(&mut (*args).curptr, (*args).endptr);
    if (*args).curptr >= (*args).endptr {
        return -1i32;
    }
    pdf_dev_grestore();
    pdf_dev_gsave();
    if strncmp(
        OVERLAY_NAME.as_mut_ptr(),
        (*args).curptr,
        strlen(OVERLAY_NAME.as_mut_ptr()),
    ) != 0i32
        && strncmp(
            b"all\x00" as *const u8 as *const i8,
            (*args).curptr,
            strlen(b"all\x00" as *const u8 as *const i8),
        ) != 0i32
    {
        pdf_doc_add_page_content(b" 0 0 m W n");
    }
    (*args).curptr = (*args).endptr;
    0i32
}
/// Handler for `x:renderingmode <n> [raw content]`: validates the PDF text
/// rendering mode (0-7), emits `n Tr`, and copies any trailing argument text
/// verbatim into the page content stream.
unsafe extern "C" fn spc_handler_xtx_renderingmode(
    mut spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    let mut value: f64 = 0.;
    if spc_util_read_numbers(&mut value, 1i32, args) < 1i32 {
        return -1i32;
    }
    // PDF defines text rendering modes 0 through 7.
    if (value as i32) < 0i32 || value as i32 > 7i32 {
        spc_warn!(spe, "Invalid text rendering mode {}.\n", value as i32,);
        return -1i32;
    }
    sprintf(
        WORK_BUFFER.as_mut_ptr() as *mut i8,
        b" %d Tr\x00" as *const u8 as *const i8,
        value as i32,
    );
    pdf_doc_add_page_content(
        CStr::from_bytes_with_nul(&WORK_BUFFER[..])
            .unwrap()
            .to_bytes(),
    );
    skip_white(&mut (*args).curptr, (*args).endptr);
    if (*args).curptr < (*args).endptr {
        // Pass any remaining argument text straight into the content stream.
        pdf_doc_add_page_content(b" ");
        pdf_doc_add_page_content_ptr(
            (*args).curptr,
            (*args).endptr.wrapping_offset_from((*args).curptr) as i64 as u32,
        );
    }
    (*args).curptr = (*args).endptr;
    0i32
}
/// Warns that this xetex-style x: color special is unsupported and suggests
/// \special{color} instead; the argument is consumed so processing continues.
unsafe extern "C" fn spc_handler_xtx_unsupportedcolor(
    mut spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    spc_warn!(
        spe,
        "xetex-style \\special{{x:{}}} is not supported by this driver;\nupdate document or driver to use \\special{{color}} instead.",
        CStr::from_ptr((*args).command).display(),
    );
    (*args).curptr = (*args).endptr;
    0i32
}
/// Warns that this xetex-style x: special is unsupported; the argument is
/// consumed so processing continues.
unsafe extern "C" fn spc_handler_xtx_unsupported(
    mut spe: *mut spc_env,
    mut args: *mut spc_arg,
) -> i32 {
    spc_warn!(
        spe,
        "xetex-style \\special{{x:{}}} is not supported by this driver.",
        CStr::from_ptr((*args).command).display(),
    );
    (*args).curptr = (*args).endptr;
    0i32
}
static mut XTX_HANDLERS: [spc_handler; 21] = {
[
{
let mut init = spc_handler {
key: b"textcolor\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"textcolorpush\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"textcolorpop\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"rulecolor\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"rulecolorpush\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"rulecolorpop\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupportedcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"papersize\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_papersize
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"backgroundcolor\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_backgroundcolor
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"gsave\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_gsave
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"grestore\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_grestore
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"scale\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_scale
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"bscale\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_bscale
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"escale\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_escale
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"rotate\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_rotate
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"fontmapline\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_fontmapline
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"fontmapfile\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_fontmapfile
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"shadow\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupported
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"colorshadow\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_unsupported
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"renderingmode\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_renderingmode
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"initoverlay\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_initoverlay
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
{
let mut init = spc_handler {
key: b"clipoverlay\x00" as *const u8 as *const i8,
exec: Some(
spc_handler_xtx_clipoverlay
as unsafe extern "C" fn(_: *mut spc_env, _: *mut spc_arg) -> i32,
),
};
init
},
]
};
/// Returns `true` when the first `len` bytes at `buf`, after optional
/// leading whitespace, start with the "x:" prefix that marks an XeTeX
/// special.
#[no_mangle]
pub unsafe extern "C" fn spc_xtx_check_special(buf: *const i8, len: i32) -> bool {
    // Prefix that identifies an "x:" special; length computed once.
    let prefix = b"x:\x00" as *const u8 as *const i8;
    let prefix_len = strlen(prefix);
    let endptr = buf.offset(len as isize);
    let mut p = buf;
    // Leading whitespace is not significant.
    skip_white(&mut p, endptr);
    // Match only when enough bytes remain and they compare equal to "x:".
    p.offset(prefix_len as isize) <= endptr
        && memcmp(
            p as *const libc::c_void,
            prefix as *const libc::c_void,
            prefix_len,
        ) == 0
}
/// Dispatches an "x:" special to its handler.
///
/// Strips the leading whitespace and the "x:" prefix from `ap`, parses the
/// command keyword that follows, and looks it up in the `XTX_HANDLERS`
/// table. On a match, fills in `sph` (key/exec) and `(*ap).command` and
/// returns 0; returns -1 when the prefix or the keyword is not recognized.
#[no_mangle]
pub unsafe extern "C" fn spc_xtx_setup_handler(
    mut sph: *mut spc_handler,
    mut spe: *mut spc_env,
    mut ap: *mut spc_arg,
) -> i32 {
    let mut error: i32 = -1i32;
    assert!(!sph.is_null() && !spe.is_null() && !ap.is_null());
    skip_white(&mut (*ap).curptr, (*ap).endptr);
    // Re-check the "x:" prefix; spc_xtx_check_special should already have
    // accepted this buffer, so a mismatch here is a caller bug.
    // NOTE(review): the `>=` bound rejects a special that is exactly "x:"
    // with nothing after it, while spc_xtx_check_special uses `<=` —
    // confirm this asymmetry is intended (it matches the transpiled C).
    if (*ap)
        .curptr
        .offset(strlen(b"x:\x00" as *const u8 as *const i8) as isize)
        >= (*ap).endptr
        || memcmp(
            (*ap).curptr as *const libc::c_void,
            b"x:\x00" as *const u8 as *const i8 as *const libc::c_void,
            strlen(b"x:\x00" as *const u8 as *const i8),
        ) != 0
    {
        spc_warn!(spe, "Not x: special???");
        return -1i32;
    }
    // Advance past the "x:" prefix and any whitespace before the keyword.
    (*ap).curptr = (*ap)
        .curptr
        .offset(strlen(b"x:\x00" as *const u8 as *const i8) as isize);
    skip_white(&mut (*ap).curptr, (*ap).endptr);
    // Parse the command keyword as a C identifier; freed below.
    let q = parse_c_ident(&mut (*ap).curptr, (*ap).endptr);
    if !q.is_null() {
        // Linear scan over the 21-entry handler table.
        for i in 0..(::std::mem::size_of::<[spc_handler; 21]>() as u64)
            .wrapping_div(::std::mem::size_of::<spc_handler>() as u64)
        {
            if streq_ptr(q, XTX_HANDLERS[i as usize].key) {
                (*ap).command = XTX_HANDLERS[i as usize].key;
                (*sph).key = b"x:\x00" as *const u8 as *const i8;
                (*sph).exec = XTX_HANDLERS[i as usize].exec;
                skip_white(&mut (*ap).curptr, (*ap).endptr);
                error = 0i32;
                break;
            }
        }
        free(q as *mut libc::c_void);
    }
    error
}
| 32.683908 | 135 | 0.527827 |
3a4786507de1b70c44671bc3a028060cca0fbe86 | 1,744 | use crate::common::{get_individual, store_is_completed_into};
use crate::Context;
use std::error::Error;
use v_module::module::Module;
use v_onto::individual::Individual;
/// Processes a BPMN decision form once a decision has been taken.
///
/// Runs only for the "?" signal. Validates that the form carries a
/// `v-wf:takenDecision` whose `rdf:type` is one of the declared
/// `v-wf:possibleDecisionClass` values; on success marks both the form and
/// its `bpmn:hasWorkOrder` (if any) as completed. All validation failures
/// are logged and treated as a no-op (`Ok(())`); only storage/lookup
/// failures propagate as `Err`.
pub fn prepare_decision_form(decision_form: &mut Individual, ctx: &mut Context, module: &mut Module, signal: &str) -> Result<(), Box<dyn Error>> {
    // Only the "?" signal triggers processing.
    if signal != "?" {
        return Ok(());
    }
    let taken_decision_uri = match decision_form.get_first_literal("v-wf:takenDecision") {
        Some(uri) => uri,
        None => return Ok(()),
    };
    let possible_decisions = match decision_form.get_literals("v-wf:possibleDecisionClass") {
        Some(classes) => classes,
        None => return Ok(()),
    };
    let mut taken_decision = get_individual(module, &taken_decision_uri)?;
    let tdt = match taken_decision.get_first_literal("rdf:type") {
        Some(t) => t,
        None => {
            error!("individual {} not content type", taken_decision_uri);
            return Ok(());
        }
    };
    // The taken decision's type must be one of the declared possibilities.
    if !possible_decisions.contains(&tdt) {
        error!("v-wf:takenDecision not content variant of v-wf:possibleDecisionClass");
        return Ok(());
    }
    info!("PREPARE DECISION FORM {}", decision_form.get_id());
    if let Some(wo_uri) = decision_form.get_first_literal("bpmn:hasWorkOrder") {
        store_is_completed_into(decision_form.get_id(), true, "prepare-decision-form", &ctx.sys_ticket, module)?;
        store_is_completed_into(&wo_uri, true, "prepare-decision-form", &ctx.sys_ticket, module)?;
    }
    Ok(())
}
/// Returns `true` when any of the given `rdf:type` values identifies a
/// BPMN decision form.
pub(crate) fn is_decision_form(rdf_types: &[String]) -> bool {
    rdf_types.iter().any(|t| t == "bpmn:DecisionForm")
}
| 32.296296 | 146 | 0.662844 |
fb7f895b5d1e036538b09e5ed1f1879bc4b16691 | 3,303 | //! Normalized, validated representation of the App Manifest.
//!
//! The versioned manifest structs are designed to be deserialized from YAML,
//! and so they contain various optional fields. They are not validated, and
//! may contain various invalid combinations of data. In contrast, these types
//! are structured to ensure validity, and are used internally by Holochain.
use super::error::{AppManifestError, AppManifestResult};
use crate::app::app_manifest::current::{DnaLocation, DnaVersionSpec};
use crate::prelude::{CellNick, YamlProperties};
use std::collections::HashMap;
/// Normalized, validated representation of the App Manifest.
///
/// Instances are only created through `AppManifestValidated::new`, which
/// enforces the internal consistency checks; the fields are deliberately
/// private to the `crate::app` module.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AppManifestValidated {
    /// Name of the App. This may be used as the installed_app_id.
    pub(in crate::app) name: String,
    /// The slot descriptions that make up this app, keyed by cell nick.
    pub(in crate::app) slots: HashMap<CellNick, AppSlotManifestValidated>,
}
impl AppManifestValidated {
    /// Constructor with internal consistency checks.
    ///
    /// Rejects any `Disabled` slot whose `clone_limit` is zero, since such
    /// a slot could never produce a cell.
    ///
    /// NB: never make this struct's fields public. This constructor should
    /// be the only way to instantiate this type.
    pub(in crate::app) fn new(
        name: String,
        slots: HashMap<CellNick, AppSlotManifestValidated>,
    ) -> AppManifestResult<Self> {
        let mut offending_nick = None;
        for (nick, cell) in slots.iter() {
            // Literal pattern: only Disabled slots with clone_limit == 0 match.
            if let AppSlotManifestValidated::Disabled { clone_limit: 0, .. } = cell {
                offending_nick = Some(nick.to_owned());
                break;
            }
        }
        match offending_nick {
            Some(nick) => Err(AppManifestError::InvalidStrategyDisabled(nick)),
            None => Ok(AppManifestValidated { name, slots }),
        }
    }
}
/// Rules to determine if and how a Cell will be created for this Dna.
///
/// Each variant carries a `clone_limit` bounding how many clones of the
/// slot may be created after installation; `deferred` (where present)
/// controls whether provisioning happens at install time.
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AppSlotManifestValidated {
    /// Always create a new Cell when installing this App
    Create {
        clone_limit: u32,
        deferred: bool,
        location: DnaLocation,
        properties: Option<YamlProperties>,
        uuid: Option<String>, // TODO: use UUID
        version: Option<DnaVersionSpec>,
    },
    /// Always create a new Cell when installing the App,
    /// and use a unique UUID to ensure a distinct DHT network
    CreateClone {
        clone_limit: u32,
        deferred: bool,
        location: DnaLocation,
        properties: Option<YamlProperties>,
        version: Option<DnaVersionSpec>,
    },
    /// Require that a Cell is already installed which matches the DNA version
    /// spec, and which has an Agent that's associated with this App's agent
    /// via DPKI. If no such Cell exists, *app installation fails*.
    UseExisting {
        clone_limit: u32,
        deferred: bool,
        version: DnaVersionSpec,
    },
    /// Try `UseExisting`, and if that fails, fallback to `Create`
    CreateIfNotExists {
        clone_limit: u32,
        deferred: bool,
        location: DnaLocation,
        properties: Option<YamlProperties>,
        uuid: Option<String>, // TODO: use UUID
        version: DnaVersionSpec,
    },
    /// Disallow provisioning altogether. In this case, we expect
    /// `clone_limit > 0`: otherwise, no cells will ever be created.
    /// (`AppManifestValidated::new` enforces this invariant.)
    Disabled {
        version: DnaVersionSpec,
        clone_limit: u32,
    },
}
| 37.11236 | 91 | 0.660309 |
dddd1f027bf3f84c1b38953b21e5bd3d2d4c935d | 2,419 | use super::entities::{Entities, Entity, EntityId};
use super::system::InfallibleSystem;
use idcontain::IdMapVec;
use math::{self, Mat4, Rad};
/// Parameters of a perspective projection; converted into a `Mat4` via
/// `math::perspective` (see the `Into<Mat4>` impl below).
#[derive(Copy, Clone, Debug)]
pub struct Projection {
    /// Field-of-view angle, passed straight to `math::perspective`.
    pub fov: Rad<f32>,
    /// Viewport width-to-height ratio.
    pub aspect_ratio: f32,
    /// Distance to the near clipping plane.
    pub near: f32,
    /// Distance to the far clipping plane.
    pub far: f32,
}
/// System that stores, per entity, a projection together with its cached
/// projection matrix.
pub struct Projections {
    // Entity -> (projection, precomputed matrix).
    map: IdMapVec<Entity, StoredProjection>,
}
impl Projections {
    /// Attaches `projection` to `entity`, caching its matrix; logs an error
    /// and replaces the old value if one was already attached.
    pub fn attach(&mut self, entity: EntityId, projection: Projection) {
        let stored = StoredProjection {
            projection,
            matrix: projection.into(),
        };
        if self.map.insert(entity, stored).is_some() {
            error!(
                "Entity {:?} already had a projection attached, replacing.",
                entity
            );
        }
    }

    /// Returns the cached projection matrix for `entity`, if any.
    pub fn get_matrix(&self, entity: EntityId) -> Option<&Mat4> {
        match self.map.get(entity) {
            Some(stored) => Some(&stored.matrix),
            None => None,
        }
    }

    /// Runs `with` on the entity's projection (`None` if absent) and, when
    /// present, refreshes the cached matrix afterwards.
    pub fn replace_with<F, O>(&mut self, entity: EntityId, with: F) -> O
    where
        F: FnOnce(Option<&mut Projection>) -> O,
    {
        match self.map.get_mut(entity) {
            Some(stored) => {
                let output = with(Some(&mut stored.projection));
                // The closure may have mutated the projection; recompute.
                stored.matrix = stored.projection.into();
                output
            }
            None => with(None),
        }
    }
}
impl<'context> InfallibleSystem<'context> for Projections {
    type Dependencies = &'context Entities;

    /// Name reported to the system framework for diagnostics.
    fn debug_name() -> &'static str {
        "projections"
    }

    fn create(_: &'context Entities) -> Self {
        Self {
            map: IdMapVec::with_capacity(128),
        }
    }

    /// Drops projections belonging to entities removed since last update.
    fn update(&mut self, entities: &Entities) {
        entities.last_removed().iter().for_each(|&entity| {
            if self.map.remove(entity).is_some() {
                debug!("Removed projection {:?}.", entity);
            }
        });
    }

    fn teardown(&mut self, entities: &Entities) {
        self.update(entities);
    }

    fn destroy(mut self, entities: &Entities) {
        // One final sweep; anything still attached afterwards was leaked.
        self.update(entities);
        if !self.map.is_empty() {
            error!("Projections leaked, {} instances.", self.map.len());
        }
    }
}
/// A projection paired with its precomputed matrix; the matrix is kept in
/// sync whenever the projection is set or mutated.
struct StoredProjection {
    projection: Projection,
    matrix: Mat4,
}
/// Builds the perspective matrix from the projection parameters.
///
/// Implemented as `From` rather than a hand-written `Into`: the standard
/// library's blanket impl derives `Into<Mat4> for Projection` from this,
/// so all existing `.into()` call sites keep working, while `Mat4::from`
/// also becomes available (clippy: `from_over_into`).
impl From<Projection> for Mat4 {
    fn from(projection: Projection) -> Self {
        math::perspective(
            projection.fov,
            projection.aspect_ratio,
            projection.near,
            projection.far,
        )
    }
}
| 24.938144 | 76 | 0.555601 |
fcfcc4e5edf1764b02b452b99ef5a96793c6357a | 654 | use kf_protocol::api::Request;
use kf_protocol::derive::Decode;
use kf_protocol::derive::Encode;
use types::SpuId;
use super::KfSPUPeerApiEnum;
/// Request sent between SPU peers to open a fetch stream.
#[derive(Decode, Encode, Debug, Default)]
pub struct FetchStreamRequest {
    /// Id of the requesting SPU.
    pub spu_id: SpuId,
    // min_bytes/max_bytes presumably bound the fetch batch size
    // (Kafka-style fetch semantics) — TODO confirm against the consumer.
    pub min_bytes: i32,
    pub max_bytes: i32,
}
// Wire contract: binds this request to its peer-API key and response type.
impl Request for FetchStreamRequest {
    const API_KEY: u16 = KfSPUPeerApiEnum::FetchStream as u16;
    type Response = FetchStreamResponse;
}
/// Response to [`FetchStreamRequest`], echoing the responding SPU's id.
#[derive(Decode, Encode, Default, Debug)]
pub struct FetchStreamResponse {
    /// Id of the responding SPU.
    pub spu_id: SpuId,
}
impl FetchStreamResponse {
pub fn new(spu_id: SpuId) -> Self {
FetchStreamResponse { spu_id }
}
}
| 21.8 | 62 | 0.714067 |
1a55334479c93a050248eef645ee400841b101b4 | 36,918 | use std::collections::{HashMap, HashSet};
use domain::anoncreds::credential_definition::{CredentialDefinitionV1, CredentialDefinition};
use domain::anoncreds::proof::{Proof, RequestedProof, Identifier};
use domain::anoncreds::proof_request::{AttributeInfo, PredicateInfo, ProofRequest, NonRevocedInterval};
use domain::anoncreds::revocation_registry::RevocationRegistryV1;
use domain::anoncreds::revocation_registry_definition::RevocationRegistryDefinitionV1;
use domain::anoncreds::schema::{SchemaV1, Schema};
use errors::prelude::*;
use services::anoncreds::helpers::*;
use ursa::cl::{CredentialPublicKey, new_nonce, Nonce};
use ursa::cl::verifier::Verifier as CryptoVerifier;
use services::wallet::language::{parse_from_json, Operator};
/// Credential metadata that proof-request restrictions are matched against.
/// One `Filter` is assembled per referent from the proof's identifiers plus
/// the resolved schema and credential definition (see
/// `Verifier::_gather_filter_info`).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Filter {
    schema_id: String,
    schema_issuer_did: String,
    schema_name: String,
    schema_version: String,
    issuer_did: String,
    cred_def_id: String,
}
pub struct Verifier {}
impl Verifier {
    /// Creates a new (stateless) verifier service.
    pub fn new() -> Verifier {
        Verifier {}
    }

    /// Verifies `full_proof` against `proof_req`.
    ///
    /// First performs the structural checks: the proof must cover exactly the
    /// requested attributes/predicates, satisfy the requested restrictions,
    /// and carry timestamps wherever a non-revocation interval applies. Then
    /// each sub-proof is added to a crypto proof verifier together with its
    /// schema, credential definition and (when a timestamp is present)
    /// revocation registry state, and the aggregate proof is checked against
    /// the request nonce.
    ///
    /// Returns `Ok(true)`/`Ok(false)` for the cryptographic verdict and
    /// `Err` for any structural problem (missing schema / cred def /
    /// registry, mismatched referents, failed restrictions, ...).
    pub fn verify(&self,
                  full_proof: &Proof,
                  proof_req: &ProofRequest,
                  schemas: &HashMap<String, SchemaV1>,
                  cred_defs: &HashMap<String, CredentialDefinitionV1>,
                  rev_reg_defs: &HashMap<String, RevocationRegistryDefinitionV1>,
                  rev_regs: &HashMap<String, HashMap<u64, RevocationRegistryV1>>) -> IndyResult<bool> {
        trace!("verify >>> full_proof: {:?}, proof_req: {:?}, schemas: {:?}, cred_defs: {:?}, rev_reg_defs: {:?} rev_regs: {:?}",
               full_proof, proof_req, schemas, cred_defs, rev_reg_defs, rev_regs);

        // Index the received referents by kind for the structural checks.
        let received_revealed_attrs: HashMap<String, Identifier> = Verifier::_received_revealed_attrs(&full_proof)?;
        let received_unrevealed_attrs: HashMap<String, Identifier> = Verifier::_received_unrevealed_attrs(&full_proof)?;
        let received_predicates: HashMap<String, Identifier> = Verifier::_received_predicates(&full_proof)?;
        let received_self_attested_attrs: HashSet<String> = Verifier::_received_self_attested_attrs(&full_proof);

        Verifier::_compare_attr_from_proof_and_request(proof_req,
                                                       &received_revealed_attrs,
                                                       &received_unrevealed_attrs,
                                                       &received_self_attested_attrs,
                                                       &received_predicates)?;

        Verifier::_verify_requested_restrictions(&proof_req,
                                                 schemas,
                                                 cred_defs,
                                                 &received_revealed_attrs,
                                                 &received_unrevealed_attrs,
                                                 &received_predicates,
                                                 &received_self_attested_attrs)?;

        Verifier::_compare_timestamps_from_proof_and_request(proof_req,
                                                             &received_revealed_attrs,
                                                             &received_unrevealed_attrs,
                                                             &received_self_attested_attrs,
                                                             &received_predicates)?;

        let mut proof_verifier = CryptoVerifier::new_proof_verifier()?;
        let non_credential_schema = build_non_credential_schema()?;

        // Register every sub-proof with its schema / cred def / revocation
        // state before running the aggregate cryptographic verification.
        for sub_proof_index in 0..full_proof.identifiers.len() {
            let identifier = full_proof.identifiers[sub_proof_index].clone();

            let schema: &SchemaV1 = schemas.get(&identifier.schema_id)
                .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, format!("Schema not found for id: {:?}", identifier.schema_id)))?;

            let cred_def: &CredentialDefinitionV1 = cred_defs.get(&identifier.cred_def_id)
                .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, format!("CredentialDefinition not found for id: {:?}", identifier.cred_def_id)))?;

            // Revocation data is only consulted when the sub-proof carries a
            // timestamp; otherwise the credential is treated as irrevocable.
            let (rev_reg_def, rev_reg) =
                if let Some(timestamp) = identifier.timestamp {
                    let rev_reg_id = identifier.rev_reg_id
                        .clone()
                        .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, "Revocation Registry Id not found"))?;

                    let rev_reg_def = Some(rev_reg_defs
                        .get(&rev_reg_id)
                        .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, format!("RevocationRegistryDefinition not found for id: {:?}", identifier.rev_reg_id)))?);

                    let rev_regs_for_cred = rev_regs
                        .get(&rev_reg_id)
                        .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, format!("RevocationRegistry not found for id: {:?}", rev_reg_id)))?;

                    let rev_reg = Some(rev_regs_for_cred
                        .get(&timestamp)
                        .ok_or_else(|| err_msg(IndyErrorKind::InvalidStructure, format!("RevocationRegistry not found for timestamp: {:?}", timestamp)))?);

                    (rev_reg_def, rev_reg)
                } else { (None, None) };

            let attrs_for_credential = Verifier::_get_revealed_attributes_for_credential(sub_proof_index, &full_proof.requested_proof, proof_req)?;
            let predicates_for_credential = Verifier::_get_predicates_for_credential(sub_proof_index, &full_proof.requested_proof, proof_req)?;

            let credential_schema = build_credential_schema(&schema.attr_names)?;
            let sub_proof_request = build_sub_proof_request(&attrs_for_credential, &predicates_for_credential)?;

            let credential_pub_key = CredentialPublicKey::build_from_parts(&cred_def.value.primary, cred_def.value.revocation.as_ref())?;

            proof_verifier.add_sub_proof_request(&sub_proof_request,
                                                 &credential_schema,
                                                 &non_credential_schema,
                                                 &credential_pub_key,
                                                 rev_reg_def.as_ref().map(|r_reg_def| &r_reg_def.value.public_keys.accum_key),
                                                 rev_reg.as_ref().map(|r_reg| &r_reg.value))?;
        }

        let valid = proof_verifier.verify(&full_proof.proof, &proof_req.nonce)?;

        trace!("verify <<< valid: {:?}", valid);

        Ok(valid)
    }

    /// Generates a fresh nonce for a proof request.
    pub fn generate_nonce(&self) -> IndyResult<Nonce>{
        trace!("generate_nonce >>> ");

        let nonce = new_nonce()?;

        trace!("generate_nonce <<< nonce: {:?} ", nonce);

        Ok(nonce)
    }

    /// Collects the requested attribute infos whose revealed values belong to
    /// the sub-proof at `sub_proof_index`.
    fn _get_revealed_attributes_for_credential(sub_proof_index: usize,
                                               requested_proof: &RequestedProof,
                                               proof_req: &ProofRequest) -> IndyResult<Vec<AttributeInfo>> {
        trace!("_get_revealed_attributes_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, proof_req: {:?}",
               sub_proof_index, requested_proof, proof_req);

        let revealed_attrs_for_credential = requested_proof.revealed_attrs
            .iter()
            .filter(|&(attr_referent, ref revealed_attr_info)|
                sub_proof_index == revealed_attr_info.sub_proof_index as usize && proof_req.requested_attributes.contains_key(attr_referent))
            .map(|(attr_referent, _)|
                proof_req.requested_attributes[attr_referent].clone())
            .collect::<Vec<AttributeInfo>>();

        trace!("_get_revealed_attributes_for_credential <<< revealed_attrs_for_credential: {:?}", revealed_attrs_for_credential);

        Ok(revealed_attrs_for_credential)
    }

    /// Collects the requested predicate infos proved by the sub-proof at
    /// `sub_proof_index`.
    fn _get_predicates_for_credential(sub_proof_index: usize,
                                      requested_proof: &RequestedProof,
                                      proof_req: &ProofRequest) -> IndyResult<Vec<PredicateInfo>> {
        trace!("_get_predicates_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, proof_req: {:?}",
               sub_proof_index, requested_proof, proof_req);

        let predicates_for_credential = requested_proof.predicates
            .iter()
            .filter(|&(predicate_referent, requested_referent)|
                sub_proof_index == requested_referent.sub_proof_index as usize && proof_req.requested_predicates.contains_key(predicate_referent))
            .map(|(predicate_referent, _)|
                proof_req.requested_predicates[predicate_referent].clone())
            .collect::<Vec<PredicateInfo>>();

        trace!("_get_predicates_for_credential <<< predicates_for_credential: {:?}", predicates_for_credential);

        Ok(predicates_for_credential)
    }

    /// Checks that the sets of attribute and predicate referents in the proof
    /// are exactly the ones named in the request (no extras, none missing).
    fn _compare_attr_from_proof_and_request(proof_req: &ProofRequest,
                                            received_revealed_attrs: &HashMap<String, Identifier>,
                                            received_unrevealed_attrs: &HashMap<String, Identifier>,
                                            received_self_attested_attrs: &HashSet<String>,
                                            received_predicates: &HashMap<String, Identifier>) -> IndyResult<()> {
        let requested_attrs: HashSet<String> = proof_req.requested_attributes
            .keys()
            .cloned()
            .collect();

        // Revealed + unrevealed + self-attested together must cover the
        // requested attribute referents exactly.
        let received_attrs: HashSet<String> = received_revealed_attrs
            .iter()
            .chain(received_unrevealed_attrs)
            .map(|(r, _)| r.to_string())
            .collect::<HashSet<String>>()
            .union(&received_self_attested_attrs)
            .cloned()
            .collect();

        if requested_attrs != received_attrs {
            return Err(err_msg(IndyErrorKind::InvalidStructure,
                               format!("Requested attributes {:?} do not correspond to received {:?}", requested_attrs, received_attrs)));
        }

        let requested_predicates: HashSet<&String> = proof_req.requested_predicates
            .keys()
            .collect();

        let received_predicates_: HashSet<&String> = received_predicates
            .keys()
            .collect();

        if requested_predicates != received_predicates_ {
            return Err(err_msg(IndyErrorKind::InvalidStructure,
                               format!("Requested predicates {:?} do not correspond to received {:?}", requested_predicates, received_predicates)));
        }

        Ok(())
    }

    /// Checks that every referent subject to a non-revocation interval
    /// (global or local) carries a timestamp; self-attested attributes are
    /// exempt.
    fn _compare_timestamps_from_proof_and_request(proof_req: &ProofRequest,
                                                  received_revealed_attrs: &HashMap<String, Identifier>,
                                                  received_unrevealed_attrs: &HashMap<String, Identifier>,
                                                  received_self_attested_attrs: &HashSet<String>,
                                                  received_predicates: &HashMap<String, Identifier>) -> IndyResult<()> {
        proof_req.requested_attributes
            .iter()
            .map(|(referent, info)|
                Verifier::_validate_timestamp(&received_revealed_attrs, referent, &proof_req.non_revoked, &info.non_revoked)
                    .or_else(|_|Verifier::_validate_timestamp(&received_unrevealed_attrs, referent, &proof_req.non_revoked, &info.non_revoked))
                    .or_else(|_|received_self_attested_attrs.get(referent).map(|_| ()).ok_or_else(|| IndyError::from(IndyErrorKind::InvalidStructure)))
            )
            .collect::<IndyResult<Vec<()>>>()?;

        proof_req.requested_predicates
            .iter()
            .map(|(referent, info)|
                Verifier::_validate_timestamp(received_predicates, referent, &proof_req.non_revoked, &info.non_revoked))
            .collect::<IndyResult<Vec<()>>>()?;

        Ok(())
    }

    /// Succeeds when either no non-revocation interval applies to the
    /// referent, or its identifier in `received_` carries a timestamp.
    fn _validate_timestamp(received_: &HashMap<String, Identifier>, referent: &str,
                           global_interval: &Option<NonRevocedInterval>, local_interval: &Option<NonRevocedInterval>) -> IndyResult<()> {
        if get_non_revoc_interval(global_interval, local_interval).is_none() {
            return Ok(());
        }

        if !received_
            .get(referent)
            .map(|attr| attr.timestamp.is_some())
            .unwrap_or(false) {
            return Err(IndyError::from(IndyErrorKind::InvalidStructure));
        }

        Ok(())
    }

    /// Maps each revealed attribute referent to its sub-proof identifier.
    fn _received_revealed_attrs(proof: &Proof) -> IndyResult<HashMap<String, Identifier>> {
        let mut revealed_identifiers: HashMap<String, Identifier> = HashMap::new();
        for (referent, info) in proof.requested_proof.revealed_attrs.iter() {
            revealed_identifiers.insert(
                referent.to_string(),
                Verifier::_get_proof_identifier(proof, info.sub_proof_index)?
            );
        }
        Ok(revealed_identifiers)
    }

    /// Maps each unrevealed attribute referent to its sub-proof identifier.
    fn _received_unrevealed_attrs(proof: &Proof) -> IndyResult<HashMap<String, Identifier>> {
        let mut unrevealed_identifiers: HashMap<String, Identifier> = HashMap::new();
        for (referent, info) in proof.requested_proof.unrevealed_attrs.iter() {
            unrevealed_identifiers.insert(
                referent.to_string(),
                Verifier::_get_proof_identifier(proof, info.sub_proof_index)?
            );
        }
        Ok(unrevealed_identifiers)
    }

    /// Maps each predicate referent to its sub-proof identifier.
    fn _received_predicates(proof: &Proof) -> IndyResult<HashMap<String, Identifier>> {
        let mut predicate_identifiers: HashMap<String, Identifier> = HashMap::new();
        for (referent, info) in proof.requested_proof.predicates.iter() {
            predicate_identifiers.insert(
                referent.to_string(),
                Verifier::_get_proof_identifier(proof, info.sub_proof_index)?
            );
        }
        Ok(predicate_identifiers)
    }

    /// Returns the set of self-attested attribute referents in the proof.
    fn _received_self_attested_attrs(proof: &Proof) -> HashSet<String> {
        proof.requested_proof.self_attested_attrs
            .keys()
            .cloned()
            .collect()
    }

    /// Looks up the identifier of the sub-proof at `index`, erring when the
    /// proof references a sub-proof it does not contain.
    fn _get_proof_identifier(proof: &Proof, index: i32) -> IndyResult<Identifier> {
        proof.identifiers
            .get(index as usize)
            .cloned()
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidStructure,
                format!("Identifier not found for index: {}", index)
            ))
    }

    /// Evaluates the WQL restrictions of every requested attribute and
    /// predicate against the credential metadata of the sub-proof that
    /// satisfied it. Self-attested attributes (which have no credential
    /// behind them) are excluded from the attribute pass.
    fn _verify_requested_restrictions(proof_req: &ProofRequest,
                                      schemas: &HashMap<String, SchemaV1>,
                                      cred_defs: &HashMap<String, CredentialDefinitionV1>,
                                      received_revealed_attrs: &HashMap<String, Identifier>,
                                      received_unrevealed_attrs: &HashMap<String, Identifier>,
                                      received_predicates: &HashMap<String, Identifier>,
                                      self_attested_attrs: &HashSet<String>) -> IndyResult<()> {
        let proof_attr_identifiers: HashMap<String, Identifier> = received_revealed_attrs
            .iter()
            .chain(received_unrevealed_attrs)
            .map(|(r, id)| (r.to_string(), id.clone()))
            .collect();

        let requested_attrs: HashMap<String, AttributeInfo> = proof_req.requested_attributes
            .iter()
            .filter(|&(referent, info)| !Verifier::_is_self_attested(&referent, &info, self_attested_attrs))
            .map(|(referent, info)| (referent.to_string(), info.clone()))
            .collect();

        for (referent, info) in requested_attrs {
            let op = parse_from_json(
                &build_wql_query(&info.name, &referent, &info.restrictions, None)?
            )?;

            let filter = Verifier::_gather_filter_info(&referent, &proof_attr_identifiers, schemas, cred_defs)?;

            Verifier::_process_operator(&info.name, &op, &filter)
                .map_err(|err| err.extend(format!("Requested restriction validation failed for \"{}\" attribute", &info.name)))?;
        }

        for (referent, info) in proof_req.requested_predicates.iter() {
            let op = parse_from_json(
                &build_wql_query(&info.name, &referent, &info.restrictions, None)?
            )?;

            let filter = Verifier::_gather_filter_info(&referent, received_predicates, schemas, cred_defs)?;

            Verifier::_process_operator(&info.name, &op, &filter)
                .map_err(|err| err.extend(format!("Requested restriction validation failed for \"{}\" predicate", &info.name)))?;
        }

        Ok(())
    }

    /// A referent counts as self-attested only when it appears in the
    /// self-attested set AND its restrictions are absent or an empty array
    /// (non-empty restrictions force a credential-backed attribute).
    fn _is_self_attested(referent: &str, info: &AttributeInfo, self_attested_attrs: &HashSet<String>) -> bool {
        match info.restrictions.as_ref() {
            Some(&serde_json::Value::Array(ref array)) if array.is_empty() =>
                self_attested_attrs.contains(referent),
            None => self_attested_attrs.contains(referent),
            Some(_) => false
        }
    }

    /// Builds the `Filter` for a referent by resolving its identifier's
    /// schema and credential definition and extracting the issuer DIDs from
    /// their ids.
    fn _gather_filter_info(referent: &str,
                           identifiers: &HashMap<String, Identifier>,
                           schemas: &HashMap<String, SchemaV1>,
                           cred_defs: &HashMap<String, CredentialDefinitionV1>) -> IndyResult<Filter> {
        let identifier = identifiers
            .get(referent)
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidState,
                format!("Identifier not found for referent: {}", referent))
            )?;

        let schema: &SchemaV1 = schemas
            .get(&identifier.schema_id)
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidStructure,
                format!("Schema not found for id: {:?}", identifier.schema_id))
            )?;

        let cred_def: &CredentialDefinitionV1 = cred_defs
            .get(&identifier.cred_def_id)
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidStructure,
                format!("CredentialDefinitionV1 not found for id: {:?}", identifier.cred_def_id))
            )?;

        let schema_issuer_did = Schema::issuer_did(&schema.id)
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidStructure,
                format!("schema_id has invalid format: {:?}", schema.id))
            )?;

        let issuer_did = CredentialDefinition::issuer_did(&cred_def.id)
            .ok_or_else(|| err_msg(
                IndyErrorKind::InvalidStructure,
                format!("cred_def_id has invalid format: {:?}", cred_def.id))
            )?;

        Ok(Filter {
            schema_id: identifier.schema_id.to_string(),
            schema_name: schema.name.to_string(),
            schema_issuer_did: schema_issuer_did.to_string(),
            schema_version: schema.version.to_string(),
            cred_def_id: identifier.cred_def_id.to_string(),
            issuer_did: issuer_did.to_string()
        })
    }

    /// Recursively evaluates a WQL restriction operator against `filter`.
    /// Supports $eq, $neq, $in, $and, $or and $not; anything else is
    /// rejected as unsupported.
    fn _process_operator(attr: &str,
                         restriction_op: &Operator,
                         filter: &Filter) -> IndyResult<()> {
        match restriction_op {
            Operator::Eq(ref tag_name, ref tag_value) => {
                let tag_name = tag_name.from_utf8()?;
                Verifier::_process_filter(&tag_name, &tag_value.value(), filter)
                    .map_err(|err| err.extend(format!("$eq operator validation failed for tag: \"{}\", value: \"{}\"", tag_name, tag_value.value())))
            }
            Operator::Neq(ref tag_name, ref tag_value) => {
                let tag_name = tag_name.from_utf8()?;
                // $neq passes exactly when the $eq check would fail.
                if Verifier::_process_filter(&tag_name, &tag_value.value(), filter).is_err() {
                    Ok(())
                } else {
                    Err(IndyError::from_msg(IndyErrorKind::ProofRejected,
                                            format!("$neq operator validation failed for tag: \"{}\", value: \"{}\". Condition was passed.", tag_name, tag_value.value())))
                }
            }
            Operator::In(ref tag_name, ref tag_values) => {
                let tag_name = tag_name.from_utf8()?;
                let res = tag_values
                    .iter()
                    .any(|val| Verifier::_process_filter(&tag_name, &val.value(), filter).is_ok());
                if res {
                    Ok(())
                } else {
                    Err(IndyError::from_msg(IndyErrorKind::ProofRejected,
                                            format!("$in operator validation failed for tag: \"{}\", values \"{:?}\".", tag_name, tag_values)))
                }
            }
            Operator::And(ref operators) => {
                operators
                    .iter()
                    .map(|op| Verifier::_process_operator(attr, op, filter))
                    .collect::<IndyResult<Vec<()>>>()
                    .map(|_| ())
                    .map_err(|err| err.extend("$and operator validation failed."))
            }
            Operator::Or(ref operators) => {
                let res = operators
                    .iter()
                    .any(|op| Verifier::_process_operator(attr, op, filter).is_ok());
                if res {
                    Ok(())
                } else {
                    Err(IndyError::from_msg(IndyErrorKind::ProofRejected, "$or operator validation failed. All conditions were failed."))
                }
            }
            Operator::Not(ref operator) => {
                if Verifier::_process_operator(attr, &*operator, filter).is_err() {
                    Ok(())
                } else {
                    Err(IndyError::from_msg(IndyErrorKind::ProofRejected, "$not operator validation failed. All conditions were passed."))
                }
            }
            _ => Err(IndyError::from_msg(IndyErrorKind::ProofRejected, "unsupported operator"))
        }
    }

    /// Compares a single restriction tag against the corresponding `Filter`
    /// field. Tags of the form `attr::<name>::marker` are accepted without a
    /// value check; unknown tags are a structural error.
    fn _process_filter(tag: &str,
                       tag_value: &str,
                       filter: &Filter) -> IndyResult<()> {
        match tag {
            tag_ @ "schema_id" => Verifier::_precess_filed(tag_, &filter.schema_id, tag_value),
            tag_ @ "schema_issuer_did" => Verifier::_precess_filed(tag_, &filter.schema_issuer_did, tag_value),
            tag_ @ "schema_name" => Verifier::_precess_filed(tag_, &filter.schema_name, tag_value),
            tag_ @ "schema_version" => Verifier::_precess_filed(tag_, &filter.schema_version, tag_value),
            tag_ @ "cred_def_id" => Verifier::_precess_filed(tag_, &filter.cred_def_id, tag_value),
            tag_ @ "issuer_did" => Verifier::_precess_filed(tag_, &filter.issuer_did, tag_value),
            x if Verifier::_is_attr_operator(x) => Ok(()),
            _ => Err(err_msg(IndyErrorKind::InvalidStructure, "Unknown Filter Type"))
        }
    }

    /// Succeeds when the filter's value for `filed` equals the expected
    /// `tag_value`.
    // NOTE(review): "_precess_filed" / "filed" look like typos for
    // "_process_field" / "field"; rename deferred since code outside this
    // view (e.g. the tail of the tests module) might reference the name.
    fn _precess_filed(filed: &str, filter_value: &str, tag_value: &str) -> IndyResult<()> {
        if filter_value == tag_value {
            Ok(())
        } else {
            Err(IndyError::from_msg(IndyErrorKind::ProofRejected, format!("\"{}\" values are different: expected: \"{}\", actual: \"{}\"", filed, tag_value, filter_value)))
        }
    }

    /// True for tags shaped like `attr::<name>::marker`.
    fn _is_attr_operator(key: &str) -> bool { key.starts_with("attr::") && key.ends_with("::marker") }
}
#[cfg(test)]
mod tests {
use super::*;
use services::wallet::language::{TagName, TargetValue};
pub const SCHEMA_ID: &str = "123";
pub const SCHEMA_NAME: &str = "Schema Name";
pub const SCHEMA_ISSUER_DID: &str = "234";
pub const SCHEMA_VERSION: &str = "1.2.3";
pub const CRED_DEF_ID: &str = "345";
pub const ISSUER_DID: &str = "456";
fn encrypted_tag(tag: String) -> TagName { TagName::EncryptedTagName(tag.into_bytes()) }
fn unencrypted_target(tag: String) -> TargetValue { TargetValue::Unencrypted(tag) }
fn schema_id_tag() -> TagName { encrypted_tag("schema_id".to_string()) }
fn schema_name_tag() -> TagName { encrypted_tag("schema_name".to_string()) }
fn schema_issuer_did_tag() -> TagName { encrypted_tag("schema_issuer_did".to_string()) }
fn schema_version_tag() -> TagName { encrypted_tag("schema_version".to_string()) }
fn cred_def_id_tag() -> TagName { encrypted_tag("cred_def_id".to_string()) }
fn issuer_did_tag() -> TagName { encrypted_tag("issuer_did".to_string()) }
fn attr_tag() -> TagName { encrypted_tag("attr::zip::marker".to_string()) }
fn bad_attr_tag() -> TagName { encrypted_tag("bad::zip::marker".to_string()) }
fn filter() -> Filter {
Filter {
schema_id: SCHEMA_ID.to_string(),
schema_name: SCHEMA_NAME.to_string(),
schema_issuer_did: SCHEMA_ISSUER_DID.to_string(),
schema_version: SCHEMA_VERSION.to_string(),
cred_def_id: CRED_DEF_ID.to_string(),
issuer_did: ISSUER_DID.to_string(),
}
}
#[test]
fn test_process_op_eq() {
let filter = filter();
let mut op = Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string()));
Verifier::_process_operator("zip", &op, &filter).unwrap();
op = Operator::And(vec![
Operator::Eq(attr_tag(), unencrypted_target("1".to_string())),
Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
]);
Verifier::_process_operator("zip", &op, &filter).unwrap();
op = Operator::And(vec![
Operator::Eq(bad_attr_tag(), unencrypted_target("1".to_string())),
Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
]);
assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
op = Operator::Eq(schema_id_tag(), unencrypted_target("NOT HERE".to_string()));
assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
}
#[test]
fn test_process_op_ne() {
let filter = filter();
let mut op = Operator::Neq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string()));
assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
op = Operator::Neq(schema_id_tag(), unencrypted_target("NOT HERE".to_string()));
Verifier::_process_operator("zip", &op, &filter).unwrap()
}
#[test]
fn test_process_op_in() {
let filter = filter();
let mut cred_def_ids = vec![unencrypted_target("Not Here".to_string())];
let mut op = Operator::In(cred_def_id_tag(), cred_def_ids.clone());
assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
cred_def_ids.push(unencrypted_target(CRED_DEF_ID.to_string()));
op = Operator::In(cred_def_id_tag(), cred_def_ids.clone());
Verifier::_process_operator("zip", &op, &filter).unwrap()
}
#[test]
fn test_process_op_or() {
let filter = filter();
let mut op = Operator::Or(vec![
Operator::Eq(schema_id_tag(), unencrypted_target("Not Here".to_string())),
Operator::Eq(cred_def_id_tag(), unencrypted_target("Not Here".to_string()))
]);
assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
op = Operator::Or(vec![
Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
Operator::Eq(cred_def_id_tag(), unencrypted_target("Not Here".to_string()))
]);
Verifier::_process_operator("zip", &op, &filter).unwrap()
}
#[test]
fn test_process_op_and() {
    let filter = filter();

    // And requires every conjunct to hold: zero hits fails...
    let none = Operator::And(vec![
        Operator::Eq(schema_id_tag(), unencrypted_target("Not Here".to_string())),
        Operator::Eq(cred_def_id_tag(), unencrypted_target("Not Here".to_string())),
    ]);
    assert!(Verifier::_process_operator("zip", &none, &filter).is_err());

    // ...one hit out of two still fails...
    let partial = Operator::And(vec![
        Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
        Operator::Eq(cred_def_id_tag(), unencrypted_target("Not Here".to_string())),
    ]);
    assert!(Verifier::_process_operator("zip", &partial, &filter).is_err());

    // ...and only all hits pass.
    let full = Operator::And(vec![
        Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
        Operator::Eq(cred_def_id_tag(), unencrypted_target(CRED_DEF_ID.to_string())),
    ]);
    Verifier::_process_operator("zip", &full, &filter).unwrap()
}
#[test]
fn test_process_op_not() {
    let filter = filter();

    // Not inverts the inner result: negating a satisfied And must fail...
    let negated_hit = Operator::Not(Box::new(Operator::And(vec![
        Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
        Operator::Eq(cred_def_id_tag(), unencrypted_target(CRED_DEF_ID.to_string())),
    ])));
    assert!(Verifier::_process_operator("zip", &negated_hit, &filter).is_err());

    // ...and negating a failing And must pass.
    let negated_miss = Operator::Not(Box::new(Operator::And(vec![
        Operator::Eq(schema_id_tag(), unencrypted_target("Not Here".to_string())),
        Operator::Eq(cred_def_id_tag(), unencrypted_target("Not Here".to_string())),
    ])));
    Verifier::_process_operator("zip", &negated_miss, &filter).unwrap()
}
#[test]
// NB: renamed from `test_proccess_op_or_with_nested_and` to fix the
// "proccess" typo; test names are only invoked by the harness, so no
// callers are affected.
fn test_process_op_or_with_nested_and() {
    let filter = filter();

    // Builds the Or-of-three-Ands tree used by every case below.
    // Only the first branch's two targets vary; the other two branches
    // always use unsatisfiable "Not Here" targets.
    fn op(schema_id: &str, cred_def_id: &str) -> Operator {
        Operator::Or(vec![
            Operator::And(vec![
                Operator::Eq(schema_id_tag(), unencrypted_target(schema_id.to_string())),
                Operator::Eq(cred_def_id_tag(), unencrypted_target(cred_def_id.to_string()))
            ]),
            Operator::And(vec![
                Operator::Eq(schema_issuer_did_tag(), unencrypted_target("Not Here".to_string())),
                Operator::Eq(schema_name_tag(), unencrypted_target("Not Here".to_string()))
            ]),
            Operator::And(vec![
                Operator::Eq(schema_name_tag(), unencrypted_target("Not Here".to_string())),
                Operator::Eq(issuer_did_tag(), unencrypted_target("Not Here".to_string()))
            ]),
        ])
    }

    // No branch satisfiable -> the Or errors.
    assert!(Verifier::_process_operator("zip", &op("Not Here", "Not Here"), &filter).is_err());

    // First branch only half-satisfied -> its And fails, so the Or still errors.
    assert!(Verifier::_process_operator("zip", &op(SCHEMA_ID, "Not Here"), &filter).is_err());

    // First branch fully satisfied -> the Or passes.
    Verifier::_process_operator("zip", &op(SCHEMA_ID, CRED_DEF_ID), &filter).unwrap()
}
#[test]
fn test_verify_op_complex_nested() {
    let filter = filter();

    // And of three Ands (plus a nested Or). The inner Or here fails on
    // both branches, which sinks the whole conjunction.
    let mut op = Operator::And(vec![
        Operator::And(vec![
            Operator::Or(vec![
                Operator::Eq(schema_name_tag(), unencrypted_target("Not Here".to_string())),
                Operator::Eq(issuer_did_tag(), unencrypted_target("Not Here".to_string()))
            ]),
            Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
            Operator::Eq(cred_def_id_tag(), unencrypted_target(CRED_DEF_ID.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_issuer_did_tag(), unencrypted_target(SCHEMA_ISSUER_DID.to_string())),
            Operator::Eq(schema_name_tag(), unencrypted_target(SCHEMA_NAME.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_version_tag(), unencrypted_target(SCHEMA_VERSION.to_string())),
            Operator::Eq(issuer_did_tag(), unencrypted_target(ISSUER_DID.to_string()))
        ]),
    ]);
    assert!(Verifier::_process_operator("zip", &op, &filter).is_err());

    // Same tree, but the inner Or now has one satisfied branch, and a
    // Not over an absent value is appended: everything holds.
    op = Operator::And(vec![
        Operator::And(vec![
            Operator::Or(vec![
                Operator::Eq(schema_name_tag(), unencrypted_target(SCHEMA_NAME.to_string())),
                Operator::Eq(issuer_did_tag(), unencrypted_target("Not Here".to_string()))
            ]),
            Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
            Operator::Eq(cred_def_id_tag(), unencrypted_target(CRED_DEF_ID.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_issuer_did_tag(), unencrypted_target(SCHEMA_ISSUER_DID.to_string())),
            Operator::Eq(schema_name_tag(), unencrypted_target(SCHEMA_NAME.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_version_tag(), unencrypted_target(SCHEMA_VERSION.to_string())),
            Operator::Eq(issuer_did_tag(), unencrypted_target(ISSUER_DID.to_string()))
        ]),
        Operator::Not(Box::new(Operator::Eq(schema_version_tag(), unencrypted_target("NOT HERE".to_string()))))
    ]);
    Verifier::_process_operator("zip", &op, &filter).unwrap();

    // As above, but the trailing Not now negates a value that IS present,
    // so the conjunction must fail again.
    op = Operator::And(vec![
        Operator::And(vec![
            Operator::Or(vec![
                Operator::Eq(schema_name_tag(), unencrypted_target(SCHEMA_NAME.to_string())),
                Operator::Eq(issuer_did_tag(), unencrypted_target("Not Here".to_string()))
            ]),
            Operator::Eq(schema_id_tag(), unencrypted_target(SCHEMA_ID.to_string())),
            Operator::Eq(cred_def_id_tag(), unencrypted_target(CRED_DEF_ID.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_issuer_did_tag(), unencrypted_target(SCHEMA_ISSUER_DID.to_string())),
            Operator::Eq(schema_name_tag(), unencrypted_target(SCHEMA_NAME.to_string()))
        ]),
        Operator::And(vec![
            Operator::Eq(schema_version_tag(), unencrypted_target(SCHEMA_VERSION.to_string())),
            Operator::Eq(issuer_did_tag(), unencrypted_target(ISSUER_DID.to_string()))
        ]),
        Operator::Not(Box::new(Operator::Eq(schema_version_tag(), unencrypted_target(SCHEMA_VERSION.to_string()))))
    ]);
    assert!(Verifier::_process_operator("zip", &op, &filter).is_err());
}
// Two fixture identifiers: referent_1 carries a timestamp, referent_2 does not.
fn _received() -> HashMap<String, Identifier> {
    let entries = vec![
        ("referent_1", Some(1234)),
        ("referent_2", None),
    ];
    entries
        .into_iter()
        .map(|(referent, timestamp)| {
            (
                referent.to_string(),
                Identifier {
                    timestamp,
                    schema_id: String::new(),
                    cred_def_id: String::new(),
                    rev_reg_id: Some(String::new()),
                },
            )
        })
        .collect()
}
// Non-revocation interval fixture: open start, end bounded at 1234.
fn _interval() -> NonRevocedInterval {
    let to = Some(1234);
    NonRevocedInterval { from: None, to }
}
#[test]
fn validate_timestamp_works() {
    let received = _received();
    // referent_1 carries a timestamp, so validation succeeds with no
    // interval, a global interval, or a local interval.
    Verifier::_validate_timestamp(&received, "referent_1", &None, &None).unwrap();
    Verifier::_validate_timestamp(&received, "referent_1", &Some(_interval()), &None).unwrap();
    Verifier::_validate_timestamp(&received, "referent_1", &None, &Some(_interval())).unwrap();
}
#[test]
fn validate_timestamp_not_work() {
    let received = _received();
    // referent_2 has no timestamp, so any interval requirement fails;
    // referent_3 is absent from the map entirely.
    Verifier::_validate_timestamp(&received, "referent_2", &Some(_interval()), &None).unwrap_err();
    Verifier::_validate_timestamp(&received, "referent_2", &None, &Some(_interval())).unwrap_err();
    Verifier::_validate_timestamp(&received, "referent_3", &None, &Some(_interval())).unwrap_err();
}
}
| 47.821244 | 172 | 0.583076 |
87b30f968e5dbca67bfee52f958daf5e26c89025 | 14,646 | use std::path::{Path, PathBuf};
use pathdiff::diff_paths;
use swc_atoms::js_word;
use swc_common::{FileName, DUMMY_SP};
use swc_ecmascript::ast::{
ArrayLit, ArrowExpr, BinExpr, BinaryOp, BlockStmtOrExpr, Bool, CallExpr, Callee, Expr,
ExprOrSpread, Ident, ImportDecl, ImportSpecifier, KeyValueProp, Lit, MemberExpr, MemberProp,
Null, ObjectLit, Prop, PropName, PropOrSpread, Str, StrKind,
};
use swc_ecmascript::utils::ExprFactory;
use swc_ecmascript::utils::{
ident::{Id, IdentLike},
HANDLER,
};
use swc_ecmascript::visit::{Fold, FoldWith};
/// Builds the `next/dynamic` transform pass.
///
/// `is_development`/`is_server` select which `loadableGenerated` shape is
/// emitted; `filename` and `pages_dir` are used to render the module id
/// relative to the pages directory.
pub fn next_dynamic(
    is_development: bool,
    is_server: bool,
    filename: FileName,
    pages_dir: Option<PathBuf>,
) -> impl Fold {
    NextDynamicPatcher {
        is_development,
        is_server,
        filename,
        pages_dir,
        dynamic_bindings: Vec::new(),
        is_next_dynamic_first_arg: false,
        dynamically_imported_specifier: None,
    }
}
#[derive(Debug)]
struct NextDynamicPatcher {
    // Build flags: dev/server builds embed "<file> -> <specifier>" module
    // strings; prod client builds embed `require.resolveWeak` hooks instead.
    is_development: bool,
    is_server: bool,
    // Pages directory used to relativize `filename` in emitted module ids.
    pages_dir: Option<PathBuf>,
    // File currently being transformed.
    filename: FileName,
    // Local bindings introduced by `import <name> from 'next/dynamic'`.
    dynamic_bindings: Vec<Id>,
    // True only while folding the first argument of a `dynamic(...)` call.
    is_next_dynamic_first_arg: bool,
    // Specifier of the `import('...')` found inside that first argument.
    dynamically_imported_specifier: Option<String>,
}
impl Fold for NextDynamicPatcher {
    // Records which local names are bound to the `next/dynamic` default
    // export so that calls through them can be recognized later.
    fn fold_import_decl(&mut self, decl: ImportDecl) -> ImportDecl {
        let ImportDecl {
            ref src,
            ref specifiers,
            ..
        } = decl;
        if &src.value == "next/dynamic" {
            for specifier in specifiers {
                if let ImportSpecifier::Default(default_specifier) = specifier {
                    self.dynamic_bindings.push(default_specifier.local.to_id());
                }
            }
        }

        decl
    }

    // Rewrites `dynamic(() => import('x'), opts?)` calls, injecting the
    // `loadableGenerated` option and (for server + ssr:false) nulling the
    // loader argument.
    fn fold_call_expr(&mut self, expr: CallExpr) -> CallExpr {
        if self.is_next_dynamic_first_arg {
            // Inside the first argument of a `dynamic(...)` call: only look
            // for a bare `import('...')` and record its string specifier.
            if let Callee::Import(..) = &expr.callee {
                if let Expr::Lit(Lit::Str(Str { value, .. })) = &*expr.args[0].expr {
                    self.dynamically_imported_specifier = Some(value.to_string());
                }
            }
            return expr.fold_children_with(self);
        }
        let mut expr = expr.fold_children_with(self);
        if let Callee::Expr(i) = &expr.callee {
            if let Expr::Ident(identifier) = &**i {
                if self.dynamic_bindings.contains(&identifier.to_id()) {
                    // Arity validation: dynamic(loader) or dynamic(loader, options).
                    if expr.args.is_empty() {
                        HANDLER.with(|handler| {
                            handler
                                .struct_span_err(
                                    identifier.span,
                                    "next/dynamic requires at least one argument",
                                )
                                .emit()
                        });
                        return expr;
                    } else if expr.args.len() > 2 {
                        HANDLER.with(|handler| {
                            handler
                                .struct_span_err(
                                    identifier.span,
                                    "next/dynamic only accepts 2 arguments",
                                )
                                .emit()
                        });
                        return expr;
                    }
                    if expr.args.len() == 2 {
                        // The options argument must be an object literal so
                        // its props can be merged below.
                        match &*expr.args[1].expr {
                            Expr::Object(_) => {}
                            _ => {
                                HANDLER.with(|handler| {
                                    handler
                                        .struct_span_err(
                                            identifier.span,
                                            "next/dynamic options must be an object literal.\nRead more: https://nextjs.org/docs/messages/invalid-dynamic-options-type",
                                        )
                                        .emit();
                                });
                                return expr;
                            }
                        }
                    }

                    // Fold the loader argument with the flag set so the
                    // `import('...')` specifier inside it is captured.
                    self.is_next_dynamic_first_arg = true;
                    expr.args[0].expr = expr.args[0].expr.clone().fold_with(self);
                    self.is_next_dynamic_first_arg = false;

                    // No `import()` was found in the loader: leave the call as-is.
                    if self.dynamically_imported_specifier.is_none() {
                        return expr;
                    }

                    // dev client or server:
                    // loadableGenerated: {
                    //   modules:
                    // ["/project/src/file-being-transformed.js -> " + '../components/hello'] }

                    // prod client
                    // loadableGenerated: {
                    //   webpack: () => [require.resolveWeak('../components/hello')],
                    let generated = Box::new(Expr::Object(ObjectLit {
                        span: DUMMY_SP,
                        props: if self.is_development || self.is_server {
                            // modules: ["<relative file> -> <specifier>"]
                            vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                                key: PropName::Ident(Ident::new("modules".into(), DUMMY_SP)),
                                value: Box::new(Expr::Array(ArrayLit {
                                    elems: vec![Some(ExprOrSpread {
                                        expr: Box::new(Expr::Bin(BinExpr {
                                            span: DUMMY_SP,
                                            op: BinaryOp::Add,
                                            left: Box::new(Expr::Lit(Lit::Str(Str {
                                                value: format!(
                                                    "{} -> ",
                                                    rel_filename(
                                                        self.pages_dir.as_deref(),
                                                        &self.filename
                                                    )
                                                )
                                                .into(),
                                                span: DUMMY_SP,
                                                kind: StrKind::Synthesized {},
                                                has_escape: false,
                                            }))),
                                            right: Box::new(Expr::Lit(Lit::Str(Str {
                                                value: self
                                                    .dynamically_imported_specifier
                                                    .as_ref()
                                                    .unwrap()
                                                    .clone()
                                                    .into(),
                                                span: DUMMY_SP,
                                                kind: StrKind::Normal {
                                                    contains_quote: false,
                                                },
                                                has_escape: false,
                                            }))),
                                        })),
                                        spread: None,
                                    })],
                                    span: DUMMY_SP,
                                })),
                            })))]
                        } else {
                            // webpack: () => [require.resolveWeak('<specifier>')]
                            vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                                key: PropName::Ident(Ident::new("webpack".into(), DUMMY_SP)),
                                value: Box::new(Expr::Arrow(ArrowExpr {
                                    params: vec![],
                                    body: BlockStmtOrExpr::Expr(Box::new(Expr::Array(ArrayLit {
                                        elems: vec![Some(ExprOrSpread {
                                            expr: Box::new(Expr::Call(CallExpr {
                                                callee: Callee::Expr(Box::new(Expr::Member(
                                                    MemberExpr {
                                                        obj: Box::new(Expr::Ident(Ident {
                                                            sym: js_word!("require"),
                                                            span: DUMMY_SP,
                                                            optional: false,
                                                        })),
                                                        prop: MemberProp::Ident(Ident {
                                                            sym: "resolveWeak".into(),
                                                            span: DUMMY_SP,
                                                            optional: false,
                                                        }),
                                                        span: DUMMY_SP,
                                                    },
                                                ))),
                                                args: vec![ExprOrSpread {
                                                    expr: Box::new(Expr::Lit(Lit::Str(Str {
                                                        value: self
                                                            .dynamically_imported_specifier
                                                            .as_ref()
                                                            .unwrap()
                                                            .clone()
                                                            .into(),
                                                        span: DUMMY_SP,
                                                        kind: StrKind::Synthesized {},
                                                        has_escape: false,
                                                    }))),
                                                    spread: None,
                                                }],
                                                span: DUMMY_SP,
                                                type_args: None,
                                            })),
                                            spread: None,
                                        })],
                                        span: DUMMY_SP,
                                    }))),
                                    is_async: false,
                                    is_generator: false,
                                    span: DUMMY_SP,
                                    return_type: None,
                                    type_params: None,
                                })),
                            })))]
                        },
                    }));

                    // Start the merged options with loadableGenerated, then
                    // append the user's own option props (if any).
                    let mut props =
                        vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
                            key: PropName::Ident(Ident::new("loadableGenerated".into(), DUMMY_SP)),
                            value: generated,
                        })))];

                    // Scan the user options for a literal `ssr: false`.
                    let mut has_ssr_false = false;

                    if expr.args.len() == 2 {
                        if let Expr::Object(ObjectLit {
                            props: options_props,
                            ..
                        }) = &*expr.args[1].expr
                        {
                            for prop in options_props.iter() {
                                if let Some(KeyValueProp { key, value }) = match prop {
                                    PropOrSpread::Prop(prop) => match &**prop {
                                        Prop::KeyValue(key_value_prop) => Some(key_value_prop),
                                        _ => None,
                                    },
                                    _ => None,
                                } {
                                    if let Some(Ident {
                                        sym,
                                        span: _,
                                        optional: _,
                                    }) = match key {
                                        PropName::Ident(ident) => Some(ident),
                                        _ => None,
                                    } {
                                        if sym == "ssr" {
                                            if let Some(Lit::Bool(Bool {
                                                value: false,
                                                span: _,
                                            })) = match &**value {
                                                Expr::Lit(lit) => Some(lit),
                                                _ => None,
                                            } {
                                                has_ssr_false = true
                                            }
                                        }
                                    }
                                }
                            }
                            props.extend(options_props.iter().cloned());
                        }
                    }

                    // ssr:false on the server: the loader is never invoked,
                    // so replace it with `null` to drop the import.
                    if has_ssr_false && self.is_server {
                        expr.args[0] = Lit::Null(Null { span: DUMMY_SP }).as_arg();
                    }

                    let second_arg = ExprOrSpread {
                        spread: None,
                        expr: Box::new(Expr::Object(ObjectLit {
                            span: DUMMY_SP,
                            props,
                        })),
                    };

                    // Replace or append the options argument.
                    if expr.args.len() == 2 {
                        expr.args[1] = second_arg;
                    } else {
                        expr.args.push(second_arg)
                    }
                    // Reset per-call state for the next `dynamic(...)` call.
                    self.dynamically_imported_specifier = None;
                }
            }
        }
        expr
    }
}
/// Renders `file` relative to `base` when both are real filesystem paths;
/// in every other case falls back to the file's own display form.
fn rel_filename(base: Option<&Path>, file: &FileName) -> String {
    let base = match base {
        Some(base) => base,
        None => return file.to_string(),
    };
    let path = match file {
        FileName::Real(path) => path,
        _ => return file.to_string(),
    };
    match diff_paths(path, base) {
        Some(rel) => rel.display().to_string(),
        // No relative form exists (e.g. different roots): use the path itself.
        None => path.display().to_string(),
    }
}
| 45.484472 | 158 | 0.313533 |
e99a9e4a66574300694ce6350a354b3ddd83bc5f | 5,344 | #[doc = "Reader of register ANACTRL_SET"]
// Type-level reader/writer views over the 32-bit ANACTRL_SET register.
pub type R = crate::R<u32, super::ANACTRL_SET>;
#[doc = "Writer for register ANACTRL_SET"]
pub type W = crate::W<u32, super::ANACTRL_SET>;
#[doc = "Register ANACTRL_SET `reset()`'s with value 0x0a00_0402"]
impl crate::ResetValue for super::ANACTRL_SET {
    type Type = u32;
    // Power-on value: bits 1, 10, 25 and 27 set (0x0a00_0402).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0a00_0402
    }
}
#[doc = "Reader of field `LVI_EN`"]
pub type LVI_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LVI_EN`"]
pub struct LVI_EN_W<'a> {
    // Borrow of the register writer whose bit 1 this proxy mutates.
    w: &'a mut W,
}
impl<'a> LVI_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // LVI_EN is the single-bit field at offset 1: clear it first,
        // then OR in the (masked) new value.
        const OFFSET: u32 = 1;
        const MASK: u32 = 0x01;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `PFD_CLK_SEL`"]
pub type PFD_CLK_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PFD_CLK_SEL`"]
pub struct PFD_CLK_SEL_W<'a> {
    // Borrow of the register writer whose bits 2:3 this proxy mutates.
    w: &'a mut W,
}
impl<'a> PFD_CLK_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Two-bit field at offset 2; excess bits of `value` are masked off.
        const OFFSET: u32 = 2;
        const MASK: u32 = 0x03;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Possible values of the field `DEV_PULLDOWN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DEV_PULLDOWN_A {
#[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare disabled in device mode."]
VALUE0,
#[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare enabled in device mode."]
VALUE1,
}
impl From<DEV_PULLDOWN_A> for bool {
    #[inline(always)]
    fn from(variant: DEV_PULLDOWN_A) -> Self {
        // VALUE0 <-> false, VALUE1 <-> true.
        matches!(variant, DEV_PULLDOWN_A::VALUE1)
    }
}
#[doc = "Reader of field `DEV_PULLDOWN`"]
pub type DEV_PULLDOWN_R = crate::R<bool, DEV_PULLDOWN_A>;
impl DEV_PULLDOWN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DEV_PULLDOWN_A {
        // Map the raw bit back onto the enum.
        match self.bits {
            false => DEV_PULLDOWN_A::VALUE0,
            true => DEV_PULLDOWN_A::VALUE1,
        }
    }
    #[doc = "Checks if the value of the field is `VALUE0`"]
    #[inline(always)]
    pub fn is_value0(&self) -> bool {
        *self == DEV_PULLDOWN_A::VALUE0
    }
    #[doc = "Checks if the value of the field is `VALUE1`"]
    #[inline(always)]
    pub fn is_value1(&self) -> bool {
        *self == DEV_PULLDOWN_A::VALUE1
    }
}
#[doc = "Write proxy for field `DEV_PULLDOWN`"]
pub struct DEV_PULLDOWN_W<'a> {
    w: &'a mut W,
}
impl<'a> DEV_PULLDOWN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DEV_PULLDOWN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pins are disabled in device mode."]
    #[inline(always)]
    pub fn value0(self) -> &'a mut W {
        self.variant(DEV_PULLDOWN_A::VALUE0)
    }
    #[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pins are enabled in device mode."]
    #[inline(always)]
    pub fn value1(self) -> &'a mut W {
        self.variant(DEV_PULLDOWN_A::VALUE1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DEV_PULLDOWN is the single-bit field at offset 10.
        const OFFSET: u32 = 10;
        const MASK: u32 = 0x01;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = "Bit 1 - Vow voltage detector enable bit."]
    #[inline(always)]
    pub fn lvi_en(&self) -> LVI_EN_R {
        // Extract bit 1 as a bool-typed field reader.
        LVI_EN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bits 2:3 - For normal USB operation, this bit field must remain at value 2'b00."]
    #[inline(always)]
    pub fn pfd_clk_sel(&self) -> PFD_CLK_SEL_R {
        // Extract the two-bit field at offset 2.
        PFD_CLK_SEL_R::new(((self.bits >> 2) & 0x03) as u8)
    }
    #[doc = "Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"]
    #[inline(always)]
    pub fn dev_pulldown(&self) -> DEV_PULLDOWN_R {
        // Extract bit 10 as an enum-typed field reader.
        DEV_PULLDOWN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 1 - Vow voltage detector enable bit."]
    #[inline(always)]
    pub fn lvi_en(&mut self) -> LVI_EN_W {
        // Hands out the write proxy for bit 1.
        LVI_EN_W { w: self }
    }
    #[doc = "Bits 2:3 - For normal USB operation, this bit field must remain at value 2'b00."]
    #[inline(always)]
    pub fn pfd_clk_sel(&mut self) -> PFD_CLK_SEL_W {
        // Hands out the write proxy for bits 2:3.
        PFD_CLK_SEL_W { w: self }
    }
    #[doc = "Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"]
    #[inline(always)]
    pub fn dev_pulldown(&mut self) -> DEV_PULLDOWN_W {
        // Hands out the write proxy for bit 10.
        DEV_PULLDOWN_W { w: self }
    }
}
| 32.785276 | 123 | 0.590569 |
f90f931b57bc8d347bdbb1ee5f809a54194635e9 | 5,500 | use crate::artifacts::{
contract::{CompactContractRef, Contract},
FileToContractsMap,
};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// file -> [(contract name -> Contract + solc version)]
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
#[serde(transparent)]
// Thin newtype over the map so lookup/iteration helpers can live on it;
// `transparent` keeps the serialized form identical to the inner map.
pub struct VersionedContracts(pub FileToContractsMap<Vec<VersionedContract>>);
impl VersionedContracts {
    /// Returns `true` if no files (and therefore no contracts) are present.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Returns the number of files in the set.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Returns an iterator over all files
    pub fn files(&self) -> impl Iterator<Item = &String> + '_ {
        self.0.keys()
    }

    /// Finds the _first_ contract with the given name
    ///
    /// # Example
    ///
    /// ```
    /// use ethers_solc::Project;
    /// use ethers_solc::artifacts::*;
    /// # fn demo(project: Project) {
    /// let output = project.compile().unwrap().output();
    /// let contract = output.find("Greeter").unwrap();
    /// # }
    /// ```
    pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
        let contract_name = contract.as_ref();
        self.contracts().find_map(|(name, contract)| {
            (name == contract_name).then(|| CompactContractRef::from(contract))
        })
    }

    /// Removes the _first_ contract with the given name from the set
    ///
    /// # Example
    ///
    /// ```
    /// use ethers_solc::Project;
    /// use ethers_solc::artifacts::*;
    /// # fn demo(project: Project) {
    /// let (_, mut contracts) = project.compile().unwrap().output().split();
    /// let contract = contracts.remove("Greeter").unwrap();
    /// # }
    /// ```
    pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
        let contract_name = contract.as_ref();
        self.0.values_mut().find_map(|all_contracts| {
            let mut contract = None;
            // Take the whole entry out, pop the first compiled version, and
            // re-insert the remainder so later versions are preserved.
            if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) {
                if !contracts.is_empty() {
                    contract = Some(contracts.remove(0).contract);
                }
                if !contracts.is_empty() {
                    all_contracts.insert(c, contracts);
                }
            }
            contract
        })
    }

    /// Given the contract file's path and the contract's name, tries to return the contract's
    /// bytecode, runtime bytecode, and abi
    pub fn get(&self, path: &str, contract: &str) -> Option<CompactContractRef> {
        self.0
            .get(path)
            .and_then(|contracts| {
                // Only the first compiled version is exposed here
                // (`first()` replaces the non-idiomatic `get(0)`).
                contracts.get(contract).and_then(|c| c.first().map(|c| &c.contract))
            })
            .map(CompactContractRef::from)
    }

    /// Iterate over all contracts and their names
    pub fn contracts(&self) -> impl Iterator<Item = (&String, &Contract)> {
        self.0
            .values()
            .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract))))
    }

    /// Returns an iterator over (`file`, `name`, `Contract`)
    pub fn contracts_with_files(&self) -> impl Iterator<Item = (&String, &String, &Contract)> {
        self.0.iter().flat_map(|(file, contracts)| {
            contracts
                .iter()
                .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract)))
        })
    }

    /// Returns an iterator over (`file`, `name`, `Contract`, `Version`)
    pub fn contracts_with_files_and_version(
        &self,
    ) -> impl Iterator<Item = (&String, &String, &Contract, &Version)> {
        self.0.iter().flat_map(|(file, contracts)| {
            contracts.iter().flat_map(move |(name, c)| {
                c.iter().map(move |c| (file, name, &c.contract, &c.version))
            })
        })
    }

    /// Returns an iterator over all contracts and their source names.
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use ethers_solc::{ artifacts::*, Artifact };
    /// use ethers_solc::artifacts::contract::CompactContractSome;
    /// # fn demo(contracts: OutputContracts) {
    /// let contracts: BTreeMap<String, CompactContractSome> = contracts
    ///     .into_contracts()
    ///     .map(|(k, c)| (k, c.into_compact_contract().unwrap()))
    ///     .collect();
    /// # }
    /// ```
    pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
        self.0.into_values().flat_map(|c| {
            c.into_iter()
                .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract)))
        })
    }
}
impl AsRef<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
    // Cheap shared view of the underlying file -> contracts map.
    fn as_ref(&self) -> &FileToContractsMap<Vec<VersionedContract>> {
        &self.0
    }
}

impl AsMut<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
    // Mutable view of the underlying file -> contracts map.
    fn as_mut(&mut self) -> &mut FileToContractsMap<Vec<VersionedContract>> {
        &mut self.0
    }
}

impl IntoIterator for VersionedContracts {
    type Item = (String, BTreeMap<String, Vec<VersionedContract>>);
    type IntoIter =
        std::collections::btree_map::IntoIter<String, BTreeMap<String, Vec<VersionedContract>>>;

    // Consumes the wrapper, yielding (file, contracts-by-name) pairs.
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
/// A contract and the compiler version used to compile it
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VersionedContract {
    pub contract: Contract,
    // Solc version that produced `contract`.
    pub version: Version,
}
| 33.950617 | 100 | 0.581636 |
bf085365b621cbdfc83c97677a3698db84bc2055 | 2,410 | // Copyright 2020 Adam Reichold
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cmp::min;
use std::io::{BufRead, Read, Result as IoResult};
use encoding_rs::{CoderResult, Decoder, Encoding};
/// Adapts a byte reader in an arbitrary encoding into a UTF-8 byte stream.
pub struct EncodedReader<R> {
    // Underlying source of encoded bytes.
    reader: R,
    // Streaming decoder for the source encoding.
    decoder: Decoder,
    // Decoded UTF-8 bytes not yet handed to the caller.
    buf: Vec<u8>,
    // Read offset into `buf`; `buf[pos..]` is what `fill_buf` exposes.
    pos: usize,
}
impl<R> EncodedReader<R> {
pub fn new(reader: R, encoding: &'static Encoding) -> Self {
Self {
reader,
decoder: encoding.new_decoder(),
buf: Vec::new(),
pos: 0,
}
}
}
impl<R: BufRead> BufRead for EncodedReader<R> {
    // Refills the decoded buffer whenever the caller has consumed it all.
    fn fill_buf(&mut self) -> IoResult<&[u8]> {
        if self.buf.len() == self.pos {
            let buf = self.reader.fill_buf()?;
            // Size the output for the worst-case UTF-8 expansion of `buf`.
            let max_buf_len = self.decoder.max_utf8_buffer_length(buf.len()).unwrap();
            self.buf.resize(max_buf_len, 0);

            // An empty inner buffer means EOF: flag the final call so the
            // decoder can flush any pending state.
            // NOTE(review): after EOF this path keeps calling decode_to_utf8
            // with last=true on every subsequent fill_buf; encoding_rs forbids
            // feeding a finished decoder — confirm this cannot be reached twice.
            let last = buf.is_empty();
            let (reason, read, written, _) = self.decoder.decode_to_utf8(buf, &mut self.buf, last);
            // The output buffer was sized for the whole input, so the
            // decoder must have consumed everything.
            assert_eq!(CoderResult::InputEmpty, reason);

            self.reader.consume(read);

            self.buf.truncate(written);
            self.pos = 0;
        }

        Ok(&self.buf[self.pos..])
    }

    fn consume(&mut self, amt: usize) {
        // Clamp so a caller over-consuming cannot push `pos` past the buffer.
        self.pos = min(self.pos + amt, self.buf.len());
    }
}
impl<R: BufRead> Read for EncodedReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
        // Serve from the decoded buffer, then mark those bytes consumed.
        let n = {
            let mut decoded = self.fill_buf()?;
            decoded.read(buf)?
        };
        self.consume(n);
        Ok(n)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use encoding_rs::WINDOWS_1252;

    #[test]
    fn decode_windows_1252() {
        // Round-trip: encode two non-ASCII chars, then decode them back.
        let (encoded, encoding, _) = WINDOWS_1252.encode("äé");
        let mut reader = EncodedReader::new(&*encoded, encoding);

        let mut decoded = String::new();
        reader.read_to_string(&mut decoded).unwrap();

        assert_eq!("äé", decoded);
    }
}
| 27.386364 | 99 | 0.602075 |
1830b41d0b50681ee7a986b259ae0ca1d7e4655a | 2,932 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Various uses of `T::Item` syntax where the bound that supplies
// `Item` originates in a where-clause, not the declaration of
// `T`. Issue #20300.
use std::marker::{PhantomData};
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT};
use std::sync::atomic::Ordering::SeqCst;
static COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// Preamble.
trait Trait { type Item; }
struct Struct;
impl Trait for Struct {
type Item = u32;
}
// Where-clause attached on the method which declares `T`.
struct A;
impl A {
fn foo<T>(_x: T::Item) where T: Trait {
COUNTER.fetch_add(1, SeqCst);
}
}
// Where-clause attached on the method to a parameter from the struct.
struct B<T>(PhantomData<T>);
impl<T> B<T> {
    // `T` comes from the struct; the method's where-clause adds `T: Trait`.
    fn foo(_x: T::Item) where T: Trait {
        COUNTER.fetch_add(10, SeqCst);
    }
}
// Where-clause attached to free fn.
fn c<T>(_: T::Item) where T : Trait {
    // Contributes the hundreds digit to COUNTER.
    COUNTER.fetch_add(100, SeqCst);
}
// Where-clause attached to defaulted and non-defaulted trait method.
trait AnotherTrait {
    fn method<T>(&self, _: T::Item) where T: Trait;

    // Default body runs unless an impl overrides it.
    fn default_method<T>(&self, _: T::Item) where T: Trait {
        COUNTER.fetch_add(1000, SeqCst);
    }
}

struct D;
impl AnotherTrait for D {
    fn method<T>(&self, _: T::Item) where T: Trait {
        COUNTER.fetch_add(10000, SeqCst);
    }
}
// Where-clause attached to trait and impl containing the method.
trait YetAnotherTrait<T>
    where T : Trait
{
    fn method(&self, _: T::Item);

    fn default_method(&self, _: T::Item) {
        COUNTER.fetch_add(100000, SeqCst);
    }
}

struct E<T>(PhantomData<T>);
impl<T> YetAnotherTrait<T> for E<T>
    where T : Trait
{
    // Overrides `method` only; `default_method` comes from the trait body.
    fn method(&self, _: T::Item) {
        COUNTER.fetch_add(1000000, SeqCst);
    }
}
// Where-clause attached to inherent impl containing the method.
struct F<T>(PhantomData<T>);
impl<T> F<T> where T : Trait {
    // The bound lives on the impl block, not on the struct or the method.
    fn method(&self, _: T::Item) {
        COUNTER.fetch_add(10000000, SeqCst);
    }
}
// Where-clause attached to struct.
#[allow(dead_code)]
struct G<T> where T : Trait {
    // Field type uses the associated item directly; constructing G adds
    // nothing to COUNTER.
    data: T::Item,
    phantom: PhantomData<T>,
}
fn main() {
    // Each call below bumps COUNTER by a distinct power of ten, so the
    // final sum 11111111 proves every path executed exactly once.
    A::foo::<Struct>(22);
    B::<Struct>::foo(22);
    c::<Struct>(22);
    D.method::<Struct>(22);
    D.default_method::<Struct>(22);
    E(PhantomData::<Struct>).method(22);
    E(PhantomData::<Struct>).default_method(22);
    F(PhantomData::<Struct>).method(22);
    // Only constructed, never called: contributes nothing to COUNTER.
    G::<Struct> { data: 22, phantom: PhantomData };

    assert_eq!(COUNTER.load(SeqCst), 11111111);
}
| 27.148148 | 70 | 0.658254 |
ac730b3578b7f991a8098c66921b2c07bb52d013 | 34,982 | //! A multi-producer, single-consumer queue for sending values across
//! asynchronous tasks.
//!
//! Similarly to the `std`, channel creation provides [`Receiver`] and
//! [`Sender`] handles. [`Receiver`] implements [`Stream`] and allows a task to
//! read values out of the channel. If there is no message to read from the
//! channel, the current task will be notified when a new value is sent.
//! [`Sender`] implements the `Sink` trait and allows a task to send messages into
//! the channel. If the channel is at capacity, the send will be rejected and
//! the task will be notified when additional capacity is available. In other
//! words, the channel provides backpressure.
//!
//! Unbounded channels are also available using the `unbounded` constructor.
//!
//! # Disconnection
//!
//! When all [`Sender`] handles have been dropped, it is no longer
//! possible to send values into the channel. This is considered the termination
//! event of the stream. As such, [`Receiver::poll_next`]
//! will return `Ok(Ready(None))`.
//!
//! If the [`Receiver`] handle is dropped, then messages can no longer
//! be read out of the channel. In this case, all further attempts to send will
//! result in an error.
//!
//! # Clean Shutdown
//!
//! If the [`Receiver`] is simply dropped, then it is possible for
//! there to be messages still in the channel that will not be processed. As
//! such, it is usually desirable to perform a "clean" shutdown. To do this, the
//! receiver will first call `close`, which will prevent any further messages to
//! be sent into the channel. Then, the receiver consumes the channel to
//! completion, at which point the receiver can be dropped.
//!
//! [`Sender`]: struct.Sender.html
//! [`Receiver`]: struct.Receiver.html
//! [`Stream`]: ../../futures_core/stream/trait.Stream.html
//! [`Receiver::poll_next`]:
//! ../../futures_core/stream/trait.Stream.html#tymethod.poll_next
// At the core, the channel uses an atomic FIFO queue for message passing. This
// queue is used as the primary coordination primitive. In order to enforce
// capacity limits and handle back pressure, a secondary FIFO queue is used to
// send parked task handles.
//
// The general idea is that the channel is created with a `buffer` size of `n`.
// The channel capacity is `n + num-senders`. Each sender gets one "guaranteed"
// slot to hold a message. This allows `Sender` to know for a fact that a send
// will succeed *before* starting to do the actual work of sending the value.
// Since most of this work is lock-free, once the work starts, it is impossible
// to safely revert.
//
// If the sender is unable to process a send operation, then the current
// task is parked and the handle is sent on the parked task queue.
//
// Note that the implementation guarantees that the channel capacity will never
// exceed the configured limit, however there is no *strict* guarantee that the
// receiver will wake up a parked task *immediately* when a slot becomes
// available. However, it will almost always unpark a task when a slot becomes
// available and it is *guaranteed* that a sender will be unparked when the
// message that caused the sender to become parked is read out of the channel.
//
// The steps for sending a message are roughly:
//
// 1) Increment the channel message count
// 2) If the channel is at capacity, push the task handle onto the wait queue
// 3) Push the message onto the message queue.
//
// The steps for receiving a message are roughly:
//
// 1) Pop a message from the message queue
// 2) Pop a task handle from the wait queue
// 3) Decrement the channel message count.
//
// It's important for the order of operations on lock-free structures to happen
// in reverse order between the sender and receiver. This makes the message
// queue the primary coordination structure and establishes the necessary
// happens-before semantics required for the acquire / release semantics used
// by the queue structure.
use futures_core::stream::Stream;
use futures_core::task::{LocalWaker, Waker, Poll};
use std::any::Any;
use std::error::Error;
use std::fmt;
use std::marker::Unpin;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
use std::thread;
use std::usize;
use crate::mpsc::queue::{Queue, PopResult};
mod queue;
/// The transmission end of a bounded mpsc channel.
///
/// This value is created by the [`channel`](channel) function.
#[derive(Debug)]
pub struct Sender<T> {
    // Channel state shared between the sender and receiver.
    inner: Arc<Inner<T>>,
    // Handle to the task that is blocked on this sender. This handle is sent
    // to the receiver half in order to be notified when the sender becomes
    // unblocked.
    sender_task: Arc<Mutex<SenderTask>>,
    // True if the sender might be blocked. This is an optimization to avoid
    // having to lock the mutex most of the time.
    maybe_parked: bool,
}
// We never project Pin<&mut Sender> to `Pin<&mut T>`
impl<T> Unpin for Sender<T> {}
/// The transmission end of an unbounded mpsc channel.
///
/// This value is created by the [`unbounded`](unbounded) function.
#[derive(Debug)]
pub struct UnboundedSender<T>(Sender<T>);
// Compile-time assertion that `UnboundedSender` is `Send + Sync + Clone`;
// the impl below fails to compile if any of those bounds stops holding.
trait AssertKinds: Send + Sync + Clone {}
impl AssertKinds for UnboundedSender<u32> {}
/// The receiving end of a bounded mpsc channel.
///
/// This value is created by the [`channel`](channel) function.
#[derive(Debug)]
pub struct Receiver<T> {
    // Channel state shared with every `Sender` clone.
    inner: Arc<Inner<T>>,
}
/// The receiving end of an unbounded mpsc channel.
///
/// This value is created by the [`unbounded`](unbounded) function.
#[derive(Debug)]
pub struct UnboundedReceiver<T>(Receiver<T>);
// `Pin<&mut UnboundedReceiver<T>>` is never projected to `Pin<&mut T>`
impl<T> Unpin for UnboundedReceiver<T> {}
/// The error type for [`Sender`s](Sender) used as `Sink`s.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SendError {
    // Why the send failed: channel full or receiver disconnected.
    kind: SendErrorKind,
}
/// The error type returned from [`try_send`](Sender::try_send).
#[derive(Clone, PartialEq, Eq)]
pub struct TrySendError<T> {
    // The underlying failure reason.
    err: SendError,
    // The message that could not be sent, handed back to the caller.
    val: T,
}
// Distinguishes the two ways a send can fail.
#[derive(Clone, Debug, PartialEq, Eq)]
enum SendErrorKind {
    Full,
    Disconnected,
}
/// The error type returned from [`try_next`](Receiver::try_next).
pub struct TryRecvError {
    // Private unit field: prevents construction outside this module.
    _inner: (),
}
impl fmt::Display for SendError {
    // Human-readable description, chosen by the failure kind.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.is_full() {
            "send failed because channel is full"
        } else {
            "send failed because receiver is gone"
        };
        fmt.write_str(text)
    }
}
impl Error for SendError {
fn description(&self) -> &str {
if self.is_full() {
"send failed because channel is full"
} else {
"send failed because receiver is gone"
}
}
}
impl SendError {
    /// Returns true if this error is a result of the channel being full.
    pub fn is_full(&self) -> bool {
        // `SendErrorKind` derives `PartialEq`, so a direct comparison
        // replaces the original `match`.
        self.kind == SendErrorKind::Full
    }
    /// Returns true if this error is a result of the receiver being dropped.
    pub fn is_disconnected(&self) -> bool {
        self.kind == SendErrorKind::Disconnected
    }
}
// Manual `Debug`: `T` may not be `Debug`, so only the error kind is shown.
// The builder API is kept so alternate (`{:#?}`) formatting stays correct.
impl<T> fmt::Debug for TrySendError<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("TrySendError")
            .field("kind", &self.err.kind)
            .finish()
    }
}
impl<T> fmt::Display for TrySendError<T> {
    // Mirrors `SendError`'s text for the wrapped error.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.is_full() {
            "send failed because channel is full"
        } else {
            "send failed because receiver is gone"
        };
        fmt.write_str(text)
    }
}
impl<T: Any> Error for TrySendError<T> {
fn description(&self) -> &str {
if self.is_full() {
"send failed because channel is full"
} else {
"send failed because receiver is gone"
}
}
}
impl<T> TrySendError<T> {
    /// Returns true if this error is a result of the channel being full.
    pub fn is_full(&self) -> bool {
        self.err.is_full()
    }
    /// Returns true if this error is a result of the receiver being dropped.
    pub fn is_disconnected(&self) -> bool {
        self.err.is_disconnected()
    }
    /// Returns the message that was attempted to be sent but failed,
    /// letting the caller recover ownership of the value.
    pub fn into_inner(self) -> T {
        self.val
    }
    /// Drops the message and converts into a `SendError`.
    pub fn into_send_error(self) -> SendError {
        self.err
    }
}
// Manual `Debug` keeps the private `_inner` field out of the output.
impl fmt::Debug for TryRecvError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_tuple("TryRecvError")
            .finish()
    }
}
impl fmt::Display for TryRecvError {
    // Same text as `Error::description`, written inline instead of going
    // through the trait method.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("receiver channel is empty")
    }
}
impl Error for TryRecvError {
    // There is only one failure mode: the channel had no message ready.
    fn description(&self) -> &str {
        "receiver channel is empty"
    }
}
// Shared core of the channel, referenced by every sender and the receiver.
#[derive(Debug)]
struct Inner<T> {
    // Max buffer size of the channel. If `None` then the channel is unbounded.
    buffer: Option<usize>,
    // Internal channel state. Consists of the number of messages stored in the
    // channel as well as a flag signalling that the channel is closed.
    state: AtomicUsize,
    // Atomic, FIFO queue used to send messages to the receiver
    message_queue: Queue<Option<T>>,
    // Atomic, FIFO queue used to send parked task handles to the receiver.
    parked_queue: Queue<Arc<Mutex<SenderTask>>>,
    // Number of senders in existence
    num_senders: AtomicUsize,
    // Handle to the receiver's task.
    recv_task: Mutex<ReceiverTask>,
}
// Struct representation of `Inner::state`.
#[derive(Debug, Clone, Copy)]
struct State {
    // `true` when the channel is open
    is_open: bool,
    // Number of messages in the channel
    num_messages: usize,
}
// Receiver-side bookkeeping, guarded by `Inner::recv_task`.
#[derive(Debug)]
struct ReceiverTask {
    // Set when a sender signalled the receiver; lets the receiver consume
    // the signal instead of parking unnecessarily.
    unparked: bool,
    // Waker for the receiver task, registered while it is parked.
    task: Option<Waker>,
}
// Returned from Receiver::try_park()
enum TryPark {
    // The receiver's waker was registered; it is safe to return Pending.
    Parked,
    // The channel is closed and drained; the stream is finished.
    Closed,
    // A message arrived while parking; the caller should retry immediately.
    NotEmpty,
}
// The `is_open` flag is stored in the left-most bit of `Inner::state`
const OPEN_MASK: usize = usize::MAX - (usize::MAX >> 1);
// When a new channel is created, it is created in the open state with no
// pending messages.
const INIT_STATE: usize = OPEN_MASK;
// The maximum number of messages that a channel can track is `usize::MAX >> 1`
const MAX_CAPACITY: usize = !(OPEN_MASK);
// The maximum requested buffer size must be less than the maximum capacity of
// a channel. This is because each sender gets a guaranteed slot.
const MAX_BUFFER: usize = MAX_CAPACITY >> 1;
// Sent to the consumer to wake up blocked producers
#[derive(Debug)]
struct SenderTask {
    // Waker for the parked sender task, if one was registered.
    task: Option<Waker>,
    // Whether the sender is still waiting for capacity.
    is_parked: bool,
}
impl SenderTask {
fn new() -> Self {
SenderTask {
task: None,
is_parked: false,
}
}
fn notify(&mut self) {
self.is_parked = false;
if let Some(task) = self.task.take() {
task.wake();
}
}
}
/// Creates a bounded mpsc channel for communicating between asynchronous tasks.
///
/// Being bounded, this channel provides backpressure to ensure that the sender
/// outpaces the receiver by only a limited amount. The channel's capacity is
/// equal to `buffer + num-senders`. In other words, each sender gets a
/// guaranteed slot in the channel capacity, and on top of that there are
/// `buffer` "first come, first serve" slots available to all senders.
///
/// The [`Receiver`](Receiver) returned implements the
/// [`Stream`](futures_core::stream::Stream) trait, while [`Sender`](Sender) implements
/// `Sink`.
///
/// # Panics
///
/// Panics if `buffer >= MAX_BUFFER`, i.e. when the requested size could not
/// be represented in the packed atomic state word.
pub fn channel<T>(buffer: usize) -> (Sender<T>, Receiver<T>) {
    // Check that the requested buffer size does not exceed the maximum buffer
    // size permitted by the system.
    assert!(buffer < MAX_BUFFER, "requested buffer size too large");
    channel2(Some(buffer))
}
/// Creates an unbounded mpsc channel for communicating between asynchronous
/// tasks.
///
/// A `send` on this channel will always succeed as long as the receive half has
/// not been closed. If the receiver falls behind, messages will be arbitrarily
/// buffered.
///
/// **Note** that the amount of available system memory is an implicit bound to
/// the channel. Using an `unbounded` channel has the ability of causing the
/// process to run out of memory. In this case, the process will be aborted.
pub fn unbounded<T>() -> (UnboundedSender<T>, UnboundedReceiver<T>) {
    // A `None` buffer selects the unbounded behaviour in `channel2`.
    let (tx, rx) = channel2(None);
    (UnboundedSender(tx), UnboundedReceiver(rx))
}
// Shared constructor for both channel flavours; `buffer == None` means
// unbounded.
fn channel2<T>(buffer: Option<usize>) -> (Sender<T>, Receiver<T>) {
    let shared = Arc::new(Inner {
        buffer,
        state: AtomicUsize::new(INIT_STATE),
        message_queue: Queue::new(),
        parked_queue: Queue::new(),
        // The sender returned below counts as the first one.
        num_senders: AtomicUsize::new(1),
        recv_task: Mutex::new(ReceiverTask {
            unparked: false,
            task: None,
        }),
    });
    let rx = Receiver {
        inner: Arc::clone(&shared),
    };
    let tx = Sender {
        inner: shared,
        sender_task: Arc::new(Mutex::new(SenderTask::new())),
        maybe_parked: false,
    };
    (tx, rx)
}
/*
*
* ===== impl Sender =====
*
*/
impl<T> Sender<T> {
    /// Attempts to send a message on this `Sender`, returning the message
    /// if there was an error.
    pub fn try_send(&mut self, msg: T) -> Result<(), TrySendError<T>> {
        // If the sender is currently blocked, reject the message
        if !self.poll_unparked(None).is_ready() {
            return Err(TrySendError {
                err: SendError {
                    kind: SendErrorKind::Full,
                },
                val: msg,
            });
        }
        // The channel has capacity to accept the message, so send it
        self.do_send(None, msg)
    }
    /// Send a message on the channel.
    ///
    /// This function should only be called after
    /// [`poll_ready`](Sender::poll_ready) has reported that the channel is
    /// ready to receive a message.
    pub fn start_send(&mut self, msg: T) -> Result<(), SendError> {
        self.try_send(msg)
            .map_err(|e| e.err)
    }
    // Do the send without failing.
    // `lw` is `None` when the caller has no waker to register (e.g.
    // `try_send`); in that case the sender may be parked without a waker.
    fn do_send(&mut self, lw: Option<&LocalWaker>, msg: T)
        -> Result<(), TrySendError<T>>
    {
        // Anyone calling do_send *should* make sure there is room first,
        // but assert here for tests as a sanity check.
        debug_assert!(self.poll_unparked(None).is_ready());
        // First, increment the number of messages contained by the channel.
        // This operation will also atomically determine if the sender task
        // should be parked.
        //
        // None is returned in the case that the channel has been closed by the
        // receiver. This happens when `Receiver::close` is called or the
        // receiver is dropped.
        let park_self = match self.inc_num_messages(false) {
            Some(park_self) => park_self,
            None => return Err(TrySendError {
                err: SendError {
                    kind: SendErrorKind::Disconnected,
                },
                val: msg,
            }),
        };
        // If the channel has reached capacity, then the sender task needs to
        // be parked. This will send the task handle on the parked task queue.
        //
        // However, when `do_send` is called while dropping the `Sender`,
        // `task::current()` can't be called safely. In this case, in order to
        // maintain internal consistency, a blank message is pushed onto the
        // parked task queue.
        if park_self {
            self.park(lw);
        }
        self.queue_push_and_signal(Some(msg));
        Ok(())
    }
    // Do the send without parking current task.
    // `msg == None` is the stream-termination marker and also closes the
    // channel (see `inc_num_messages(true)`).
    fn do_send_nb(&self, msg: Option<T>) -> Result<(), TrySendError<T>> {
        match self.inc_num_messages(msg.is_none()) {
            // Unbounded/closing sends must never require parking.
            Some(park_self) => assert!(!park_self),
            None => {
                // The receiver has closed the channel. Only abort if actually
                // sending a message. It is important that the stream
                // termination (None) is always sent. This technically means
                // that it is possible for the queue to contain the following
                // number of messages:
                //
                //     num-senders + buffer + 1
                //
                if let Some(msg) = msg {
                    return Err(TrySendError {
                        err: SendError {
                            kind: SendErrorKind::Disconnected,
                        },
                        val: msg,
                    });
                } else {
                    return Ok(());
                }
            },
        };
        self.queue_push_and_signal(msg);
        Ok(())
    }
    // Non-blocking readiness check used by the unbounded sender: only
    // reports whether the channel is still open.
    //
    // NOTE(review): a closed channel is reported as `Full` here rather than
    // `Disconnected`; callers appear to only check `is_err`, but confirm the
    // kind is intentional.
    fn poll_ready_nb(&self) -> Poll<Result<(), SendError>> {
        let state = decode_state(self.inner.state.load(SeqCst));
        if state.is_open {
            Poll::Ready(Ok(()))
        } else {
            Poll::Ready(Err(SendError {
                kind: SendErrorKind::Full,
            }))
        }
    }
    // Push message to the queue and signal to the receiver
    fn queue_push_and_signal(&self, msg: Option<T>) {
        // Push the message onto the message queue
        self.inner.message_queue.push(msg);
        // Signal to the receiver that a message has been enqueued. If the
        // receiver is parked, this will unpark the task.
        self.signal();
    }
    // Increment the number of queued messages. Returns if the sender should
    // block.
    fn inc_num_messages(&self, close: bool) -> Option<bool> {
        let mut curr = self.inner.state.load(SeqCst);
        loop {
            let mut state = decode_state(curr);
            // The receiver end closed the channel.
            if !state.is_open {
                return None;
            }
            // This probably is never hit? Odds are the process will run out of
            // memory first. It may be worth to return something else in this
            // case?
            assert!(state.num_messages < MAX_CAPACITY, "buffer space \
                    exhausted; sending this messages would overflow the state");
            state.num_messages += 1;
            // The channel is closed by all sender handles being dropped.
            if close {
                state.is_open = false;
            }
            let next = encode_state(&state);
            match self.inner.state.compare_exchange(curr, next, SeqCst, SeqCst) {
                Ok(_) => {
                    // Block if the current number of pending messages has exceeded
                    // the configured buffer size
                    let park_self = !close && match self.inner.buffer {
                        Some(buffer) => state.num_messages > buffer,
                        None => false,
                    };
                    return Some(park_self)
                }
                Err(actual) => curr = actual,
            }
        }
    }
    // Signal to the receiver task that a message has been enqueued
    fn signal(&self) {
        // TODO
        // This logic can probably be improved by guarding the lock with an
        // atomic.
        //
        // Do this step first so that the lock is dropped when
        // `unpark` is called
        let task = {
            let mut recv_task = self.inner.recv_task.lock().unwrap();
            // If the receiver has already been unparked, then there is nothing
            // more to do
            if recv_task.unparked {
                return;
            }
            // Setting this flag enables the receiving end to detect that
            // an unpark event happened in order to avoid unnecessarily
            // parking.
            recv_task.unparked = true;
            recv_task.task.take()
        };
        // Wake outside the lock scope above.
        if let Some(task) = task {
            task.wake();
        }
    }
    // Park the sender: record its waker (if any) and enqueue the handle so
    // the receiver can wake it when a slot frees up.
    fn park(&mut self, lw: Option<&LocalWaker>) {
        // TODO: clean up internal state if the task::current will fail
        let task = lw.map(|lw| lw.clone().into_waker());
        {
            let mut sender = self.sender_task.lock().unwrap();
            sender.task = task;
            sender.is_parked = true;
        }
        // Send handle over queue
        let t = self.sender_task.clone();
        self.inner.parked_queue.push(t);
        // Check to make sure we weren't closed after we sent our task on the
        // queue
        let state = decode_state(self.inner.state.load(SeqCst));
        self.maybe_parked = state.is_open;
    }
    /// Polls the channel to determine if there is guaranteed capacity to send
    /// at least one item without waiting.
    ///
    /// # Return value
    ///
    /// This method returns:
    ///
    /// - `Ok(Async::Ready(_))` if there is sufficient capacity;
    /// - `Ok(Async::Pending)` if the channel may not have
    ///   capacity, in which case the current task is queued to be notified once
    ///   capacity is available;
    /// - `Err(SendError)` if the receiver has been dropped.
    pub fn poll_ready(
        &mut self,
        lw: &LocalWaker
    ) -> Poll<Result<(), SendError>> {
        let state = decode_state(self.inner.state.load(SeqCst));
        if !state.is_open {
            return Poll::Ready(Err(SendError {
                kind: SendErrorKind::Disconnected,
            }));
        }
        self.poll_unparked(Some(lw)).map(Ok)
    }
    /// Returns whether this channel is closed without needing a context.
    pub fn is_closed(&self) -> bool {
        !decode_state(self.inner.state.load(SeqCst)).is_open
    }
    /// Closes this channel from the sender side, preventing any new messages.
    pub fn close_channel(&mut self) {
        // There's no need to park this sender, its dropping,
        // and we don't want to check for capacity, so skip
        // that stuff from `do_send`.
        let _ = self.do_send_nb(None);
    }
    // Returns Ready when this sender may send immediately, Pending while it
    // must wait for the receiver to free a slot.
    fn poll_unparked(&mut self, lw: Option<&LocalWaker>) -> Poll<()> {
        // First check the `maybe_parked` variable. This avoids acquiring the
        // lock in most cases
        if self.maybe_parked {
            // Get a lock on the task handle
            let mut task = self.sender_task.lock().unwrap();
            if !task.is_parked {
                self.maybe_parked = false;
                return Poll::Ready(())
            }
            // At this point, an unpark request is pending, so there will be an
            // unpark sometime in the future. We just need to make sure that
            // the correct task will be notified.
            //
            // Update the task in case the `Sender` has been moved to another
            // task
            task.task = lw.map(|lw| lw.clone().into_waker());
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}
impl<T> UnboundedSender<T> {
    /// Check if the channel is ready to receive a message.
    pub fn poll_ready(
        &self,
        _: &LocalWaker,
    ) -> Poll<Result<(), SendError>> {
        // Unbounded channels always have capacity, so this only verifies the
        // channel is still open; the waker is never registered.
        self.0.poll_ready_nb()
    }
    /// Returns whether this channel is closed without needing a context.
    pub fn is_closed(&self) -> bool {
        self.0.is_closed()
    }
    /// Closes this channel from the sender side, preventing any new messages.
    pub fn close_channel(&self) {
        // No need to park or check capacity here: enqueue the terminal
        // `None`, which atomically marks the channel closed.
        let _ = self.0.do_send_nb(None);
    }
    /// Send a message on the channel.
    ///
    /// This method should only be called after `poll_ready` has been used to
    /// verify that the channel is ready to receive a message.
    pub fn start_send(&mut self, msg: T) -> Result<(), SendError> {
        self.0.do_send_nb(Some(msg))
            .map_err(|e| e.err)
    }
    /// Sends a message along this channel.
    ///
    /// This is an unbounded sender, so this function differs from `Sink::send`
    /// by ensuring the return type reflects that the channel is always ready to
    /// receive messages.
    pub fn unbounded_send(&self, msg: T) -> Result<(), TrySendError<T>> {
        self.0.do_send_nb(Some(msg))
    }
}
impl<T> Clone for UnboundedSender<T> {
    // Cloning simply clones the wrapped bounded sender.
    fn clone(&self) -> UnboundedSender<T> {
        let inner = self.0.clone();
        UnboundedSender(inner)
    }
}
impl<T> Clone for Sender<T> {
    /// Creates another handle to the same channel, incrementing the sender
    /// count.
    ///
    /// # Panics
    ///
    /// Panics if the maximum number of senders for this channel has already
    /// been reached.
    fn clone(&self) -> Sender<T> {
        // Since this atomic op isn't actually guarding any memory and we don't
        // care about any orderings besides the ordering on the single atomic
        // variable, a relaxed ordering is acceptable.
        let mut curr = self.inner.num_senders.load(SeqCst);
        loop {
            // If the maximum number of senders has been reached, then fail
            if curr == self.inner.max_senders() {
                panic!("cannot clone `Sender` -- too many outstanding senders");
            }
            debug_assert!(curr < self.inner.max_senders());
            let next = curr + 1;
            // `compare_exchange` replaces the deprecated `compare_and_swap`,
            // consistent with the other state transitions in this module.
            // The ABA problem doesn't matter here. We only care that the
            // number of senders never exceeds the maximum.
            match self.inner.num_senders.compare_exchange(curr, next, SeqCst, SeqCst) {
                Ok(_) => {
                    return Sender {
                        inner: self.inner.clone(),
                        // Each clone gets its own parking slot.
                        sender_task: Arc::new(Mutex::new(SenderTask::new())),
                        maybe_parked: false,
                    };
                }
                Err(actual) => curr = actual,
            }
        }
    }
}
impl<T> Drop for Sender<T> {
    fn drop(&mut self) {
        // Ordering between variables don't matter here
        let prev = self.inner.num_senders.fetch_sub(1, SeqCst);
        // This was the last sender: close the channel by enqueueing the
        // terminal `None` message.
        if prev == 1 {
            // There's no need to park this sender, its dropping,
            // and we don't want to check for capacity, so skip
            // that stuff from `do_send`.
            let _ = self.do_send_nb(None);
        }
    }
}
/*
*
* ===== impl Receiver =====
*
*/
impl<T> Receiver<T> {
    /// Closes the receiving half of a channel, without dropping it.
    ///
    /// This prevents any further messages from being sent on the channel while
    /// still enabling the receiver to drain messages that are buffered.
    pub fn close(&mut self) {
        // Clear the `is_open` bit of the shared state with a CAS loop.
        let mut curr = self.inner.state.load(SeqCst);
        loop {
            let mut state = decode_state(curr);
            // Already closed: nothing to do.
            if !state.is_open {
                break
            }
            state.is_open = false;
            let next = encode_state(&state);
            match self.inner.state.compare_exchange(curr, next, SeqCst, SeqCst) {
                Ok(_) => break,
                Err(actual) => curr = actual,
            }
        }
        // Wake up any threads waiting as they'll see that we've closed the
        // channel and will continue on their merry way.
        loop {
            match unsafe { self.inner.parked_queue.pop() } {
                PopResult::Data(task) => {
                    task.lock().unwrap().notify();
                }
                PopResult::Empty => break,
                PopResult::Inconsistent => thread::yield_now(),
            }
        }
    }
    /// Tries to receive the next message without notifying a context if empty.
    ///
    /// It is not recommended to call this function from inside of a future,
    /// only when you've otherwise arranged to be notified when the channel is
    /// no longer empty.
    pub fn try_next(&mut self) -> Result<Option<T>, TryRecvError> {
        match self.next_message() {
            Poll::Ready(msg) => {
                Ok(msg)
            },
            Poll::Pending => Err(TryRecvError { _inner: () }),
        }
    }
    // Pop the next message off the queue. `Pending` only means the queue is
    // currently empty, not that the channel is closed.
    fn next_message(&mut self) -> Poll<Option<T>> {
        // Pop off a message
        loop {
            match unsafe { self.inner.message_queue.pop() } {
                PopResult::Data(msg) => {
                    // If there are any parked task handles in the parked queue,
                    // pop one and unpark it.
                    self.unpark_one();
                    // Decrement number of messages
                    self.dec_num_messages();
                    return Poll::Ready(msg);
                }
                PopResult::Empty => {
                    // The queue is empty, return Pending
                    return Poll::Pending;
                }
                PopResult::Inconsistent => {
                    // Inconsistent means that there will be a message to pop
                    // in a short time. This branch can only be reached if
                    // values are being produced from another thread, so there
                    // are a few ways that we can deal with this:
                    //
                    // 1) Spin
                    // 2) thread::yield_now()
                    // 3) task::current().unwrap() & return Pending
                    //
                    // For now, thread::yield_now() is used, but it would
                    // probably be better to spin a few times then yield.
                    thread::yield_now();
                }
            }
        }
    }
    // Unpark a single task handle if there is one pending in the parked queue
    fn unpark_one(&mut self) {
        loop {
            match unsafe { self.inner.parked_queue.pop() } {
                PopResult::Data(task) => {
                    task.lock().unwrap().notify();
                    return;
                }
                PopResult::Empty => {
                    // Queue empty, no task to wake up.
                    return;
                }
                PopResult::Inconsistent => {
                    // Same as above
                    thread::yield_now();
                }
            }
        }
    }
    // Try to park the receiver task
    fn try_park(&self, lw: &LocalWaker) -> TryPark {
        let curr = self.inner.state.load(SeqCst);
        let state = decode_state(curr);
        // If the channel is closed, then there is no need to park.
        if !state.is_open && state.num_messages == 0 {
            return TryPark::Closed;
        }
        // First, track the task in the `recv_task` slot
        let mut recv_task = self.inner.recv_task.lock().unwrap();
        if recv_task.unparked {
            // Consume the `unpark` signal without actually parking
            recv_task.unparked = false;
            return TryPark::NotEmpty;
        }
        recv_task.task = Some(lw.clone().into_waker());
        TryPark::Parked
    }
    // Decrement the message count in the shared state (CAS loop).
    fn dec_num_messages(&self) {
        let mut curr = self.inner.state.load(SeqCst);
        loop {
            let mut state = decode_state(curr);
            state.num_messages -= 1;
            let next = encode_state(&state);
            match self.inner.state.compare_exchange(curr, next, SeqCst, SeqCst) {
                Ok(_) => break,
                Err(actual) => curr = actual,
            }
        }
    }
}
// The receiver does not ever take a Pin to the inner T
impl<T> Unpin for Receiver<T> {}
impl<T> Stream for Receiver<T> {
    type Item = T;
    fn poll_next(
        mut self: Pin<&mut Self>,
        lw: &LocalWaker,
    ) -> Poll<Option<T>> {
        loop {
            // Try to read a message off of the message queue.
            let msg = match self.next_message() {
                Poll::Ready(msg) => msg,
                Poll::Pending => {
                    // There are no messages to read, in this case, attempt to
                    // park. The act of parking will verify that the channel is
                    // still empty after the park operation has completed.
                    match self.try_park(lw) {
                        TryPark::Parked => {
                            // The task was parked, and the channel is still
                            // empty, return Pending.
                            return Poll::Pending;
                        }
                        TryPark::Closed => {
                            // The channel is closed, there will be no further
                            // messages.
                            return Poll::Ready(None);
                        }
                        TryPark::NotEmpty => {
                            // A message has been sent while attempting to
                            // park. Loop again, the next iteration is
                            // guaranteed to get the message.
                            continue;
                        }
                    }
                }
            };
            // Return the message
            return Poll::Ready(msg);
        }
    }
}
impl<T> Drop for Receiver<T> {
    fn drop(&mut self) {
        // Mark the channel closed, then pull every buffered message out so
        // that parked senders are notified and memory is released.
        self.close();
        loop {
            if !self.next_message().is_ready() {
                break;
            }
        }
    }
}
impl<T> UnboundedReceiver<T> {
    /// Closes the receiving half of the channel, without dropping it.
    ///
    /// This prevents any further messages from being sent on the channel while
    /// still enabling the receiver to drain messages that are buffered.
    pub fn close(&mut self) {
        // Delegates to the wrapped bounded receiver.
        self.0.close();
    }
    /// Tries to receive the next message without notifying a context if empty.
    ///
    /// It is not recommended to call this function from inside of a future,
    /// only when you've otherwise arranged to be notified when the channel is
    /// no longer empty.
    pub fn try_next(&mut self) -> Result<Option<T>, TryRecvError> {
        self.0.try_next()
    }
}
impl<T> Stream for UnboundedReceiver<T> {
    type Item = T;
    fn poll_next(
        mut self: Pin<&mut Self>,
        lw: &LocalWaker,
    ) -> Poll<Option<T>> {
        // Delegate to the inner bounded receiver; `UnboundedReceiver` is
        // `Unpin`, so re-pinning the inner field is fine.
        Pin::new(&mut self.0).poll_next(lw)
    }
}
/*
*
* ===== impl Inner =====
*
*/
impl<T> Inner<T> {
    // The return value is such that the total number of messages that can be
    // enqueued into the channel will never exceed MAX_CAPACITY: bounded
    // channels leave room for `buffer` messages, unbounded ones cap senders
    // at MAX_BUFFER.
    fn max_senders(&self) -> usize {
        self.buffer.map_or(MAX_BUFFER, |buffer| MAX_CAPACITY - buffer)
    }
}
// SAFETY: `Inner` is shared between sender and receiver threads; its queues
// are internally synchronized (see `mpsc::queue`) and the remaining fields
// are atomics or mutex-guarded.
// NOTE(review): soundness ultimately depends on the `Queue` implementation —
// confirm against `queue.rs`.
unsafe impl<T: Send> Send for Inner<T> {}
unsafe impl<T: Send> Sync for Inner<T> {}
/*
*
* ===== Helpers =====
*
*/
// Unpacks the atomic `Inner::state` word into its open flag (top bit) and
// message count (remaining bits).
fn decode_state(num: usize) -> State {
    // OPEN_MASK is a single bit, so a non-zero test is equivalent to an
    // equality check against the mask.
    let is_open = num & OPEN_MASK != 0;
    let num_messages = num & MAX_CAPACITY;
    State { is_open, num_messages }
}
// Packs a `State` back into the single-word representation used by
// `Inner::state`.
fn encode_state(state: &State) -> usize {
    if state.is_open {
        state.num_messages | OPEN_MASK
    } else {
        state.num_messages
    }
}
| 32.330869 | 87 | 0.579984 |
edabc9fdc682ca7b47bb2d6c9b79e020c0cedffe | 2,270 | use crypto::digest::Digest;
use crypto::hmac::Hmac;
use crypto::mac::{Mac, MacResult};
use std::borrow::{Borrow, BorrowMut};
/// Feeds a hex-encoded string into the HMAC state.
/// Panics if `input` is not valid hex (the decode result is `unwrap`ped).
pub fn input_hex<D: Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) {
    h.input(crate::utils::h2b(input).unwrap().as_ref())
}
/// Same as `input_hex`, but decodes the hex via `h2b_rev` (reversed byte
/// order). Panics if `input` is not valid hex.
pub fn input_hex_rev<D: Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) {
    h.input(crate::utils::h2b_rev(input).unwrap().as_ref())
}
/// Finishes the HMAC computation and returns the raw result bytes.
pub fn result_u8<D: Digest>(h: &mut Hmac<D>) -> Box<[u8]> {
    // Use a zero-initialized buffer instead of `set_len` on a fresh Vec:
    // the previous code exposed uninitialized memory, which is undefined
    // behavior even for `u8`. `raw_result` then overwrites every byte.
    let mut v = vec![0u8; h.output_bytes()];
    h.raw_result(v.as_mut_slice());
    v.into_boxed_slice()
}
/// Finishes the HMAC computation and returns the result as a hex string.
pub fn result_hex<D: Digest>(h: &mut Hmac<D>) -> String {
    crate::utils::b2h(result_u8(h))
}
/// Finishes the HMAC computation and returns the result encoded via
/// `b2h_rev` (reversed byte order).
pub fn result_hex_rev<D: Digest>(h: &mut Hmac<D>) -> String {
    crate::utils::b2h_rev(result_u8(h))
}
/// Resets the state, MACs the raw `input` bytes, and returns raw bytes.
pub fn u8_to_u8<D: Digest, T:Borrow<[u8]>>(h: &mut Hmac<D>, input: T) -> Box<[u8]> {
    h.reset();
    h.input(input.borrow());
    result_u8(h)
}
/// Resets the state, MACs the raw `input` bytes, and returns a hex string.
pub fn u8_to_hex<D:Digest, T:Borrow<[u8]>>(h: &mut Hmac<D>, input: T) -> String {
    h.reset();
    h.input(input.borrow());
    result_hex(h)
}
/// Resets the state, MACs the raw `input` bytes, and returns a
/// reversed-order hex string.
pub fn u8_to_hex_rev<D:Digest, T:Borrow<[u8]>>(h: &mut Hmac<D>, input: T) -> String {
    h.reset();
    h.input(input.borrow());
    result_hex_rev(h)
}
/// Resets the state, MACs the hex-decoded `input`, and returns raw bytes.
/// Panics on invalid hex input.
pub fn hex_to_u8<D:Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) -> Box<[u8]> {
    h.reset();
    input_hex(h, input.borrow());
    result_u8(h)
}
/// Resets the state, MACs the hex-decoded `input`, and returns a hex string.
/// Panics on invalid hex input.
pub fn hex_to_hex<D:Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) -> String {
    h.reset();
    input_hex(h, input.borrow());
    result_hex(h)
}
/// Resets the state, MACs the reverse-decoded hex `input`, and returns raw
/// bytes. Panics on invalid hex input.
pub fn hex_to_u8_rev<D:Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) -> Box<[u8]> {
    h.reset();
    input_hex_rev(h, input.borrow());
    result_u8(h)
}
/// Resets the state, MACs the reverse-decoded hex `input`, and returns the
/// result as forward hex.
// NOTE(review): unlike `u8_to_hex_rev`, only the *input* is reversed here —
// the output uses `result_hex`, not `result_hex_rev`. Confirm this asymmetry
// is intentional.
pub fn hex_to_hex_rev<D: Digest, T:Borrow<str>>(h: &mut Hmac<D>, input: T) -> String {
    h.reset();
    input_hex_rev(h, input.borrow());
    result_hex(h)
}
#[test]
fn test_hmac_sha512() {
    // Fixed key/message pair with a precomputed HMAC-SHA512 digest.
    let key:&[u8] = b"Kagamine Rin";
    let input:&[u8] = b"Hatsune Miku";
    let expect = "5b274c80deabf563b1e84176acc0dbf944f9d883293b98f004eeadfdfd5856af65da1d332628795766ebd034f37b94327bd10b92edad735014ddd094e1c504bd";
    let mut hmac = crypto::hmac::Hmac::new(crypto::sha2::Sha512::new(), key);
    assert_eq!(expect, super::u8_to_hex(&mut hmac, input));
}
| 31.971831 | 147 | 0.640529 |
16e5bc7865a167fc726d54c6c6a69fec809ce12e | 8,193 | use std::collections::hash_map;
use std::sync::atomic::{AtomicU32, AtomicUsize, Ordering};
use std::sync::Arc;
use anyhow::Result;
use parking_lot::Mutex;
use tiny_adnl::utils::*;
use tokio::sync::oneshot;
use ton_types::UInt256;
use super::shard_utils::*;
/// Tracks in-flight messages until they are delivered or expire.
pub struct PendingMessagesQueue {
    // Cached minimum `expire_at` over all entries; `u32::MAX` when empty.
    min_expire_at: AtomicU32,
    // Pending messages keyed by (account, message hash).
    entries: Mutex<FxHashMap<PendingMessageId, PendingMessage>>,
    // Entry count, readable without taking the `entries` lock.
    entry_count: AtomicUsize,
}
impl PendingMessagesQueue {
    /// Creates an empty queue with room preallocated for `capacity` entries.
    pub fn new(capacity: usize) -> Arc<Self> {
        Arc::new(Self {
            // `u32::MAX` is the "no deadline" sentinel.
            min_expire_at: AtomicU32::new(u32::MAX),
            entries: Mutex::new(FxHashMap::with_capacity_and_hasher(
                capacity,
                Default::default(),
            )),
            entry_count: Default::default(),
        })
    }

    /// Returns the number of messages currently awaiting delivery or
    /// expiration.
    pub fn len(&self) -> usize {
        self.entry_count.load(Ordering::Acquire)
    }

    /// Returns `true` when no messages are pending.
    pub fn is_empty(&self) -> bool {
        self.entry_count.load(Ordering::Acquire) == 0
    }

    /// Registers a pending message and returns a receiver that resolves once
    /// the message is delivered or expires.
    ///
    /// Fails if the same `(account, message_hash)` pair is already pending.
    pub fn add_message(
        &self,
        account: UInt256,
        message_hash: UInt256,
        expire_at: u32,
    ) -> Result<MessageStatusRx> {
        let mut entries = self.entries.lock();
        match entries.entry(PendingMessageId {
            account,
            message_hash,
        }) {
            hash_map::Entry::Vacant(entry) => {
                let (tx, rx) = oneshot::channel();
                entry.insert(PendingMessage {
                    tx: Some(tx),
                    expire_at,
                });
                // Keep the cached minimum deadline up to date.
                self.min_expire_at.fetch_min(expire_at, Ordering::AcqRel);
                self.entry_count.fetch_add(1, Ordering::Release);
                Ok(rx)
            }
            hash_map::Entry::Occupied(_) => Err(PendingMessagesQueueError::AlreadyExists.into()),
        }
    }

    /// Marks the message as delivered, waking the waiting receiver with
    /// `MessageStatus::Delivered`. Does nothing if the message is not pending.
    pub fn deliver_message(&self, account: UInt256, message_hash: UInt256) {
        let mut entries = self.entries.lock();
        let mut message = match entries.remove(&PendingMessageId {
            account,
            message_hash,
        }) {
            Some(message) => message,
            None => return,
        };
        self.entry_count.fetch_sub(1, Ordering::Release);
        if let Some(tx) = message.tx.take() {
            // The receiver may already be dropped; ignore the send error.
            tx.send(MessageStatus::Delivered).ok();
        }
        // Only rescan for a new minimum when the delivered message was the
        // one holding the cached minimum deadline.
        if self.min_expire_at.load(Ordering::Acquire) != message.expire_at {
            return;
        }
        let min_expire_at = entries
            .values()
            .map(|item| item.expire_at)
            .min()
            .unwrap_or(u32::MAX);
        self.min_expire_at.store(min_expire_at, Ordering::Release);
    }

    /// Expires all messages belonging to `shard` whose deadline has passed,
    /// waking their receivers with `MessageStatus::Expired`, and refreshes
    /// the cached minimum deadline.
    pub fn update(&self, shard: &ton_block::ShardIdent, current_utime: u32) {
        // Fast path: nothing can have expired before the earliest deadline.
        let current_min_expire_at = self.min_expire_at.load(Ordering::Acquire);
        if current_utime <= current_min_expire_at {
            return;
        }
        let mut min_expire_at: u32 = u32::MAX;
        let mut entries = self.entries.lock();
        entries.retain(|id, item| {
            // Keep messages that are still alive or belong to another shard,
            // folding their deadlines into the new minimum.
            if current_utime <= item.expire_at || !contains_account(shard, &id.account) {
                if item.expire_at < min_expire_at {
                    min_expire_at = item.expire_at;
                }
                return true;
            }
            if let Some(tx) = item.tx.take() {
                tx.send(MessageStatus::Expired).ok();
            }
            false
        });
        self.min_expire_at.store(min_expire_at, Ordering::Release);
        self.entry_count.store(entries.len(), Ordering::Release);
    }
}
/// Final status reported for a pending message.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum MessageStatus {
    /// The message was delivered before its deadline.
    Delivered,
    /// The message's deadline passed before delivery.
    Expired,
}
// Map key uniquely identifying a pending message.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
struct PendingMessageId {
    account: UInt256,
    message_hash: UInt256,
}
// Queue entry: the notification channel plus the message deadline.
struct PendingMessage {
    // `None` once the status has already been sent.
    tx: Option<MessageStatusTx>,
    expire_at: u32,
}
type MessageStatusTx = oneshot::Sender<MessageStatus>;
type MessageStatusRx = oneshot::Receiver<MessageStatus>;
#[derive(thiserror::Error, Debug)]
enum PendingMessagesQueueError {
    #[error("Already exists")]
    AlreadyExists,
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a 32-byte hash whose first byte is `id` (distinct per id).
    fn make_hash(id: u8) -> UInt256 {
        let mut hash = [0; 32];
        hash[0] = id;
        UInt256::from(hash)
    }

    /// Creates an empty queue, asserting the initial "no deadline" sentinel.
    fn make_queue() -> Arc<PendingMessagesQueue> {
        let queue = PendingMessagesQueue::new(10);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        queue
    }

    #[tokio::test]
    async fn normal_message_flow() {
        let queue = make_queue();
        // Add message
        let rx = queue.add_message(make_hash(0), make_hash(0), 10).unwrap();
        // (Adding same message should fail)
        assert!(queue.add_message(make_hash(0), make_hash(0), 20).is_err());
        // Adding new message must update expiration
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        // Deliver message
        queue.deliver_message(make_hash(0), make_hash(0));
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        assert_eq!(rx.await.unwrap(), MessageStatus::Delivered);
    }

    #[tokio::test]
    async fn expired_message_flow() {
        let queue = make_queue();
        // Add message
        let rx = queue.add_message(make_hash(0), make_hash(0), 10).unwrap();
        // Update before expiration time must not do anything
        queue.update(&ton_block::ShardIdent::masterchain(), 5);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        // Update after expiration time must remove message
        queue.update(&ton_block::ShardIdent::masterchain(), 15);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        assert_eq!(rx.await.unwrap(), MessageStatus::Expired);
    }

    #[tokio::test]
    async fn multiple_messages_expiration_flow() {
        let queue = make_queue();
        // Add messages
        let rx2 = queue.add_message(make_hash(1), make_hash(1), 20).unwrap();
        let rx1 = queue.add_message(make_hash(0), make_hash(0), 10).unwrap();
        queue.update(&ton_block::ShardIdent::masterchain(), 5);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        queue.update(&ton_block::ShardIdent::masterchain(), 10);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        queue.update(&ton_block::ShardIdent::masterchain(), 15);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 20);
        queue.update(&ton_block::ShardIdent::masterchain(), 25);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        assert_eq!(rx1.await.unwrap(), MessageStatus::Expired);
        assert_eq!(rx2.await.unwrap(), MessageStatus::Expired);
    }

    // Renamed from `multiple_messages_deplivery_flow` (typo in the test name).
    #[tokio::test]
    async fn multiple_messages_delivery_flow() {
        let queue = make_queue();
        // Add messages
        let rx2 = queue.add_message(make_hash(1), make_hash(1), 20).unwrap();
        let rx1 = queue.add_message(make_hash(0), make_hash(0), 10).unwrap();
        queue.update(&ton_block::ShardIdent::masterchain(), 5);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        queue.deliver_message(make_hash(1), make_hash(1));
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 10);
        queue.update(&ton_block::ShardIdent::masterchain(), 15);
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        assert_eq!(rx1.await.unwrap(), MessageStatus::Expired);
        assert_eq!(rx2.await.unwrap(), MessageStatus::Delivered);
        // Add messages
        let rx1 = queue.add_message(make_hash(0), make_hash(0), 10).unwrap();
        let rx2 = queue.add_message(make_hash(1), make_hash(1), 20).unwrap();
        queue.deliver_message(make_hash(0), make_hash(0));
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), 20);
        queue.deliver_message(make_hash(1), make_hash(1));
        assert_eq!(queue.min_expire_at.load(Ordering::Acquire), u32::MAX);
        assert_eq!(rx1.await.unwrap(), MessageStatus::Delivered);
        assert_eq!(rx2.await.unwrap(), MessageStatus::Delivered);
    }
}
| 31.879377 | 97 | 0.609667 |
643c7aa974d3f3280580bb818370d7d6a1b1f6bd | 107,682 | //! Main module defining the lexer and parser.
use crate::any::{Dynamic, Union};
use crate::calc_fn_hash;
use crate::engine::{
make_getter, make_setter, Engine, FN_ANONYMOUS, KEYWORD_THIS, MARKER_BLOCK, MARKER_EXPR,
MARKER_IDENT,
};
use crate::error::{LexError, ParseError, ParseErrorType};
use crate::fn_native::Shared;
use crate::module::{Module, ModuleRef};
use crate::optimize::{optimize_into_ast, OptimizationLevel};
use crate::scope::{EntryType as ScopeEntryType, Scope};
use crate::syntax::FnCustomSyntaxEval;
use crate::token::{is_valid_identifier, Position, Token, TokenStream};
use crate::utils::{StaticVec, StraightHasherBuilder};
use crate::stdlib::{
borrow::Cow,
boxed::Box,
char,
collections::HashMap,
fmt, format,
hash::{Hash, Hasher},
iter::empty,
mem,
num::NonZeroUsize,
ops::Add,
string::{String, ToString},
vec,
vec::Vec,
};
#[cfg(not(feature = "no_std"))]
use crate::stdlib::collections::hash_map::DefaultHasher;
#[cfg(feature = "no_std")]
use ahash::AHasher;
/// The system integer type.
///
/// If the `only_i32` feature is enabled, this will be `i32` instead.
#[cfg(not(feature = "only_i32"))]
pub type INT = i64;
/// The system integer type.
///
/// If the `only_i32` feature is not enabled, this will be `i64` instead.
#[cfg(feature = "only_i32")]
pub type INT = i32;
/// The system floating-point type.
///
/// Not available under the `no_float` feature.
#[cfg(not(feature = "no_float"))]
pub type FLOAT = f64;
/// Shorthand for the parser's error-type enum.
type PERR = ParseErrorType;
pub use crate::utils::ImmutableString;
/// Script-defined functions keyed by pre-calculated hash, using a pass-through hasher
/// since the keys are already hashes.
type FunctionsLib = HashMap<u64, ScriptFnDef, StraightHasherBuilder>;
/// Compiled AST (abstract syntax tree) of a Rhai script.
///
/// Currently, `AST` is neither `Send` nor `Sync`. Turn on the `sync` feature to make it `Send + Sync`.
#[derive(Debug, Clone, Default)]
pub struct AST(
    /// Global statements (accessed internally as `self.0`).
    Vec<Stmt>,
    /// Script-defined functions (accessed internally as `self.1`).
    Module,
);
impl AST {
    /// Create a new `AST`.
    pub fn new(statements: Vec<Stmt>, lib: Module) -> Self {
        Self(statements, lib)
    }
    /// Get the statements.
    #[cfg(not(feature = "internals"))]
    pub(crate) fn statements(&self) -> &[Stmt] {
        &self.0
    }
    /// Get the statements.
    #[cfg(feature = "internals")]
    #[deprecated(note = "this method is volatile and may change")]
    pub fn statements(&self) -> &[Stmt] {
        &self.0
    }
    /// Get a mutable reference to the statements.
    pub(crate) fn statements_mut(&mut self) -> &mut Vec<Stmt> {
        &mut self.0
    }
    /// Get the internal `Module` containing all script-defined functions.
    #[cfg(not(feature = "internals"))]
    pub(crate) fn lib(&self) -> &Module {
        &self.1
    }
    /// Get the internal `Module` containing all script-defined functions.
    #[cfg(feature = "internals")]
    #[deprecated(note = "this method is volatile and may change")]
    pub fn lib(&self) -> &Module {
        &self.1
    }
    /// Clone the `AST`'s functions into a new `AST`.
    /// No statements are cloned.
    ///
    /// This operation is cheap because functions are shared.
    pub fn clone_functions_only(&self) -> Self {
        // Delegate with a filter that accepts everything.
        self.clone_functions_only_filtered(|_, _, _| true)
    }
    /// Clone the `AST`'s functions into a new `AST` based on a filter predicate.
    /// No statements are cloned.
    ///
    /// This operation is cheap because functions are shared.
    pub fn clone_functions_only_filtered(
        &self,
        filter: impl Fn(FnAccess, &str, usize) -> bool,
    ) -> Self {
        // Copy only the functions passing the filter; the statements list is left empty.
        let mut functions: Module = Default::default();
        functions.merge_filtered(&self.1, filter);
        Self(Default::default(), functions)
    }
    /// Clone the `AST`'s script statements into a new `AST`.
    /// No functions are cloned.
    pub fn clone_statements_only(&self) -> Self {
        Self(self.0.clone(), Default::default())
    }
    /// Merge two `AST` into one. Both `AST`'s are untouched and a new, merged, version
    /// is returned.
    ///
    /// The second `AST` is simply appended to the end of the first _without any processing_.
    /// Thus, the return value of the first `AST` (if using expression-statement syntax) is buried.
    /// Of course, if the first `AST` uses a `return` statement at the end, then
    /// the second `AST` will essentially be dead code.
    ///
    /// All script-defined functions in the second `AST` overwrite similarly-named functions
    /// in the first `AST` with the same number of parameters.
    ///
    /// # Example
    ///
    /// ```
    /// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    /// # #[cfg(not(feature = "no_function"))]
    /// # {
    /// use rhai::Engine;
    ///
    /// let engine = Engine::new();
    ///
    /// let ast1 = engine.compile(r#"
    ///                 fn foo(x) { 42 + x }
    ///                 foo(1)
    ///             "#)?;
    ///
    /// let ast2 = engine.compile(r#"
    ///                 fn foo(n) { "hello" + n }
    ///                 foo("!")
    ///             "#)?;
    ///
    /// let ast = ast1.merge(&ast2);    // Merge 'ast2' into 'ast1'
    ///
    /// // Notice that using the '+' operator also works:
    /// // let ast = &ast1 + &ast2;
    ///
    /// // 'ast' is essentially:
    /// //
    /// //    fn foo(n) { "hello" + n } // <- definition of first 'foo' is overwritten
    /// //    foo(1)                    // <- notice this will be "hello1" instead of 43,
    /// //                              //    but it is no longer the return value
    /// //    foo("!")                  // returns "hello!"
    ///
    /// // Evaluate it
    /// assert_eq!(engine.eval_ast::<String>(&ast)?, "hello!");
    /// # }
    /// # Ok(())
    /// # }
    /// ```
    pub fn merge(&self, other: &Self) -> Self {
        // Delegate with a filter that accepts all functions.
        self.merge_filtered(other, |_, _, _| true)
    }
    /// Merge two `AST` into one. Both `AST`'s are untouched and a new, merged, version
    /// is returned.
    ///
    /// The second `AST` is simply appended to the end of the first _without any processing_.
    /// Thus, the return value of the first `AST` (if using expression-statement syntax) is buried.
    /// Of course, if the first `AST` uses a `return` statement at the end, then
    /// the second `AST` will essentially be dead code.
    ///
    /// All script-defined functions in the second `AST` are first selected based on a filter
    /// predicate, then overwrite similarly-named functions in the first `AST` with the
    /// same number of parameters.
    ///
    /// # Example
    ///
    /// ```
    /// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    /// # #[cfg(not(feature = "no_function"))]
    /// # {
    /// use rhai::Engine;
    ///
    /// let engine = Engine::new();
    ///
    /// let ast1 = engine.compile(r#"
    ///                 fn foo(x) { 42 + x }
    ///                 foo(1)
    ///             "#)?;
    ///
    /// let ast2 = engine.compile(r#"
    ///                 fn foo(n) { "hello" + n }
    ///                 fn error() { 0 }
    ///                 foo("!")
    ///             "#)?;
    ///
    /// // Merge 'ast2', picking only 'error()' but not 'foo(_)', into 'ast1'
    /// let ast = ast1.merge_filtered(&ast2, |_, name, params| name == "error" && params == 0);
    ///
    /// // 'ast' is essentially:
    /// //
    /// //    fn foo(n) { 42 + n }      // <- definition of 'ast1::foo' is not overwritten
    /// //                              //    because 'ast2::foo' is filtered away
    /// //    foo(1)                    // <- notice this will be 43 instead of "hello1",
    /// //                              //    but it is no longer the return value
    /// //    fn error() { 0 }          // <- this function passes the filter and is merged
    /// //    foo("!")                  // <- returns "42!"
    ///
    /// // Evaluate it
    /// assert_eq!(engine.eval_ast::<String>(&ast)?, "42!");
    /// # }
    /// # Ok(())
    /// # }
    /// ```
    pub fn merge_filtered(
        &self,
        other: &Self,
        filter: impl Fn(FnAccess, &str, usize) -> bool,
    ) -> Self {
        let Self(statements, functions) = self;

        // Concatenate the statement lists, avoiding needless clones when either side is empty.
        let ast = match (statements.is_empty(), other.0.is_empty()) {
            (false, false) => {
                let mut statements = statements.clone();
                statements.extend(other.0.iter().cloned());
                statements
            }
            (false, true) => statements.clone(),
            (true, false) => other.0.clone(),
            (true, true) => vec![],
        };

        // Merge the function modules; entries from `other` that pass the filter win.
        let mut functions = functions.clone();
        functions.merge_filtered(&other.1, filter);

        Self::new(ast, functions)
    }
    /// Filter out the functions, retaining only some based on a filter predicate.
    ///
    /// # Example
    ///
    /// ```
    /// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    /// # #[cfg(not(feature = "no_function"))]
    /// # {
    /// use rhai::Engine;
    ///
    /// let engine = Engine::new();
    ///
    /// let mut ast = engine.compile(r#"
    ///                         fn foo(n) { n + 1 }
    ///                         fn bar() { print("hello"); }
    ///                     "#)?;
    ///
    /// // Remove all functions except 'foo(_)'
    /// ast.retain_functions(|_, name, params| name == "foo" && params == 1);
    /// # }
    /// # Ok(())
    /// # }
    /// ```
    #[cfg(not(feature = "no_function"))]
    pub fn retain_functions(&mut self, filter: impl Fn(FnAccess, &str, usize) -> bool) {
        self.1.retain_functions(filter);
    }
    /// Clear all function definitions in the `AST`.
    #[cfg(not(feature = "no_function"))]
    pub fn clear_functions(&mut self) {
        self.1 = Default::default();
    }
    /// Clear all statements in the `AST`, leaving only function definitions.
    pub fn clear_statements(&mut self) {
        self.0 = vec![];
    }
}
impl Add<Self> for &AST {
type Output = AST;
fn add(self, rhs: Self) -> Self::Output {
self.merge(rhs)
}
}
impl AsRef<[Stmt]> for AST {
    /// Borrow the global statements as a slice.
    fn as_ref(&self) -> &[Stmt] {
        &self.0
    }
}
impl AsRef<Module> for AST {
    /// Borrow the module holding all script-defined functions.
    fn as_ref(&self) -> &Module {
        &self.1
    }
}
/// A type representing the access mode of a scripted function.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum FnAccess {
    /// Private function - not visible outside its defining module.
    Private,
    /// Public function.
    Public,
}
impl fmt::Display for FnAccess {
    /// Render the access mode as its keyword-like name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Private => "private",
            Self::Public => "public",
        })
    }
}
/// A scripted function definition.
#[derive(Debug, Clone, Hash)]
pub struct ScriptFnDef {
    /// Function name.
    pub name: String,
    /// Function access mode (public or private).
    pub access: FnAccess,
    /// Names of function parameters.
    pub params: StaticVec<String>,
    /// Function body, as a single statement (typically a block).
    pub body: Stmt,
    /// Position of the function definition.
    pub pos: Position,
}
impl fmt::Display for ScriptFnDef {
    /// Render the definition as `[private ]name(param1,param2,...)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let access = match self.access {
            FnAccess::Public => "",
            FnAccess::Private => "private ",
        };
        let params: Vec<_> = self.params.iter().map(String::as_str).collect();
        write!(f, "{}{}({})", access, self.name, params.join(","))
    }
}
/// `return`/`throw` statement.
///
/// Distinguishes which keyword produced a `Stmt::ReturnWithVal`.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub enum ReturnType {
    /// `return` statement.
    Return,
    /// `throw` statement.
    Exception,
}
#[derive(Clone)]
struct ParseState<'e> {
    /// Reference to the scripting `Engine`.
    engine: &'e Engine,
    /// Encapsulates a local stack with variable names to simulate an actual runtime scope.
    stack: Vec<(String, ScopeEntryType)>,
    /// Stack of imported module names, used to resolve module references while parsing.
    modules: Vec<String>,
    /// Maximum levels of expression nesting.
    max_expr_depth: usize,
    /// Maximum levels of expression nesting in functions.
    max_function_expr_depth: usize,
}
impl<'e> ParseState<'e> {
    /// Create a new `ParseState` with empty variable/module stacks.
    pub fn new(engine: &'e Engine, max_expr_depth: usize, max_function_expr_depth: usize) -> Self {
        Self {
            engine,
            max_expr_depth,
            max_function_expr_depth,
            stack: Default::default(),
            modules: Default::default(),
        }
    }
    /// Find a variable by name in the `ParseState`, searching in reverse.
    /// The return value is the offset to be deducted from `Stack::len`,
    /// i.e. the top element of the `ParseState` is offset 1.
    /// Return `None` when the variable name is not found in the `ParseState`.
    pub fn find_var(&self, name: &str) -> Option<NonZeroUsize> {
        // `position` on the reversed iterator replaces the previous
        // `enumerate().find(..)` combination; the offset is 1-based, hence `+ 1`
        // (which also means `NonZeroUsize::new` never actually yields `None` here).
        self.stack
            .iter()
            .rev()
            .position(|(n, _)| n.as_str() == name)
            .and_then(|i| NonZeroUsize::new(i + 1))
    }
    /// Find a module by name in the `ParseState`, searching in reverse.
    /// The return value is the offset to be deducted from `Stack::len`,
    /// i.e. the top element of the `ParseState` is offset 1.
    /// Return `None` when the module name is not found in the `ParseState`.
    pub fn find_module(&self, name: &str) -> Option<NonZeroUsize> {
        self.modules
            .iter()
            .rev()
            .position(|n| n.as_str() == name)
            .and_then(|i| NonZeroUsize::new(i + 1))
    }
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
/// A type that encapsulates all the settings for a particular parsing function.
struct ParseSettings {
    /// Current position.
    pos: Position,
    /// Is the construct being parsed located at global level?
    is_global: bool,
    /// Is the construct being parsed located at function definition level?
    is_function_scope: bool,
    /// Is the current position inside a loop (so `break`/`continue` are valid)?
    is_breakable: bool,
    /// Is anonymous function allowed?
    allow_anonymous_fn: bool,
    /// Is if-expression allowed?
    allow_if_expr: bool,
    /// Is statement-expression allowed?
    allow_stmt_expr: bool,
    /// Current expression nesting level.
    level: usize,
}
impl ParseSettings {
    /// Create a new `ParseSettings` with one higher expression level.
    pub fn level_up(&self) -> Self {
        // `ParseSettings` is `Copy`, so just bump the level on a copy.
        let mut settings = *self;
        settings.level += 1;
        settings
    }
    /// Make sure that the current level of expression nesting is within the maximum limit.
    /// A limit of zero means "no limit".
    pub fn ensure_level_within_max_limit(&self, limit: usize) -> Result<(), ParseError> {
        if limit > 0 && self.level > limit {
            Err(PERR::ExprTooDeep.into_err(self.pos))
        } else {
            Ok(())
        }
    }
}
/// A statement.
///
/// Each variant is at most one pointer in size (for speed),
/// with everything being allocated together in one single tuple.
#[derive(Debug, Clone, Hash)]
pub enum Stmt {
    /// No-op.
    Noop(Position),
    /// if expr { stmt } else { stmt } - the `else` branch is optional
    IfThenElse(Box<(Expr, Stmt, Option<Stmt>)>),
    /// while expr { stmt }
    While(Box<(Expr, Stmt)>),
    /// loop { stmt }
    Loop(Box<Stmt>),
    /// for id in expr { stmt }
    For(Box<(String, Expr, Stmt)>),
    /// let id = expr - the initializer expression is optional
    Let(Box<((String, Position), Option<Expr>)>),
    /// const id = expr
    Const(Box<((String, Position), Expr)>),
    /// { stmt; ... }
    Block(Box<(StaticVec<Stmt>, Position)>),
    /// { stmt }
    Expr(Box<Expr>),
    /// continue
    Continue(Position),
    /// break
    Break(Position),
    /// return/throw - the value expression is optional
    ReturnWithVal(Box<((ReturnType, Position), Option<Expr>)>),
    /// import expr as module
    Import(Box<(Expr, (String, Position))>),
    /// expr id as name, ...
    Export(Box<StaticVec<((String, Position), Option<(String, Position)>)>>),
}
impl Default for Stmt {
fn default() -> Self {
Self::Noop(Default::default())
}
}
impl Stmt {
    /// Get the `Position` of this statement.
    pub fn position(&self) -> Position {
        match self {
            // Variants carrying a bare position.
            Stmt::Noop(pos) | Stmt::Continue(pos) | Stmt::Break(pos) => *pos,
            // Variants storing a position inside their payload tuple.
            Stmt::Let(x) => (x.0).1,
            Stmt::Const(x) => (x.0).1,
            Stmt::ReturnWithVal(x) => (x.0).1,
            Stmt::Block(x) => x.1,
            // Variants that defer to a contained expression/statement.
            Stmt::IfThenElse(x) => x.0.position(),
            Stmt::Expr(x) => x.position(),
            Stmt::While(x) => x.1.position(),
            Stmt::Loop(x) => x.position(),
            Stmt::For(x) => x.2.position(),
            Stmt::Import(x) => (x.1).1,
            Stmt::Export(x) => (x.get(0).0).1,
        }
    }

    /// Is this statement self-terminated (i.e. no need for a semicolon terminator)?
    pub fn is_self_terminated(&self) -> bool {
        match self {
            // Brace-delimited constructs terminate themselves.
            Stmt::IfThenElse(_)
            | Stmt::While(_)
            | Stmt::Loop(_)
            | Stmt::For(_)
            | Stmt::Block(_) => true,

            // A No-op requires a semicolon in order to know it is an empty statement!
            Stmt::Noop(_) => false,

            Stmt::Let(_)
            | Stmt::Const(_)
            | Stmt::Import(_)
            | Stmt::Export(_)
            | Stmt::Expr(_)
            | Stmt::Continue(_)
            | Stmt::Break(_)
            | Stmt::ReturnWithVal(_) => false,
        }
    }

    /// Is this statement _pure_ (i.e. has no side effects)?
    pub fn is_pure(&self) -> bool {
        match self {
            Stmt::Noop(_) => true,
            Stmt::Expr(expr) => expr.is_pure(),
            Stmt::IfThenElse(x) if x.2.is_some() => {
                x.0.is_pure() && x.1.is_pure() && x.2.as_ref().unwrap().is_pure()
            }
            Stmt::IfThenElse(x) => x.1.is_pure(),
            Stmt::While(x) => x.0.is_pure() && x.1.is_pure(),
            Stmt::Loop(x) => x.is_pure(),
            Stmt::For(x) => x.1.is_pure() && x.2.is_pure(),
            // Declarations, flow control and module operations all have effects.
            Stmt::Let(_) | Stmt::Const(_) => false,
            Stmt::Block(x) => x.0.iter().all(Stmt::is_pure),
            Stmt::Continue(_) | Stmt::Break(_) | Stmt::ReturnWithVal(_) => false,
            Stmt::Import(_) => false,
            Stmt::Export(_) => false,
        }
    }
}
/// A custom syntax expression: the captured sub-expressions plus the
/// shared user-supplied function that evaluates them.
#[derive(Clone)]
pub struct CustomExpr(pub StaticVec<Expr>, pub Shared<FnCustomSyntaxEval>);
impl fmt::Debug for CustomExpr {
    /// Debug-print only the sub-expressions; the evaluation function is opaque.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl Hash for CustomExpr {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Only the sub-expressions participate in the hash;
        // the evaluation function (field 1) is deliberately not hashed.
        self.0.hash(state);
    }
}
/// Wrapper around a floating-point constant and its position,
/// allowing a `Hash` implementation (which `f64` lacks).
#[cfg(not(feature = "no_float"))]
#[derive(Debug, PartialEq, PartialOrd, Clone)]
pub struct FloatWrapper(pub FLOAT, pub Position);
#[cfg(not(feature = "no_float"))]
impl Hash for FloatWrapper {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Floats do not implement `Hash`, so hash the raw little-endian bit
        // pattern instead. Note this distinguishes values with different bit
        // patterns (e.g. 0.0 vs -0.0) even though they compare equal.
        state.write(&self.0.to_le_bytes());
        self.1.hash(state);
    }
}
/// An expression.
///
/// Each variant is at most one pointer in size (for speed),
/// with everything being allocated together in one single tuple.
#[derive(Debug, Clone, Hash)]
pub enum Expr {
    /// Integer constant.
    IntegerConstant(Box<(INT, Position)>),
    /// Floating-point constant.
    #[cfg(not(feature = "no_float"))]
    FloatConstant(Box<FloatWrapper>),
    /// Character constant.
    CharConstant(Box<(char, Position)>),
    /// String constant.
    StringConstant(Box<(ImmutableString, Position)>),
    /// FnPtr constant - the name of the function pointed to.
    FnPointer(Box<(ImmutableString, Position)>),
    /// Variable access - ((variable name, position), optional modules, hash, optional index)
    Variable(
        Box<(
            (String, Position),
            Option<Box<ModuleRef>>,
            u64,
            Option<NonZeroUsize>,
        )>,
    ),
    /// Property access - ((property name, getter name, setter name), position)
    Property(Box<((ImmutableString, String, String), Position)>),
    /// { stmt }
    Stmt(Box<(Stmt, Position)>),
    /// Wrapped expression - should not be optimized away.
    Expr(Box<Expr>),
    /// func(expr, ... ) - ((function name, native_only, position), optional modules, hash, arguments, optional default value)
    /// Use `Cow<'static, str>` because a lot of operators (e.g. `==`, `>=`) are implemented as function calls
    /// and the function names are predictable, so no need to allocate a new `String`.
    FnCall(
        Box<(
            (Cow<'static, str>, bool, Position),
            Option<Box<ModuleRef>>,
            u64,
            StaticVec<Expr>,
            Option<bool>,
        )>,
    ),
    /// expr op= expr
    Assignment(Box<(Expr, Cow<'static, str>, Expr, Position)>),
    /// lhs.rhs
    Dot(Box<(Expr, Expr, Position)>),
    /// expr[expr]
    Index(Box<(Expr, Expr, Position)>),
    /// [ expr, ... ]
    Array(Box<(StaticVec<Expr>, Position)>),
    /// #{ name:expr, ... }
    Map(Box<(StaticVec<((ImmutableString, Position), Expr)>, Position)>),
    /// lhs in rhs
    In(Box<(Expr, Expr, Position)>),
    /// lhs && rhs
    And(Box<(Expr, Expr, Position)>),
    /// lhs || rhs
    Or(Box<(Expr, Expr, Position)>),
    /// true
    True(Position),
    /// false
    False(Position),
    /// ()
    Unit(Position),
    /// Custom syntax - (captured expressions + eval function, position)
    Custom(Box<(CustomExpr, Position)>),
}
impl Default for Expr {
fn default() -> Self {
Self::Unit(Default::default())
}
}
impl Expr {
    /// Get the `Dynamic` value of a constant expression.
    ///
    /// # Panics
    ///
    /// Panics when the expression is not constant.
    pub fn get_constant_value(&self) -> Dynamic {
        match self {
            // Unwrap wrapped expressions recursively.
            Self::Expr(x) => x.get_constant_value(),
            Self::IntegerConstant(x) => x.0.into(),
            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(x) => x.0.into(),
            Self::CharConstant(x) => x.0.into(),
            Self::StringConstant(x) => x.0.clone().into(),
            Self::True(_) => true.into(),
            Self::False(_) => false.into(),
            Self::Unit(_) => ().into(),

            // An array literal is only constant if all its items are constant.
            #[cfg(not(feature = "no_index"))]
            Self::Array(x) if x.0.iter().all(Self::is_constant) => Dynamic(Union::Array(Box::new(
                x.0.iter().map(Self::get_constant_value).collect::<Vec<_>>(),
            ))),

            // A map literal is only constant if all its values are constant.
            #[cfg(not(feature = "no_object"))]
            Self::Map(x) if x.0.iter().all(|(_, v)| v.is_constant()) => {
                Dynamic(Union::Map(Box::new(
                    x.0.iter()
                        .map(|((k, _), v)| (k.clone(), v.get_constant_value()))
                        .collect::<HashMap<_, _>>(),
                )))
            }

            _ => unreachable!("cannot get value of non-constant expression"),
        }
    }

    /// Get the display value of a constant expression.
    ///
    /// # Panics
    ///
    /// Panics when the expression is not constant.
    pub fn get_constant_str(&self) -> String {
        match self {
            Self::Expr(x) => x.get_constant_str(),

            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(x) => x.0.to_string(),

            Self::IntegerConstant(x) => x.0.to_string(),
            Self::CharConstant(x) => x.0.to_string(),
            Self::StringConstant(_) => "string".to_string(),
            Self::True(_) => "true".to_string(),
            Self::False(_) => "false".to_string(),
            Self::Unit(_) => "()".to_string(),

            Self::Array(x) if x.0.iter().all(Self::is_constant) => "array".to_string(),

            _ => unreachable!("cannot get value of non-constant expression"),
        }
    }

    /// Get the `Position` of the expression.
    pub fn position(&self) -> Position {
        match self {
            Self::Expr(x) => x.position(),

            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(x) => x.1,

            Self::IntegerConstant(x) => x.1,
            Self::CharConstant(x) => x.1,
            Self::StringConstant(x) => x.1,
            Self::FnPointer(x) => x.1,
            Self::Array(x) => x.1,
            Self::Map(x) => x.1,
            Self::Property(x) => x.1,
            Self::Stmt(x) => x.1,
            Self::Variable(x) => (x.0).1,
            Self::FnCall(x) => (x.0).2,

            // Compound expressions report the position of their left-hand side.
            Self::Assignment(x) => x.0.position(),

            Self::And(x) | Self::Or(x) | Self::In(x) => x.2,

            Self::True(pos) | Self::False(pos) | Self::Unit(pos) => *pos,

            Self::Dot(x) | Self::Index(x) => x.0.position(),

            Self::Custom(x) => x.1,
        }
    }

    /// Override the `Position` of the expression.
    pub(crate) fn set_position(mut self, new_pos: Position) -> Self {
        match &mut self {
            // Recurse into the wrapped expression, preserving the wrapper.
            Self::Expr(ref mut x) => {
                let expr = mem::take(x);
                *x = Box::new(expr.set_position(new_pos));
            }

            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(x) => x.1 = new_pos,

            Self::IntegerConstant(x) => x.1 = new_pos,
            Self::CharConstant(x) => x.1 = new_pos,
            Self::StringConstant(x) => x.1 = new_pos,
            Self::FnPointer(x) => x.1 = new_pos,
            Self::Array(x) => x.1 = new_pos,
            Self::Map(x) => x.1 = new_pos,
            Self::Variable(x) => (x.0).1 = new_pos,
            Self::Property(x) => x.1 = new_pos,
            Self::Stmt(x) => x.1 = new_pos,
            Self::FnCall(x) => (x.0).2 = new_pos,
            Self::And(x) => x.2 = new_pos,
            Self::Or(x) => x.2 = new_pos,
            Self::In(x) => x.2 = new_pos,
            Self::True(pos) => *pos = new_pos,
            Self::False(pos) => *pos = new_pos,
            Self::Unit(pos) => *pos = new_pos,
            Self::Assignment(x) => x.3 = new_pos,
            Self::Dot(x) => x.2 = new_pos,
            Self::Index(x) => x.2 = new_pos,
            Self::Custom(x) => x.1 = new_pos,
        }

        self
    }

    /// Is the expression pure?
    ///
    /// A pure expression has no side effects.
    pub fn is_pure(&self) -> bool {
        match self {
            Self::Expr(x) => x.is_pure(),

            Self::Array(x) => x.0.iter().all(Self::is_pure),

            Self::Index(x) | Self::And(x) | Self::Or(x) | Self::In(x) => {
                let (lhs, rhs, _) = x.as_ref();
                lhs.is_pure() && rhs.is_pure()
            }

            Self::Stmt(x) => x.0.is_pure(),

            Self::Variable(_) => true,

            // All constants are pure; anything else (e.g. function calls) is not.
            _ => self.is_constant(),
        }
    }

    /// Is the expression a constant?
    pub fn is_constant(&self) -> bool {
        match self {
            Self::Expr(x) => x.is_constant(),

            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(_) => true,

            Self::IntegerConstant(_)
            | Self::CharConstant(_)
            | Self::StringConstant(_)
            | Self::FnPointer(_)
            | Self::True(_)
            | Self::False(_)
            | Self::Unit(_) => true,

            // An array literal is constant if all items are constant
            Self::Array(x) => x.0.iter().all(Self::is_constant),

            // An map literal is constant if all items are constant
            Self::Map(x) => x.0.iter().map(|(_, expr)| expr).all(Self::is_constant),

            // Check in expression
            // `in` is constant only for string/char searched inside a string constant.
            Self::In(x) => match (&x.0, &x.1) {
                (Self::StringConstant(_), Self::StringConstant(_))
                | (Self::CharConstant(_), Self::StringConstant(_)) => true,
                _ => false,
            },

            _ => false,
        }
    }

    /// Is a particular token allowed as a postfix operator to this expression?
    pub fn is_valid_postfix(&self, token: &Token) -> bool {
        match self {
            Self::Expr(x) => x.is_valid_postfix(token),

            #[cfg(not(feature = "no_float"))]
            Self::FloatConstant(_) => false,

            Self::IntegerConstant(_)
            | Self::CharConstant(_)
            | Self::FnPointer(_)
            | Self::In(_)
            | Self::And(_)
            | Self::Or(_)
            | Self::True(_)
            | Self::False(_)
            | Self::Unit(_)
            | Self::Assignment(_) => false,

            // Indexable values may be followed by `[`.
            Self::StringConstant(_)
            | Self::Stmt(_)
            | Self::FnCall(_)
            | Self::Dot(_)
            | Self::Index(_)
            | Self::Array(_)
            | Self::Map(_) => match token {
                #[cfg(not(feature = "no_index"))]
                Token::LeftBracket => true,
                _ => false,
            },

            // Variables may also start a call or a module-qualified path.
            Self::Variable(_) => match token {
                #[cfg(not(feature = "no_index"))]
                Token::LeftBracket => true,
                Token::LeftParen => true,
                Token::DoubleColon => true,
                _ => false,
            },

            Self::Property(_) => match token {
                #[cfg(not(feature = "no_index"))]
                Token::LeftBracket => true,
                Token::LeftParen => true,
                _ => false,
            },

            Self::Custom(_) => false,
        }
    }

    /// Convert a `Variable` into a `Property`.  All other variants are untouched.
    pub(crate) fn into_property(self) -> Self {
        match self {
            // Only unqualified variables (no module path) become properties.
            Self::Variable(x) if x.1.is_none() => {
                let (name, pos) = x.0;
                let getter = make_getter(&name);
                let setter = make_setter(&name);
                Self::Property(Box::new(((name.into(), getter, setter), pos)))
            }
            _ => self,
        }
    }
}
/// Consume a particular token, checking that it is the expected one.
///
/// # Panics
///
/// Panics (via `unreachable!`) if the next token is not the expected one -
/// callers must only use this when the token kind is already known.
fn eat_token(input: &mut TokenStream, token: Token) -> Position {
    let (next, pos) = input.next().unwrap();

    if next == token {
        pos
    } else {
        unreachable!(
            "expecting {} (found {}) at {}",
            token.syntax(),
            next.syntax(),
            pos
        )
    }
}
/// Match a particular token, consuming it if matched.
/// Returns `Ok(true)` when the token was consumed, `Ok(false)` otherwise.
fn match_token(input: &mut TokenStream, token: Token) -> Result<bool, ParseError> {
    let matched = input.peek().unwrap().0 == token;

    if matched {
        eat_token(input, token);
    }

    Ok(matched)
}
/// Parse a parenthesized expression `( expr )`, or the unit literal `()`.
fn parse_paren_expr(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    // An immediately-closing parenthesis is the unit value `()`.
    if match_token(input, Token::RightParen)? {
        return Ok(Expr::Unit(settings.pos));
    }

    let expr = parse_expr(input, state, lib, settings.level_up())?;

    match input.next().unwrap() {
        // ( xxx ) - properly closed
        (Token::RightParen, _) => Ok(expr),
        // ( <error>
        (Token::LexError(err), pos) => Err(err.into_err(pos)),
        // ( xxx ??? - the closing parenthesis is missing
        (_, pos) => Err(PERR::MissingToken(
            Token::RightParen.into(),
            "for a matching ( in this expression".into(),
        )
        .into_err(pos)),
    }
}
/// Calculate the script-function hash for a (possibly module-qualified) call.
///
/// For qualified calls, the module chain's index is resolved against the parse
/// state first, then the hash is built from the qualifiers, the function name
/// and the number of arguments.
fn qualified_call_hash(
    state: &ParseState,
    modules: &mut Option<Box<ModuleRef>>,
    id: &str,
    num_args: usize,
) -> u64 {
    if let Some(modules) = modules.as_mut() {
        modules.set_index(state.find_module(&modules.get(0).0));

        // Rust functions are indexed in two steps:
        // 1) Calculate a hash in a similar manner to script-defined functions,
        //    i.e. qualifiers + function name + number of arguments.
        // 2) Calculate a second hash with no qualifiers, empty function name,
        //    zero number of arguments, and the actual list of argument `TypeId`'s.
        // 3) The final hash is the XOR of the two hashes.
        let qualifiers = modules.iter().map(|(m, _)| m.as_str());
        calc_fn_hash(qualifiers, id, num_args, empty())
    } else {
        // Qualifiers (none) + function name + number of arguments.
        calc_fn_hash(empty(), id, num_args, empty())
    }
}

/// Parse a function call expression, starting after the opening `(`.
fn parse_call_expr(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    id: String,
    mut modules: Option<Box<ModuleRef>>,
    settings: ParseSettings,
) -> Result<Expr, ParseError> {
    let (token, token_pos) = input.peek().unwrap();
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    let mut args = StaticVec::new();

    match token {
        // id( <EOF>
        Token::EOF => {
            return Err(PERR::MissingToken(
                Token::RightParen.into(),
                format!("to close the arguments list of this function call '{}'", id),
            )
            .into_err(*token_pos))
        }
        // id( <error>
        Token::LexError(err) => return Err(err.into_err(*token_pos)),
        // id() - call with no arguments
        Token::RightParen => {
            eat_token(input, Token::RightParen);

            let hash_script = qualified_call_hash(state, &mut modules, &id, 0);

            return Ok(Expr::FnCall(Box::new((
                (id.into(), false, settings.pos),
                modules,
                hash_script,
                args,
                None,
            ))));
        }
        // id...
        _ => (),
    }

    let settings = settings.level_up();

    loop {
        match input.peek().unwrap() {
            // id(...args, ) - handle trailing comma
            (Token::RightParen, _) => (),
            _ => args.push(parse_expr(input, state, lib, settings)?),
        }

        match input.peek().unwrap() {
            // id(...args)
            (Token::RightParen, _) => {
                eat_token(input, Token::RightParen);

                let hash_script = qualified_call_hash(state, &mut modules, &id, args.len());

                return Ok(Expr::FnCall(Box::new((
                    (id.into(), false, settings.pos),
                    modules,
                    hash_script,
                    args,
                    None,
                ))));
            }
            // id(...args,
            (Token::Comma, _) => {
                eat_token(input, Token::Comma);
            }
            // id(...args <EOF>
            (Token::EOF, pos) => {
                return Err(PERR::MissingToken(
                    Token::RightParen.into(),
                    format!("to close the arguments list of this function call '{}'", id),
                )
                .into_err(*pos))
            }
            // id(...args <error>
            (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
            // id(...args ???
            (_, pos) => {
                return Err(PERR::MissingToken(
                    Token::Comma.into(),
                    format!("to separate the arguments to function call '{}'", id),
                )
                .into_err(*pos))
            }
        }
    }
}
/// Parse an indexing chain.
/// Indexing binds to the right, so this call parses all possible levels of indexing following in the input.
///
/// `lhs` is the expression being indexed; the opening `[` has already been
/// consumed by the caller. Constant index expressions are sanity-checked at
/// parse time: arrays/strings take non-negative integer indices while object
/// maps take string indices, and obviously-invalid combinations are rejected
/// immediately with `PERR::MalformedIndexExpr`.
fn parse_index_chain(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    lhs: Expr,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // The index itself is a full expression.
    let idx_expr = parse_expr(input, state, lib, settings.level_up())?;
    // Check type of indexing - must be integer or string
    match &idx_expr {
        // lhs[int]
        Expr::IntegerConstant(x) if x.0 < 0 => {
            return Err(PERR::MalformedIndexExpr(format!(
                "Array access expects non-negative index: {} < 0",
                x.0
            ))
            .into_err(x.1))
        }
        Expr::IntegerConstant(x) => match lhs {
            Expr::Array(_) | Expr::StringConstant(_) => (),
            Expr::Map(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Object map access expects string index, not a number".into(),
                )
                .into_err(x.1))
            }
            #[cfg(not(feature = "no_float"))]
            Expr::FloatConstant(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Only arrays, object maps and strings can be indexed".into(),
                )
                .into_err(lhs.position()))
            }
            Expr::CharConstant(_)
            | Expr::Assignment(_)
            | Expr::And(_)
            | Expr::Or(_)
            | Expr::In(_)
            | Expr::True(_)
            | Expr::False(_)
            | Expr::Unit(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Only arrays, object maps and strings can be indexed".into(),
                )
                .into_err(lhs.position()))
            }
            _ => (),
        },
        // lhs[string]
        Expr::StringConstant(x) => match lhs {
            Expr::Map(_) => (),
            Expr::Array(_) | Expr::StringConstant(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Array or string expects numeric index, not a string".into(),
                )
                .into_err(x.1))
            }
            #[cfg(not(feature = "no_float"))]
            Expr::FloatConstant(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Only arrays, object maps and strings can be indexed".into(),
                )
                .into_err(lhs.position()))
            }
            Expr::CharConstant(_)
            | Expr::Assignment(_)
            | Expr::And(_)
            | Expr::Or(_)
            | Expr::In(_)
            | Expr::True(_)
            | Expr::False(_)
            | Expr::Unit(_) => {
                return Err(PERR::MalformedIndexExpr(
                    "Only arrays, object maps and strings can be indexed".into(),
                )
                .into_err(lhs.position()))
            }
            _ => (),
        },
        // lhs[float]
        #[cfg(not(feature = "no_float"))]
        x @ Expr::FloatConstant(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not a float".into(),
            )
            .into_err(x.position()))
        }
        // lhs[char]
        x @ Expr::CharConstant(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not a character".into(),
            )
            .into_err(x.position()))
        }
        // lhs[??? = ??? ]
        x @ Expr::Assignment(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not an assignment".into(),
            )
            .into_err(x.position()))
        }
        // lhs[()]
        x @ Expr::Unit(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not ()".into(),
            )
            .into_err(x.position()))
        }
        // lhs[??? && ???], lhs[??? || ???], lhs[??? in ???]
        x @ Expr::And(_) | x @ Expr::Or(_) | x @ Expr::In(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not a boolean".into(),
            )
            .into_err(x.position()))
        }
        // lhs[true], lhs[false]
        x @ Expr::True(_) | x @ Expr::False(_) => {
            return Err(PERR::MalformedIndexExpr(
                "Array access expects integer index, not a boolean".into(),
            )
            .into_err(x.position()))
        }
        // All other expressions
        _ => (),
    }
    // Check if there is a closing bracket
    match input.peek().unwrap() {
        (Token::RightBracket, _) => {
            eat_token(input, Token::RightBracket);
            // Any more indexing following?
            match input.peek().unwrap() {
                // If another indexing level, right-bind it
                (Token::LeftBracket, _) => {
                    let prev_pos = settings.pos;
                    settings.pos = eat_token(input, Token::LeftBracket);
                    // Recursively parse the indexing chain, right-binding each
                    let idx_expr =
                        parse_index_chain(input, state, lib, idx_expr, settings.level_up())?;
                    // Indexing binds to right
                    Ok(Expr::Index(Box::new((lhs, idx_expr, prev_pos))))
                }
                // Otherwise terminate the indexing chain
                _ => {
                    match idx_expr {
                        // Terminate with an `Expr::Expr` wrapper to prevent the last index expression
                        // inside brackets to be mis-parsed as another level of indexing, or a
                        // dot expression/function call to be mis-parsed as following the indexing chain.
                        Expr::Index(_) | Expr::Dot(_) | Expr::FnCall(_) => Ok(Expr::Index(
                            Box::new((lhs, Expr::Expr(Box::new(idx_expr)), settings.pos)),
                        )),
                        _ => Ok(Expr::Index(Box::new((lhs, idx_expr, settings.pos)))),
                    }
                }
            }
        }
        (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
        (_, pos) => Err(PERR::MissingToken(
            Token::RightBracket.into(),
            "for a matching [ in this index expression".into(),
        )
        .into_err(*pos)),
    }
}
/// Parse an array literal.
///
/// The opening `[` has already been consumed. Elements are parsed as full
/// expressions separated by commas until the closing `]`. The number of
/// elements is capped by the engine's `max_array_size` setting (0 means
/// unlimited) to guard against pathological scripts.
fn parse_array_literal(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut arr = StaticVec::new();
    while !input.peek().unwrap().0.is_eof() {
        // Enforce the engine's array-size limit before parsing another element.
        if state.engine.max_array_size > 0 && arr.len() >= state.engine.max_array_size {
            return Err(PERR::LiteralTooLarge(
                "Size of array literal".to_string(),
                state.engine.max_array_size,
            )
            .into_err(input.peek().unwrap().1));
        }
        // Either the closing `]` or the next element expression.
        match input.peek().unwrap() {
            (Token::RightBracket, _) => {
                eat_token(input, Token::RightBracket);
                break;
            }
            _ => {
                let expr = parse_expr(input, state, lib, settings.level_up())?;
                arr.push(expr);
            }
        }
        // After an element: expect `,` (more to come) or `]` (handled by the
        // next loop iteration); anything else is a missing-token error.
        match input.peek().unwrap() {
            (Token::Comma, _) => {
                eat_token(input, Token::Comma);
            }
            (Token::RightBracket, _) => (),
            (Token::EOF, pos) => {
                return Err(PERR::MissingToken(
                    Token::RightBracket.into(),
                    "to end this array literal".into(),
                )
                .into_err(*pos))
            }
            (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
            (_, pos) => {
                return Err(PERR::MissingToken(
                    Token::Comma.into(),
                    "to separate the items of this array literal".into(),
                )
                .into_err(*pos))
            }
        };
    }
    Ok(Expr::Array(Box::new((arr, settings.pos))))
}
/// Parse a map literal.
///
/// The opening map token has already been consumed. Each entry is
/// `name : expr` (or `"string" : expr`), separated by commas, closed by `}`.
/// The number of entries is capped by the engine's `max_map_size` setting
/// (0 means unlimited), and duplicate property names are rejected at parse
/// time.
fn parse_map_literal(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut map = StaticVec::new();
    while !input.peek().unwrap().0.is_eof() {
        const MISSING_RBRACE: &str = "to end this object map literal";
        // An immediate `}` closes the literal.
        match input.peek().unwrap() {
            (Token::RightBrace, _) => {
                eat_token(input, Token::RightBrace);
                break;
            }
            _ => (),
        }
        // Property name: a plain identifier or a string literal.
        let (name, pos) = match input.next().unwrap() {
            (Token::Identifier(s), pos) => (s, pos),
            (Token::StringConstant(s), pos) => (s, pos),
            (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
                return Err(PERR::Reserved(s).into_err(pos));
            }
            (Token::LexError(err), pos) => return Err(err.into_err(pos)),
            (_, pos) if map.is_empty() => {
                return Err(
                    PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
                        .into_err(pos),
                );
            }
            (Token::EOF, pos) => {
                return Err(
                    PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
                        .into_err(pos),
                );
            }
            (_, pos) => return Err(PERR::PropertyExpected.into_err(pos)),
        };
        // The `:` separating property name from value.
        match input.next().unwrap() {
            (Token::Colon, _) => (),
            (Token::LexError(err), pos) => return Err(err.into_err(pos)),
            (_, pos) => {
                return Err(PERR::MissingToken(
                    Token::Colon.into(),
                    format!(
                        "to follow the property '{}' in this object map literal",
                        name
                    ),
                )
                .into_err(pos))
            }
        };
        // Enforce the engine's map-size limit before parsing another entry.
        if state.engine.max_map_size > 0 && map.len() >= state.engine.max_map_size {
            return Err(PERR::LiteralTooLarge(
                "Number of properties in object map literal".to_string(),
                state.engine.max_map_size,
            )
            .into_err(input.peek().unwrap().1));
        }
        let expr = parse_expr(input, state, lib, settings.level_up())?;
        map.push(((Into::<ImmutableString>::into(name), pos), expr));
        // After an entry: expect `,` or `}`.
        match input.peek().unwrap() {
            (Token::Comma, _) => {
                eat_token(input, Token::Comma);
            }
            (Token::RightBrace, _) => (),
            (Token::Identifier(_), pos) => {
                return Err(PERR::MissingToken(
                    Token::Comma.into(),
                    "to separate the items of this object map literal".into(),
                )
                .into_err(*pos))
            }
            (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
            (_, pos) => {
                return Err(
                    PERR::MissingToken(Token::RightBrace.into(), MISSING_RBRACE.into())
                        .into_err(*pos),
                )
            }
        }
    }
    // Check for duplicating properties
    // NOTE(review): this is an O(n^2) scan (fine for literal sizes); the
    // reported position is that of the *later* duplicate occurrence.
    map.iter()
        .enumerate()
        .try_for_each(|(i, ((k1, _), _))| {
            map.iter()
                .skip(i + 1)
                .find(|((k2, _), _)| k2 == k1)
                .map_or_else(|| Ok(()), |((k2, pos), _)| Err((k2, *pos)))
        })
        .map_err(|(key, pos)| PERR::DuplicatedProperty(key.to_string()).into_err(pos))?;
    Ok(Expr::Map(Box::new((map, settings.pos))))
}
/// Parse a primary expression.
///
/// A primary is a literal, a variable (or function-call head), a
/// parenthesized expression, an array/map literal or a block-as-expression,
/// followed by any number of postfix operations: function call `(`, module
/// path `::`, and indexing `[`. For module-qualified variables, the hash of
/// the fully-qualified name is pre-computed and cached in the AST node.
fn parse_primary(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    let (token, token_pos) = input.peek().unwrap();
    settings.pos = *token_pos;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let (token, _) = match token {
        // { - block statement as expression
        Token::LeftBrace if settings.allow_stmt_expr => {
            return parse_block(input, state, lib, settings.level_up())
                .map(|block| Expr::Stmt(Box::new((block, settings.pos))))
        }
        Token::EOF => return Err(PERR::UnexpectedEOF.into_err(settings.pos)),
        _ => input.next().unwrap(),
    };
    let mut root_expr = match token {
        Token::IntegerConstant(x) => Expr::IntegerConstant(Box::new((x, settings.pos))),
        #[cfg(not(feature = "no_float"))]
        Token::FloatConstant(x) => Expr::FloatConstant(Box::new(FloatWrapper(x, settings.pos))),
        Token::CharConstant(c) => Expr::CharConstant(Box::new((c, settings.pos))),
        Token::StringConstant(s) => Expr::StringConstant(Box::new((s.into(), settings.pos))),
        Token::Identifier(s) => {
            let index = state.find_var(&s);
            Expr::Variable(Box::new(((s, settings.pos), None, 0, index)))
        }
        // Function call is allowed to have reserved keyword
        Token::Reserved(s) if s != KEYWORD_THIS && input.peek().unwrap().0 == Token::LeftParen => {
            Expr::Variable(Box::new(((s, settings.pos), None, 0, None)))
        }
        // Access to `this` as a variable is OK
        Token::Reserved(s) if s == KEYWORD_THIS && input.peek().unwrap().0 != Token::LeftParen => {
            if !settings.is_function_scope {
                return Err(
                    PERR::BadInput(format!("'{}' can only be used in functions", s))
                        .into_err(settings.pos),
                );
            } else {
                Expr::Variable(Box::new(((s, settings.pos), None, 0, None)))
            }
        }
        Token::Reserved(s) if is_valid_identifier(s.chars()) => {
            return Err(PERR::Reserved(s).into_err(settings.pos));
        }
        Token::LeftParen => parse_paren_expr(input, state, lib, settings.level_up())?,
        #[cfg(not(feature = "no_index"))]
        Token::LeftBracket => parse_array_literal(input, state, lib, settings.level_up())?,
        #[cfg(not(feature = "no_object"))]
        Token::MapStart => parse_map_literal(input, state, lib, settings.level_up())?,
        Token::True => Expr::True(settings.pos),
        Token::False => Expr::False(settings.pos),
        Token::LexError(err) => return Err(err.into_err(settings.pos)),
        _ => {
            return Err(
                PERR::BadInput(format!("Unexpected '{}'", token.syntax())).into_err(settings.pos)
            );
        }
    };
    // Tail processing all possible postfix operators
    loop {
        let (token, _) = input.peek().unwrap();
        if !root_expr.is_valid_postfix(token) {
            break;
        }
        let (token, token_pos) = input.next().unwrap();
        settings.pos = token_pos;
        root_expr = match (root_expr, token) {
            // Function call
            (Expr::Variable(x), Token::LeftParen) => {
                let ((name, pos), modules, _, _) = *x;
                settings.pos = pos;
                parse_call_expr(input, state, lib, name, modules, settings.level_up())?
            }
            (Expr::Property(_), _) => unreachable!(),
            // module access
            (Expr::Variable(x), Token::DoubleColon) => match input.next().unwrap() {
                (Token::Identifier(id2), pos2) => {
                    let ((name, pos), mut modules, _, index) = *x;
                    // Append this segment to the module path, creating the
                    // path on first use.
                    if let Some(ref mut modules) = modules {
                        modules.push((name, pos));
                    } else {
                        let mut m: ModuleRef = Default::default();
                        m.push((name, pos));
                        modules = Some(Box::new(m));
                    }
                    Expr::Variable(Box::new(((id2, pos2), modules, 0, index)))
                }
                (Token::Reserved(id2), pos2) if is_valid_identifier(id2.chars()) => {
                    return Err(PERR::Reserved(id2).into_err(pos2));
                }
                (_, pos2) => return Err(PERR::VariableExpected.into_err(pos2)),
            },
            // Indexing
            #[cfg(not(feature = "no_index"))]
            (expr, Token::LeftBracket) => {
                parse_index_chain(input, state, lib, expr, settings.level_up())?
            }
            // Unknown postfix operator
            (expr, token) => unreachable!(
                "unknown postfix operator '{}' for {:?}",
                token.syntax(),
                expr
            ),
        }
    }
    match &mut root_expr {
        // Cache the hash key for module-qualified variables
        Expr::Variable(x) if x.1.is_some() => {
            let ((name, _), modules, hash, _) = x.as_mut();
            let modules = modules.as_mut().unwrap();
            // Qualifiers + variable name
            *hash = calc_fn_hash(modules.iter().map(|(v, _)| v.as_str()), name, 0, empty());
            modules.set_index(state.find_module(&modules.get(0).0));
        }
        _ => (),
    }
    // Make sure identifiers are valid
    Ok(root_expr)
}
/// Parse a potential unary operator.
///
/// Handles prefix `-`, `+` and `!`, `if`-as-expression, and anonymous
/// functions (closures starting with `|` or `||`). Negation of numeric
/// constants is folded at parse time, falling back to a float (or an error
/// under `no_float`) on integer overflow. Everything else defers to
/// `parse_primary`.
fn parse_unary(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    let (token, token_pos) = input.peek().unwrap();
    settings.pos = *token_pos;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    match token {
        // If statement is allowed to act as expressions
        Token::If if settings.allow_if_expr => Ok(Expr::Stmt(Box::new((
            parse_if(input, state, lib, settings.level_up())?,
            settings.pos,
        )))),
        // -expr
        Token::UnaryMinus => {
            let pos = eat_token(input, Token::UnaryMinus);
            match parse_unary(input, state, lib, settings.level_up())? {
                // Negative integer
                Expr::IntegerConstant(x) => {
                    let (num, pos) = *x;
                    // Fold the negation; on overflow (i.e. -MIN) fall back to
                    // a float constant, or fail under `no_float`.
                    num.checked_neg()
                        .map(|i| Expr::IntegerConstant(Box::new((i, pos))))
                        .or_else(|| {
                            #[cfg(not(feature = "no_float"))]
                            return Some(Expr::FloatConstant(Box::new(FloatWrapper(
                                -(x.0 as FLOAT),
                                pos,
                            ))));
                            #[cfg(feature = "no_float")]
                            return None;
                        })
                        .ok_or_else(|| LexError::MalformedNumber(format!("-{}", x.0)).into_err(pos))
                }
                // Negative float
                #[cfg(not(feature = "no_float"))]
                Expr::FloatConstant(x) => {
                    Ok(Expr::FloatConstant(Box::new(FloatWrapper(-x.0, x.1))))
                }
                // Call negative function
                expr => {
                    let op = "-";
                    // NOTE(review): hash is computed with arity 2 although only
                    // one argument is pushed - presumably reconciled at the
                    // call-site lookup; confirm against calc_fn_hash usage.
                    let hash = calc_fn_hash(empty(), op, 2, empty());
                    let mut args = StaticVec::new();
                    args.push(expr);
                    Ok(Expr::FnCall(Box::new((
                        (op.into(), true, pos),
                        None,
                        hash,
                        args,
                        None,
                    ))))
                }
            }
        }
        // +expr
        Token::UnaryPlus => {
            eat_token(input, Token::UnaryPlus);
            parse_unary(input, state, lib, settings.level_up())
        }
        // !expr
        Token::Bang => {
            let pos = eat_token(input, Token::Bang);
            let mut args = StaticVec::new();
            let expr = parse_primary(input, state, lib, settings.level_up())?;
            args.push(expr);
            let op = "!";
            // NOTE(review): arity 2 in the hash vs. one pushed argument - see
            // the unary `-` case above.
            let hash = calc_fn_hash(empty(), op, 2, empty());
            Ok(Expr::FnCall(Box::new((
                (op.into(), true, pos),
                None,
                hash,
                args,
                Some(false), // NOT operator, when operating on invalid operand, defaults to false
            ))))
        }
        // | ...
        #[cfg(not(feature = "no_function"))]
        Token::Pipe | Token::Or => {
            // Anonymous function: parse with a fresh state so its variables
            // do not leak into the enclosing scope.
            let mut state = ParseState::new(
                state.engine,
                state.max_function_expr_depth,
                state.max_function_expr_depth,
            );
            let settings = ParseSettings {
                allow_if_expr: true,
                allow_stmt_expr: true,
                allow_anonymous_fn: true,
                is_global: false,
                is_function_scope: true,
                is_breakable: false,
                level: 0,
                pos: *token_pos,
            };
            let (expr, func) = parse_anon_fn(input, &mut state, lib, settings)?;
            // Qualifiers (none) + function name + number of arguments.
            let hash = calc_fn_hash(empty(), &func.name, func.params.len(), empty());
            lib.insert(hash, func);
            Ok(expr)
        }
        // <EOF>
        Token::EOF => Err(PERR::UnexpectedEOF.into_err(settings.pos)),
        // All other tokens
        _ => parse_primary(input, state, lib, settings.level_up()),
    }
}
/// Build an assignment expression `lhs op= rhs`, verifying that the
/// left-hand side is actually assignable.
///
/// `fn_name` is empty for a plain `=`, or the operator's own syntax for a
/// compound assignment. Assignment to a constant yields
/// `PERR::AssignmentToConstant`; assignment to a temporary (copy) value
/// yields `PERR::AssignmentToCopy`.
fn make_assignment_stmt<'a>(
    fn_name: Cow<'static, str>,
    state: &mut ParseState,
    lhs: Expr,
    rhs: Expr,
    pos: Position,
) -> Result<Expr, ParseError> {
    match &lhs {
        // var (non-indexed) = rhs
        Expr::Variable(x) if x.3.is_none() => {
            Ok(Expr::Assignment(Box::new((lhs, fn_name.into(), rhs, pos))))
        }
        // var (indexed) = rhs
        Expr::Variable(x) => {
            let ((name, name_pos), _, _, index) = x.as_ref();
            // `index` is a non-zero offset from the top of the parse-time
            // variable stack identifying the target variable.
            match state.stack[(state.stack.len() - index.unwrap().get())].1 {
                ScopeEntryType::Normal => {
                    Ok(Expr::Assignment(Box::new((lhs, fn_name.into(), rhs, pos))))
                }
                // Constant values cannot be assigned to
                ScopeEntryType::Constant => {
                    Err(PERR::AssignmentToConstant(name.clone()).into_err(*name_pos))
                }
            }
        }
        // xxx[???] = rhs, xxx.??? = rhs
        Expr::Index(x) | Expr::Dot(x) => match &x.0 {
            // var[???] (non-indexed) = rhs, var.??? (non-indexed) = rhs
            Expr::Variable(x) if x.3.is_none() => {
                Ok(Expr::Assignment(Box::new((lhs, fn_name.into(), rhs, pos))))
            }
            // var[???] (indexed) = rhs, var.??? (indexed) = rhs
            Expr::Variable(x) => {
                let ((name, name_pos), _, _, index) = x.as_ref();
                match state.stack[(state.stack.len() - index.unwrap().get())].1 {
                    ScopeEntryType::Normal => {
                        Ok(Expr::Assignment(Box::new((lhs, fn_name.into(), rhs, pos))))
                    }
                    // Constant values cannot be assigned to
                    ScopeEntryType::Constant => {
                        Err(PERR::AssignmentToConstant(name.clone()).into_err(*name_pos))
                    }
                }
            }
            // expr[???] = rhs, expr.??? = rhs
            _ => Err(PERR::AssignmentToCopy.into_err(x.0.position())),
        },
        // const_expr = rhs
        expr if expr.is_constant() => {
            Err(PERR::AssignmentToConstant("".into()).into_err(lhs.position()))
        }
        // ??? && ??? = rhs, ??? || ??? = rhs
        Expr::And(_) | Expr::Or(_) => {
            Err(PERR::BadInput("Possibly a typo of '=='?".to_string()).into_err(pos))
        }
        // expr = rhs
        _ => Err(PERR::AssignmentToCopy.into_err(lhs.position())),
    }
}
/// Parse an operator-assignment expression.
///
/// If the next token is not `=` nor one of the compound-assignment operators
/// (`+=`, `-=`, `*=`, ...), `lhs` is returned untouched. Otherwise the
/// operator is consumed, the right-hand side is parsed as a full expression,
/// and the assignment node is built via `make_assignment_stmt`.
fn parse_op_assignment_stmt(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    lhs: Expr,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    let (token, token_pos) = input.peek().unwrap();
    settings.pos = *token_pos;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    // Classify the operator: plain `=` carries an empty name, a compound
    // assignment carries its own syntax, anything else is not an assignment.
    let is_compound = match token {
        Token::Equals => false,
        Token::PlusAssign
        | Token::MinusAssign
        | Token::MultiplyAssign
        | Token::DivideAssign
        | Token::LeftShiftAssign
        | Token::RightShiftAssign
        | Token::ModuloAssign
        | Token::PowerOfAssign
        | Token::AndAssign
        | Token::OrAssign
        | Token::XOrAssign => true,
        _ => return Ok(lhs),
    };
    let op = if is_compound { token.syntax() } else { "".into() };

    // Consume the operator, then parse the value being assigned.
    let (_, pos) = input.next().unwrap();
    let rhs = parse_expr(input, state, lib, settings.level_up())?;
    make_assignment_stmt(op, state, lhs, rhs, pos)
}
/// Make a dot expression.
///
/// Combines `lhs . rhs` into a normalized `Expr::Dot` chain. A dot applied
/// to an indexing chain is pushed down to the bottom level of the chain;
/// bare variables on the right become `Expr::Property` nodes (with getter/
/// setter names pre-computed); module-qualified names and anything else that
/// is not a property or method call are rejected.
fn make_dot_expr(lhs: Expr, rhs: Expr, op_pos: Position) -> Result<Expr, ParseError> {
    Ok(match (lhs, rhs) {
        // idx_lhs[idx_expr].rhs
        // Attach dot chain to the bottom level of indexing chain
        (Expr::Index(x), rhs) => {
            let (idx_lhs, idx_expr, pos) = *x;
            Expr::Index(Box::new((
                idx_lhs,
                make_dot_expr(idx_expr, rhs, op_pos)?,
                pos,
            )))
        }
        // lhs.id
        (lhs, Expr::Variable(x)) if x.1.is_none() => {
            let (name, pos) = x.0;
            // Pre-compute getter/setter function names for the property.
            let getter = make_getter(&name);
            let setter = make_setter(&name);
            let rhs = Expr::Property(Box::new(((name.into(), getter, setter), pos)));
            Expr::Dot(Box::new((lhs, rhs, op_pos)))
        }
        // lhs.module::id - syntax error
        (_, Expr::Variable(x)) if x.1.is_some() => {
            return Err(PERR::PropertyExpected.into_err(x.1.unwrap().get(0).1));
        }
        // lhs.prop
        (lhs, prop @ Expr::Property(_)) => Expr::Dot(Box::new((lhs, prop, op_pos))),
        // lhs.dot_lhs.dot_rhs
        (lhs, Expr::Dot(x)) => {
            let (dot_lhs, dot_rhs, pos) = *x;
            Expr::Dot(Box::new((
                lhs,
                Expr::Dot(Box::new((dot_lhs.into_property(), dot_rhs, pos))),
                op_pos,
            )))
        }
        // lhs.idx_lhs[idx_rhs]
        (lhs, Expr::Index(x)) => {
            let (dot_lhs, dot_rhs, pos) = *x;
            Expr::Dot(Box::new((
                lhs,
                Expr::Index(Box::new((dot_lhs.into_property(), dot_rhs, pos))),
                op_pos,
            )))
        }
        // lhs.func()
        (lhs, func @ Expr::FnCall(_)) => Expr::Dot(Box::new((lhs, func, op_pos))),
        // lhs.rhs
        (_, rhs) => return Err(PERR::PropertyExpected.into_err(rhs.position())),
    })
}
/// Make an 'in' expression.
///
/// Performs parse-time type checking for constant operands of `lhs in rhs`:
/// the right-hand side must be able to evaluate to a string or an object
/// map; for a constant string RHS the LHS must be a string or char, and for
/// a constant map RHS the LHS must be a string. Statically-invalid
/// combinations are rejected with `PERR::MalformedInExpr`; everything else
/// is deferred to runtime.
fn make_in_expr(lhs: Expr, rhs: Expr, op_pos: Position) -> Result<Expr, ParseError> {
    match (&lhs, &rhs) {
        // RHS constants that can never act as a container.
        (_, x @ Expr::IntegerConstant(_))
        | (_, x @ Expr::And(_))
        | (_, x @ Expr::Or(_))
        | (_, x @ Expr::In(_))
        | (_, x @ Expr::Assignment(_))
        | (_, x @ Expr::True(_))
        | (_, x @ Expr::False(_))
        | (_, x @ Expr::Unit(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression expects a string, array or object map".into(),
            )
            .into_err(x.position()))
        }
        #[cfg(not(feature = "no_float"))]
        (_, x @ Expr::FloatConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression expects a string, array or object map".into(),
            )
            .into_err(x.position()))
        }
        // "xxx" in "xxxx", 'x' in "xxxx" - OK!
        (Expr::StringConstant(_), Expr::StringConstant(_))
        | (Expr::CharConstant(_), Expr::StringConstant(_)) => (),
        // 123.456 in "xxxx"
        #[cfg(not(feature = "no_float"))]
        (x @ Expr::FloatConstant(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not a float".into(),
            )
            .into_err(x.position()))
        }
        // 123 in "xxxx"
        (x @ Expr::IntegerConstant(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not a number".into(),
            )
            .into_err(x.position()))
        }
        // (??? && ???) in "xxxx", (??? || ???) in "xxxx", (??? in ???) in "xxxx",
        // true in "xxxx", false in "xxxx"
        (x @ Expr::And(_), Expr::StringConstant(_))
        | (x @ Expr::Or(_), Expr::StringConstant(_))
        | (x @ Expr::In(_), Expr::StringConstant(_))
        | (x @ Expr::True(_), Expr::StringConstant(_))
        | (x @ Expr::False(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not a boolean".into(),
            )
            .into_err(x.position()))
        }
        // [???, ???, ???] in "xxxx"
        (x @ Expr::Array(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not an array".into(),
            )
            .into_err(x.position()))
        }
        // #{...} in "xxxx"
        (x @ Expr::Map(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not an object map".into(),
            )
            .into_err(x.position()))
        }
        // (??? = ???) in "xxxx"
        (x @ Expr::Assignment(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not an assignment".into(),
            )
            .into_err(x.position()))
        }
        // () in "xxxx"
        (x @ Expr::Unit(_), Expr::StringConstant(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for a string expects a string, not ()".into(),
            )
            .into_err(x.position()))
        }
        // "xxx" in #{...}, 'x' in #{...} - OK!
        (Expr::StringConstant(_), Expr::Map(_)) | (Expr::CharConstant(_), Expr::Map(_)) => (),
        // 123.456 in #{...}
        #[cfg(not(feature = "no_float"))]
        (x @ Expr::FloatConstant(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not a float".into(),
            )
            .into_err(x.position()))
        }
        // 123 in #{...}
        (x @ Expr::IntegerConstant(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not a number".into(),
            )
            .into_err(x.position()))
        }
        // (??? && ???) in #{...}, (??? || ???) in #{...}, (??? in ???) in #{...},
        // true in #{...}, false in #{...}
        (x @ Expr::And(_), Expr::Map(_))
        | (x @ Expr::Or(_), Expr::Map(_))
        | (x @ Expr::In(_), Expr::Map(_))
        | (x @ Expr::True(_), Expr::Map(_))
        | (x @ Expr::False(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not a boolean".into(),
            )
            .into_err(x.position()))
        }
        // [???, ???, ???] in #{..}
        (x @ Expr::Array(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not an array".into(),
            )
            .into_err(x.position()))
        }
        // #{...} in #{..}
        (x @ Expr::Map(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not an object map".into(),
            )
            .into_err(x.position()))
        }
        // (??? = ???) in #{...}
        (x @ Expr::Assignment(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not an assignment".into(),
            )
            .into_err(x.position()))
        }
        // () in #{...}
        (x @ Expr::Unit(_), Expr::Map(_)) => {
            return Err(PERR::MalformedInExpr(
                "'in' expression for an object map expects a string, not ()".into(),
            )
            .into_err(x.position()))
        }
        _ => (),
    }
    Ok(Expr::In(Box::new((lhs, rhs, op_pos))))
}
/// Parse a binary expression.
///
/// Implements precedence climbing: `parent_precedence` is the binding power
/// of the operator immediately to the left; operators with lower precedence
/// terminate this level and are handled by an outer call. `&&`, `||`, `in`
/// and `.` build dedicated AST nodes; every other operator (including
/// engine-registered custom operators, which contribute their own declared
/// precedence) becomes a function call.
fn parse_binary_op(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    parent_precedence: u8,
    lhs: Expr,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.pos = lhs.position();
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut root = lhs;
    loop {
        let (current_op, _) = input.peek().unwrap();
        let custom = state.engine.custom_keywords.as_ref();
        let precedence = current_op.precedence(custom);
        let bind_right = current_op.is_bind_right();
        // Bind left to the parent lhs expression if precedence is higher
        // If same precedence, then check if the operator binds right
        if precedence < parent_precedence || (precedence == parent_precedence && !bind_right) {
            return Ok(root);
        }
        let (op_token, pos) = input.next().unwrap();
        let rhs = parse_unary(input, state, lib, settings)?;
        let next_precedence = input.peek().unwrap().0.precedence(custom);
        // Bind to right if the next operator has higher precedence
        // If same precedence, then check if the operator binds right
        let rhs = if (precedence == next_precedence && bind_right) || precedence < next_precedence {
            parse_binary_op(input, state, lib, precedence, rhs, settings)?
        } else {
            // Otherwise bind to left (even if next operator has the same precedence)
            rhs
        };
        settings = settings.level_up();
        settings.pos = pos;
        settings.ensure_level_within_max_limit(state.max_expr_depth)?;
        let cmp_def = Some(false);
        let op = op_token.syntax();
        // Hash for a potential script-defined implementation of the operator.
        let hash = calc_fn_hash(empty(), &op, 2, empty());
        let op = (op, true, pos);
        let mut args = StaticVec::new();
        args.push(root);
        args.push(rhs);
        root = match op_token {
            Token::Plus
            | Token::Minus
            | Token::Multiply
            | Token::Divide
            | Token::LeftShift
            | Token::RightShift
            | Token::Modulo
            | Token::PowerOf
            | Token::Ampersand
            | Token::Pipe
            | Token::XOr => Expr::FnCall(Box::new((op, None, hash, args, None))),
            // '!=' defaults to true when passed invalid operands
            Token::NotEqualsTo => Expr::FnCall(Box::new((op, None, hash, args, Some(true)))),
            // Comparison operators default to false when passed invalid operands
            Token::EqualsTo
            | Token::LessThan
            | Token::LessThanEqualsTo
            | Token::GreaterThan
            | Token::GreaterThanEqualsTo => Expr::FnCall(Box::new((op, None, hash, args, cmp_def))),
            Token::Or => {
                let rhs = args.pop();
                let current_lhs = args.pop();
                Expr::Or(Box::new((current_lhs, rhs, pos)))
            }
            Token::And => {
                let rhs = args.pop();
                let current_lhs = args.pop();
                Expr::And(Box::new((current_lhs, rhs, pos)))
            }
            Token::In => {
                let rhs = args.pop();
                let current_lhs = args.pop();
                make_in_expr(current_lhs, rhs, pos)?
            }
            #[cfg(not(feature = "no_object"))]
            Token::Period => {
                let rhs = args.pop();
                let current_lhs = args.pop();
                make_dot_expr(current_lhs, rhs, pos)?
            }
            Token::Custom(s)
                if state
                    .engine
                    .custom_keywords
                    .as_ref()
                    .map(|c| c.contains_key(&s))
                    .unwrap_or(false) =>
            {
                // Accept non-native functions for custom operators
                let op = (op.0, false, op.2);
                Expr::FnCall(Box::new((op, None, hash, args, None)))
            }
            op_token => return Err(PERR::UnknownOperator(op_token.into()).into_err(pos)),
        };
    }
}
/// Parse an expression.
///
/// First checks whether the leading token starts a registered custom-syntax
/// construct; if so, its declared segments (identifier, expression or block
/// markers, and literal tokens) are consumed one by one and wrapped in an
/// `Expr::Custom` node. Otherwise a unary expression followed by binary
/// operators (starting at the lowest precedence, 1) is parsed normally.
fn parse_expr(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Expr, ParseError> {
    settings.pos = input.peek().unwrap().1;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // Check if it is a custom syntax.
    if let Some(ref custom) = state.engine.custom_syntax {
        let (token, pos) = input.peek().unwrap();
        let token_pos = *pos;
        match token {
            Token::Custom(key) if custom.contains_key(key) => {
                let custom = custom.get_key_value(key).unwrap();
                let (key, syntax) = custom;
                input.next().unwrap();
                let mut exprs: StaticVec<Expr> = Default::default();
                // Adjust the variables stack
                // NOTE(review): a positive delta pushes a single placeholder
                // entry regardless of its magnitude - confirm intended.
                match syntax.scope_delta {
                    delta if delta > 0 => {
                        state.stack.push(("".to_string(), ScopeEntryType::Normal))
                    }
                    delta if delta < 0 && state.stack.len() <= delta.abs() as usize => {
                        state.stack.clear()
                    }
                    delta if delta < 0 => state
                        .stack
                        .truncate(state.stack.len() - delta.abs() as usize),
                    _ => (),
                }
                // Consume the custom syntax's declared segments in order.
                for segment in syntax.segments.iter() {
                    settings.pos = input.peek().unwrap().1;
                    let settings = settings.level_up();
                    match segment.as_str() {
                        MARKER_IDENT => match input.next().unwrap() {
                            (Token::Identifier(s), pos) => {
                                exprs.push(Expr::Variable(Box::new(((s, pos), None, 0, None))));
                            }
                            (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
                                return Err(PERR::Reserved(s).into_err(pos));
                            }
                            (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
                        },
                        MARKER_EXPR => exprs.push(parse_expr(input, state, lib, settings)?),
                        MARKER_BLOCK => {
                            let stmt = parse_block(input, state, lib, settings)?;
                            let pos = stmt.position();
                            exprs.push(Expr::Stmt(Box::new((stmt, pos))))
                        }
                        // A literal segment must match the next token exactly.
                        s => match input.peek().unwrap() {
                            (t, _) if t.syntax().as_ref() == s => {
                                input.next().unwrap();
                            }
                            (_, pos) => {
                                return Err(PERR::MissingToken(
                                    s.to_string(),
                                    format!("for '{}' expression", key),
                                )
                                .into_err(*pos))
                            }
                        },
                    }
                }
                return Ok(Expr::Custom(Box::new((
                    CustomExpr(exprs, syntax.func.clone()),
                    token_pos,
                ))));
            }
            _ => (),
        }
    }
    // Parse expression normally.
    let lhs = parse_unary(input, state, lib, settings.level_up())?;
    parse_binary_op(input, state, lib, 1, lhs, settings.level_up())
}
/// Make sure that the next expression is not a statement expression
/// (i.e. wrapped in `{}`) and not end-of-input.
///
/// `type_name` describes what kind of expression was expected and is
/// embedded verbatim in the resulting error message.
fn ensure_not_statement_expr(input: &mut TokenStream, type_name: &str) -> Result<(), ParseError> {
    let (token, pos) = input.peek().unwrap();
    match token {
        // A `{` (statement expression) or EOF is never a valid start here.
        Token::LeftBrace | Token::EOF => {
            Err(PERR::ExprExpected(type_name.to_string()).into_err(*pos))
        }
        // Anything else is left for the expression parser to validate.
        _ => Ok(()),
    }
}
/// Make sure that the expression is not a mis-typed assignment
/// (i.e. `a = b` instead of `a == b`).
///
/// Called after parsing a boolean guard: a trailing `=` is almost certainly
/// a typo for `==`, and a compound-assignment operator is equally invalid
/// in that position.
fn ensure_not_assignment(input: &mut TokenStream) -> Result<(), ParseError> {
    let (token, pos) = input.peek().unwrap();
    match token {
        // Bare `=` - most likely a typo for `==`.
        Token::Equals => {
            Err(PERR::BadInput("Possibly a typo of '=='?".to_string()).into_err(*pos))
        }
        // Any compound assignment is also invalid where a boolean is expected.
        Token::PlusAssign
        | Token::MinusAssign
        | Token::MultiplyAssign
        | Token::DivideAssign
        | Token::LeftShiftAssign
        | Token::RightShiftAssign
        | Token::ModuloAssign
        | Token::PowerOfAssign
        | Token::AndAssign
        | Token::OrAssign
        | Token::XOrAssign => Err(PERR::BadInput(
            "Expecting a boolean expression, not an assignment".to_string(),
        )
        .into_err(*pos)),
        _ => Ok(()),
    }
}
/// Parse an if statement.
///
/// Syntax: `if guard { body }` optionally followed by `else { ... }` or an
/// `else if ...` chain (handled by recursing into `parse_if`).
fn parse_if(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // Consume the leading `if` keyword.
    settings.pos = eat_token(input, Token::If);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    // The condition must be a plain expression: not a `{}` block, and not a
    // lone `=` (a likely typo for `==`).
    ensure_not_statement_expr(input, "a boolean")?;
    let condition = parse_expr(input, state, lib, settings.level_up())?;
    ensure_not_assignment(input)?;

    // The `if` body is always a braced block.
    let then_body = parse_block(input, state, lib, settings.level_up())?;

    // Optional `else` clause: either another `if` (else-if chain) or a block.
    let mut else_body = None;
    if match_token(input, Token::Else).unwrap_or(false) {
        let stmt = if let (Token::If, _) = input.peek().unwrap() {
            parse_if(input, state, lib, settings.level_up())?
        } else {
            parse_block(input, state, lib, settings.level_up())?
        };
        else_body = Some(stmt);
    }

    Ok(Stmt::IfThenElse(Box::new((condition, then_body, else_body))))
}
/// Parse a while loop.
///
/// Syntax: `while guard { body }`. The body is parsed with `break`/`continue`
/// enabled.
fn parse_while(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // Consume the leading `while` keyword.
    settings.pos = eat_token(input, Token::While);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    // The condition must be a plain boolean expression: not a `{}` block,
    // and not an `=` assignment (a likely typo for `==`).
    ensure_not_statement_expr(input, "a boolean")?;
    let condition = parse_expr(input, state, lib, settings.level_up())?;
    ensure_not_assignment(input)?;

    // `break`/`continue` are legal inside the loop body.
    settings.is_breakable = true;
    let loop_body = parse_block(input, state, lib, settings.level_up())?;

    Ok(Stmt::While(Box::new((condition, loop_body))))
}
/// Parse a loop statement.
///
/// Syntax: `loop { body }` - an infinite loop whose only exit is `break`.
fn parse_loop(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // Consume the leading `loop` keyword.
    settings.pos = eat_token(input, Token::Loop);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;

    // There is no guard - only a body, parsed with `break`/`continue` enabled.
    settings.is_breakable = true;
    let loop_body = parse_block(input, state, lib, settings.level_up())?;

    Ok(Stmt::Loop(Box::new(loop_body)))
}
/// Parse a for loop.
///
/// Syntax: `for name in expr { body }`. The iteration variable is pushed
/// onto the parse-time variable stack for the duration of the body and the
/// stack is restored afterwards. The body is parsed with `break`/`continue`
/// enabled.
fn parse_for(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // for ...
    settings.pos = eat_token(input, Token::For);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // for name ...
    let name = match input.next().unwrap() {
        // Variable name
        (Token::Identifier(s), _) => s,
        // Reserved keyword
        (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
            return Err(PERR::Reserved(s).into_err(pos));
        }
        // Bad identifier
        (Token::LexError(err), pos) => return Err(err.into_err(pos)),
        // Not a variable name
        (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
    };
    // for name in ...
    match input.next().unwrap() {
        (Token::In, _) => (),
        (Token::LexError(err), pos) => return Err(err.into_err(pos)),
        (_, pos) => {
            return Err(
                PERR::MissingToken(Token::In.into(), "after the iteration variable".into())
                    .into_err(pos),
            )
        }
    }
    // for name in expr { body }
    // BUGFIX: the iterated expression yields a collection, so the error text
    // says "an iterable"; the previous "a boolean" was copied from the
    // while-loop parser and produced a misleading message.
    ensure_not_statement_expr(input, "an iterable")?;
    let expr = parse_expr(input, state, lib, settings.level_up())?;
    // Scope the iteration variable to the loop body only.
    let prev_stack_len = state.stack.len();
    state.stack.push((name.clone(), ScopeEntryType::Normal));
    settings.is_breakable = true;
    let body = parse_block(input, state, lib, settings.level_up())?;
    // Restore the variable stack to its pre-loop state.
    state.stack.truncate(prev_stack_len);
    Ok(Stmt::For(Box::new((name, expr, body))))
}
/// Parse a variable definition statement.
///
/// Handles both `let` and `const` (selected by `var_type`), with or without
/// an initializer:
/// * `let name` / `let name = expr`
/// * `const name` / `const name = <constant expr>`
///
/// The declared name is pushed onto the parse stack so later statements in
/// the same scope can resolve it.
fn parse_let(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    var_type: ScopeEntryType,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // let/const... (specified in `var_type`)
    settings.pos = input.next().unwrap().1;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // let name ...
    let (name, pos) = match input.next().unwrap() {
        (Token::Identifier(s), pos) => (s, pos),
        (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
            return Err(PERR::Reserved(s).into_err(pos));
        }
        (Token::LexError(err), pos) => return Err(err.into_err(pos)),
        (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
    };
    // let name = ...
    if match_token(input, Token::Equals)? {
        // let name = expr
        let init_value = parse_expr(input, state, lib, settings.level_up())?;
        match var_type {
            // let name = expr
            ScopeEntryType::Normal => {
                state.stack.push((name.clone(), ScopeEntryType::Normal));
                Ok(Stmt::Let(Box::new(((name, pos), Some(init_value)))))
            }
            // const name = { expr:constant }
            // Constants may only be initialized with constant expressions.
            ScopeEntryType::Constant if init_value.is_constant() => {
                state.stack.push((name.clone(), ScopeEntryType::Constant));
                Ok(Stmt::Const(Box::new(((name, pos), init_value))))
            }
            // const name = expr: error
            ScopeEntryType::Constant => {
                Err(PERR::ForbiddenConstantExpr(name).into_err(init_value.position()))
            }
        }
    } else {
        // let name
        match var_type {
            ScopeEntryType::Normal => {
                state.stack.push((name.clone(), ScopeEntryType::Normal));
                Ok(Stmt::Let(Box::new(((name, pos), None))))
            }
            // A constant declared without an initializer gets the unit value.
            ScopeEntryType::Constant => {
                state.stack.push((name.clone(), ScopeEntryType::Constant));
                Ok(Stmt::Const(Box::new(((name, pos), Expr::Unit(pos)))))
            }
        }
    }
}
/// Parse an import statement.
///
/// Grammar: `import <expr> as <name>`.
///
/// The module alias `name` is recorded in `state.modules` so that later
/// qualified accesses in the same scope can resolve it.
#[cfg(not(feature = "no_module"))]
fn parse_import(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // import ...
    settings.pos = eat_token(input, Token::Import);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // import expr ...
    let expr = parse_expr(input, state, lib, settings.level_up())?;
    // import expr as ...
    match input.next().unwrap() {
        (Token::As, _) => (),
        (_, pos) => {
            return Err(
                PERR::MissingToken(Token::As.into(), "in this import statement".into())
                    .into_err(pos),
            )
        }
    }
    // import expr as name ...
    let (name, _) = match input.next().unwrap() {
        (Token::Identifier(s), pos) => (s, pos),
        (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
            return Err(PERR::Reserved(s).into_err(pos));
        }
        (Token::LexError(err), pos) => return Err(err.into_err(pos)),
        (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
    };
    state.modules.push(name.clone());
    Ok(Stmt::Import(Box::new((expr, (name, settings.pos)))))
}
/// Parse an export statement.
///
/// Grammar: `export a, b as c, d, ...` — a comma-separated list of
/// identifiers, each optionally renamed with `as`.
///
/// Duplicate exported identifiers (compared on the original name, not the
/// rename) are rejected with `DuplicatedExport`.
#[cfg(not(feature = "no_module"))]
fn parse_export(
    input: &mut TokenStream,
    state: &mut ParseState,
    _lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    settings.pos = eat_token(input, Token::Export);
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut exports = StaticVec::new();
    loop {
        // One export item: an identifier...
        let (id, id_pos) = match input.next().unwrap() {
            (Token::Identifier(s), pos) => (s.clone(), pos),
            (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
                return Err(PERR::Reserved(s).into_err(pos));
            }
            (Token::LexError(err), pos) => return Err(err.into_err(pos)),
            (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
        };
        // ...followed by an optional `as <rename>`.
        let rename = if match_token(input, Token::As)? {
            match input.next().unwrap() {
                (Token::Identifier(s), pos) => Some((s.clone(), pos)),
                (Token::Reserved(s), pos) if is_valid_identifier(s.chars()) => {
                    return Err(PERR::Reserved(s).into_err(pos));
                }
                (Token::LexError(err), pos) => return Err(err.into_err(pos)),
                (_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
            }
        } else {
            None
        };
        exports.push(((id, id_pos), rename));
        match input.peek().unwrap() {
            (Token::Comma, _) => {
                eat_token(input, Token::Comma);
            }
            // Two identifiers back-to-back: the separating comma is missing.
            (Token::Identifier(_), pos) => {
                return Err(PERR::MissingToken(
                    Token::Comma.into(),
                    "to separate the list of exports".into(),
                )
                .into_err(*pos))
            }
            _ => break,
        }
    }
    // Check for duplicating parameters
    // (Quadratic scan — fine, export lists are short.)
    exports
        .iter()
        .enumerate()
        .try_for_each(|(i, ((id1, _), _))| {
            exports
                .iter()
                .skip(i + 1)
                .find(|((id2, _), _)| id2 == id1)
                .map_or_else(|| Ok(()), |((id2, pos), _)| Err((id2, *pos)))
        })
        .map_err(|(id2, pos)| PERR::DuplicatedExport(id2.to_string()).into_err(pos))?;
    Ok(Stmt::Export(Box::new(exports)))
}
/// Parse a statement block: `{ stmt; stmt; ... }`.
///
/// Variables and module aliases declared inside the block are scoped to it:
/// the parse stack and the modules list are truncated back to their previous
/// lengths once the closing brace is reached.
fn parse_block(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // Must start with {
    settings.pos = match input.next().unwrap() {
        (Token::LeftBrace, pos) => pos,
        (Token::LexError(err), pos) => return Err(err.into_err(pos)),
        (_, pos) => {
            return Err(PERR::MissingToken(
                Token::LeftBrace.into(),
                "to start a statement block".into(),
            )
            .into_err(pos))
        }
    };
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut statements = StaticVec::new();
    // Remember the scope depths so declarations inside the block can be
    // discarded when the block ends.
    let prev_stack_len = state.stack.len();
    let prev_mods_len = state.modules.len();
    while !match_token(input, Token::RightBrace)? {
        // Parse statements inside the block
        settings.is_global = false;
        // `parse_stmt` returns `None` when the tokens consumed produce no
        // statement (e.g. a function definition) — just keep going.
        let stmt = if let Some(s) = parse_stmt(input, state, lib, settings.level_up())? {
            s
        } else {
            continue;
        };
        // See if it needs a terminating semicolon
        let need_semicolon = !stmt.is_self_terminated();
        statements.push(stmt);
        match input.peek().unwrap() {
            // { ... stmt }
            (Token::RightBrace, _) => {
                eat_token(input, Token::RightBrace);
                break;
            }
            // { ... stmt;
            (Token::SemiColon, _) if need_semicolon => {
                eat_token(input, Token::SemiColon);
            }
            // { ... { stmt } ;
            (Token::SemiColon, _) if !need_semicolon => (),
            // { ... { stmt } ???
            (_, _) if !need_semicolon => (),
            // { ... stmt <error>
            (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
            // { ... stmt ???
            (_, pos) => {
                // Semicolons are not optional between statements
                return Err(PERR::MissingToken(
                    Token::SemiColon.into(),
                    "to terminate this statement".into(),
                )
                .into_err(*pos));
            }
        }
    }
    // End of block: drop everything declared inside it.
    state.stack.truncate(prev_stack_len);
    state.modules.truncate(prev_mods_len);
    Ok(Stmt::Block(Box::new((statements, settings.pos))))
}
/// Parse a bare expression in statement position.
///
/// The expression may be followed by an operator-assignment (e.g. `x += 1`),
/// which is folded in by `parse_op_assignment_stmt` before the result is
/// wrapped into a `Stmt::Expr`.
fn parse_expr_stmt(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
    // The statement starts wherever the next token is.
    settings.pos = input.peek().unwrap().1;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // Parse the plain expression first, then hand it to the op-assignment
    // parser to absorb any trailing assignment operator.
    let parsed = parse_expr(input, state, lib, settings.level_up())?;
    parse_op_assignment_stmt(input, state, lib, parsed, settings.level_up())
        .map(|full_expr| Stmt::Expr(Box::new(full_expr)))
}
/// Parse a single statement.
///
/// Dispatches on the next token to the appropriate sub-parser.
///
/// Returns `Ok(None)` when the consumed tokens do not produce a statement in
/// the current block — currently only for function definitions, which are
/// hashed and inserted into `lib` instead.
fn parse_stmt(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<Option<Stmt>, ParseError> {
    use ScopeEntryType::{Constant, Normal};
    let (token, token_pos) = match input.peek().unwrap() {
        (Token::EOF, pos) => return Ok(Some(Stmt::Noop(*pos))),
        x => x,
    };
    settings.pos = *token_pos;
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    match token {
        // Semicolon - empty statement
        Token::SemiColon => Ok(Some(Stmt::Noop(settings.pos))),
        // { block }
        Token::LeftBrace => parse_block(input, state, lib, settings.level_up()).map(Some),
        // fn ...
        // Function definitions are only allowed at global level.
        #[cfg(not(feature = "no_function"))]
        Token::Fn if !settings.is_global => Err(PERR::WrongFnDefinition.into_err(settings.pos)),
        #[cfg(not(feature = "no_function"))]
        Token::Fn | Token::Private => {
            // An optional `private` keyword controls the function's access.
            let access = if matches!(token, Token::Private) {
                eat_token(input, Token::Private);
                FnAccess::Private
            } else {
                FnAccess::Public
            };
            match input.next().unwrap() {
                (Token::Fn, pos) => {
                    // Functions are parsed with a fresh state; both depth
                    // limits use the function-specific maximum here.
                    let mut state = ParseState::new(
                        state.engine,
                        state.max_function_expr_depth,
                        state.max_function_expr_depth,
                    );
                    let settings = ParseSettings {
                        allow_if_expr: true,
                        allow_stmt_expr: true,
                        allow_anonymous_fn: true,
                        is_global: false,
                        is_function_scope: true,
                        is_breakable: false,
                        level: 0,
                        pos: pos,
                    };
                    let func = parse_fn(input, &mut state, lib, access, settings)?;
                    // Qualifiers (none) + function name + number of arguments.
                    let hash = calc_fn_hash(empty(), &func.name, func.params.len(), empty());
                    lib.insert(hash, func);
                    // The definition went into `lib`; no statement results.
                    Ok(None)
                }
                (_, pos) => Err(PERR::MissingToken(
                    Token::Fn.into(),
                    format!("following '{}'", Token::Private.syntax()),
                )
                .into_err(pos)),
            }
        }
        Token::If => parse_if(input, state, lib, settings.level_up()).map(Some),
        Token::While => parse_while(input, state, lib, settings.level_up()).map(Some),
        Token::Loop => parse_loop(input, state, lib, settings.level_up()).map(Some),
        Token::For => parse_for(input, state, lib, settings.level_up()).map(Some),
        // `continue`/`break` are only valid inside a breakable construct.
        Token::Continue if settings.is_breakable => {
            let pos = eat_token(input, Token::Continue);
            Ok(Some(Stmt::Continue(pos)))
        }
        Token::Break if settings.is_breakable => {
            let pos = eat_token(input, Token::Break);
            Ok(Some(Stmt::Break(pos)))
        }
        Token::Continue | Token::Break => Err(PERR::LoopBreak.into_err(settings.pos)),
        Token::Return | Token::Throw => {
            let return_type = match input.next().unwrap() {
                (Token::Return, _) => ReturnType::Return,
                (Token::Throw, _) => ReturnType::Exception,
                _ => unreachable!(),
            };
            match input.peek().unwrap() {
                // `return`/`throw` at <EOF>
                (Token::EOF, pos) => Ok(Some(Stmt::ReturnWithVal(Box::new((
                    (return_type, *pos),
                    None,
                ))))),
                // `return;` or `throw;`
                (Token::SemiColon, _) => Ok(Some(Stmt::ReturnWithVal(Box::new((
                    (return_type, settings.pos),
                    None,
                ))))),
                // `return` or `throw` with expression
                (_, _) => {
                    let expr = parse_expr(input, state, lib, settings.level_up())?;
                    let pos = expr.position();
                    Ok(Some(Stmt::ReturnWithVal(Box::new((
                        (return_type, pos),
                        Some(expr),
                    )))))
                }
            }
        }
        Token::Let => parse_let(input, state, lib, Normal, settings.level_up()).map(Some),
        Token::Const => parse_let(input, state, lib, Constant, settings.level_up()).map(Some),
        #[cfg(not(feature = "no_module"))]
        Token::Import => parse_import(input, state, lib, settings.level_up()).map(Some),
        // `export` is only valid at global level.
        #[cfg(not(feature = "no_module"))]
        Token::Export if !settings.is_global => Err(PERR::WrongExport.into_err(settings.pos)),
        #[cfg(not(feature = "no_module"))]
        Token::Export => parse_export(input, state, lib, settings.level_up()).map(Some),
        // Anything else: a bare expression statement.
        _ => parse_expr_stmt(input, state, lib, settings.level_up()).map(Some),
    }
}
/// Parse a function definition.
///
/// Grammar: `fn name(p1, p2, ...) { body }` — the leading `fn` (and any
/// `private` qualifier, reflected in `access`) has already been consumed by
/// the caller.
///
/// Parameter names are pushed onto the parse stack so the body can resolve
/// them; duplicate parameter names are rejected.
#[cfg(not(feature = "no_function"))]
fn parse_fn(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    access: FnAccess,
    mut settings: ParseSettings,
) -> Result<ScriptFnDef, ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    // Function name: any valid identifier except `this`.
    let name = match input.next().unwrap() {
        (Token::Identifier(s), _) | (Token::Custom(s), _) | (Token::Reserved(s), _)
            if s != KEYWORD_THIS && is_valid_identifier(s.chars()) =>
        {
            s
        }
        (_, pos) => return Err(PERR::FnMissingName.into_err(pos)),
    };
    match input.peek().unwrap() {
        (Token::LeftParen, _) => eat_token(input, Token::LeftParen),
        (_, pos) => return Err(PERR::FnMissingParams(name).into_err(*pos)),
    };
    let mut params = Vec::new();
    if !match_token(input, Token::RightParen)? {
        let sep_err = format!("to separate the parameters of function '{}'", name);
        loop {
            // Expect a parameter name (or the closing parenthesis)...
            match input.next().unwrap() {
                (Token::RightParen, _) => break,
                (Token::Identifier(s), pos) => {
                    state.stack.push((s.clone(), ScopeEntryType::Normal));
                    params.push((s, pos))
                }
                (Token::LexError(err), pos) => return Err(err.into_err(pos)),
                (_, pos) => {
                    return Err(PERR::MissingToken(
                        Token::RightParen.into(),
                        format!("to close the parameters list of function '{}'", name),
                    )
                    .into_err(pos))
                }
            }
            // ...followed by either a separating comma or the closing
            // parenthesis.
            match input.next().unwrap() {
                (Token::RightParen, _) => break,
                (Token::Comma, _) => (),
                (Token::LexError(err), pos) => return Err(err.into_err(pos)),
                (_, pos) => {
                    return Err(PERR::MissingToken(Token::Comma.into(), sep_err).into_err(pos))
                }
            }
        }
    }
    // Check for duplicating parameters
    // (Quadratic scan — fine, parameter lists are short.)
    params
        .iter()
        .enumerate()
        .try_for_each(|(i, (p1, _))| {
            params
                .iter()
                .skip(i + 1)
                .find(|(p2, _)| p2 == p1)
                .map_or_else(|| Ok(()), |(p2, pos)| Err((p2, *pos)))
        })
        .map_err(|(p, pos)| {
            PERR::FnDuplicatedParam(name.to_string(), p.to_string()).into_err(pos)
        })?;
    // Parse function body
    let body = match input.peek().unwrap() {
        (Token::LeftBrace, _) => {
            // `break`/`continue` cannot cross a function boundary.
            settings.is_breakable = false;
            parse_block(input, state, lib, settings.level_up())?
        }
        (_, pos) => return Err(PERR::FnMissingBody(name).into_err(*pos)),
    };
    // Keep only the parameter names; positions were needed for errors only.
    let params = params.into_iter().map(|(p, _)| p).collect();
    Ok(ScriptFnDef {
        name,
        access,
        params,
        body,
        pos: settings.pos,
    })
}
/// Parse an anonymous function definition.
///
/// Grammar: `|p1, p2, ...| <stmt>` — this function itself consumes the
/// opening token, which is either `|` (`Token::Pipe`) or `||` (`Token::Or`,
/// denoting an empty parameter list).
///
/// Returns both a function-pointer expression referring to the generated
/// function and the function definition itself. The generated name is
/// `FN_ANONYMOUS` followed by a hash of the parameters and body, making it
/// unique and deterministic.
#[cfg(not(feature = "no_function"))]
fn parse_anon_fn(
    input: &mut TokenStream,
    state: &mut ParseState,
    lib: &mut FunctionsLib,
    mut settings: ParseSettings,
) -> Result<(Expr, ScriptFnDef), ParseError> {
    settings.ensure_level_within_max_limit(state.max_expr_depth)?;
    let mut params = Vec::new();
    // `||` means no parameters at all; otherwise the opening `|` was just
    // consumed and we parse names until the closing `|`.
    if input.next().unwrap().0 != Token::Or {
        if !match_token(input, Token::Pipe)? {
            loop {
                match input.next().unwrap() {
                    (Token::Pipe, _) => break,
                    (Token::Identifier(s), pos) => {
                        state.stack.push((s.clone(), ScopeEntryType::Normal));
                        params.push((s, pos))
                    }
                    (Token::LexError(err), pos) => return Err(err.into_err(pos)),
                    (_, pos) => {
                        return Err(PERR::MissingToken(
                            Token::Pipe.into(),
                            "to close the parameters list of anonymous function".into(),
                        )
                        .into_err(pos))
                    }
                }
                match input.next().unwrap() {
                    (Token::Pipe, _) => break,
                    (Token::Comma, _) => (),
                    (Token::LexError(err), pos) => return Err(err.into_err(pos)),
                    (_, pos) => {
                        return Err(PERR::MissingToken(
                            Token::Comma.into(),
                            "to separate the parameters of anonymous function".into(),
                        )
                        .into_err(pos))
                    }
                }
            }
        }
    }
    // Check for duplicating parameters
    params
        .iter()
        .enumerate()
        .try_for_each(|(i, (p1, _))| {
            params
                .iter()
                .skip(i + 1)
                .find(|(p2, _)| p2 == p1)
                .map_or_else(|| Ok(()), |(p2, pos)| Err((p2, *pos)))
        })
        .map_err(|(p, pos)| PERR::FnDuplicatedParam("".to_string(), p.to_string()).into_err(pos))?;
    // Parse function body
    settings.is_breakable = false;
    let pos = input.peek().unwrap().1;
    // The body is a single statement; an empty production becomes a no-op.
    let body = parse_stmt(input, state, lib, settings.level_up())
        .map(|stmt| stmt.unwrap_or_else(|| Stmt::Noop(pos)))?;
    let params: StaticVec<_> = params.into_iter().map(|(p, _)| p).collect();
    // Calculate hash
    // Hash the parameter count, parameter names and body so identical
    // closures get identical (stable) names.
    #[cfg(feature = "no_std")]
    let mut s: AHasher = Default::default();
    #[cfg(not(feature = "no_std"))]
    let mut s = DefaultHasher::new();
    s.write_usize(params.len());
    params.iter().for_each(|a| a.hash(&mut s));
    body.hash(&mut s);
    let hash = s.finish();
    // Create unique function name
    let fn_name = format!("{}{}", FN_ANONYMOUS, hash);
    let script = ScriptFnDef {
        name: fn_name.clone(),
        access: FnAccess::Public,
        params,
        body,
        pos: settings.pos,
    };
    let expr = Expr::FnPointer(Box::new((fn_name.into(), settings.pos)));
    Ok((expr, script))
}
impl Engine {
    /// Parse a single global-level expression and return it as an optimized
    /// `AST`.
    ///
    /// `if` expressions, statement expressions and anonymous functions are
    /// all disallowed, and any tokens remaining after the expression are an
    /// error.
    pub(crate) fn parse_global_expr(
        &self,
        input: &mut TokenStream,
        scope: &Scope,
        optimization_level: OptimizationLevel,
    ) -> Result<AST, ParseError> {
        let mut functions = Default::default();
        let mut state = ParseState::new(self, self.max_expr_depth, self.max_function_expr_depth);
        let settings = ParseSettings {
            allow_if_expr: false,
            allow_stmt_expr: false,
            allow_anonymous_fn: false,
            is_global: true,
            is_function_scope: false,
            is_breakable: false,
            level: 0,
            pos: Position::none(),
        };
        let expr = parse_expr(input, &mut state, &mut functions, settings)?;
        match input.peek().unwrap() {
            (Token::EOF, _) => (),
            // Return error if the expression doesn't end
            (token, pos) => {
                return Err(
                    PERR::BadInput(format!("Unexpected '{}'", token.syntax())).into_err(*pos)
                )
            }
        }
        // Wrap the lone expression as a single expression statement.
        let expr = vec![Stmt::Expr(Box::new(expr))];
        Ok(
            // Optimize AST
            optimize_into_ast(self, scope, expr, Default::default(), optimization_level),
        )
    }
    /// Parse the global level statements.
    ///
    /// Returns the list of top-level statements plus all script-defined
    /// functions collected along the way.
    fn parse_global_level(
        &self,
        input: &mut TokenStream,
    ) -> Result<(Vec<Stmt>, Vec<ScriptFnDef>), ParseError> {
        let mut statements: Vec<Stmt> = Default::default();
        let mut functions = Default::default();
        let mut state = ParseState::new(self, self.max_expr_depth, self.max_function_expr_depth);
        while !input.peek().unwrap().0.is_eof() {
            // Fresh settings for every top-level statement.
            let settings = ParseSettings {
                allow_if_expr: true,
                allow_stmt_expr: true,
                allow_anonymous_fn: true,
                is_global: true,
                is_function_scope: false,
                is_breakable: false,
                level: 0,
                pos: Position::none(),
            };
            // `None` means a function definition was consumed into `functions`.
            let stmt = if let Some(s) = parse_stmt(input, &mut state, &mut functions, settings)? {
                s
            } else {
                continue;
            };
            let need_semicolon = !stmt.is_self_terminated();
            statements.push(stmt);
            match input.peek().unwrap() {
                // EOF
                (Token::EOF, _) => break,
                // stmt ;
                (Token::SemiColon, _) if need_semicolon => {
                    eat_token(input, Token::SemiColon);
                }
                // stmt ;
                (Token::SemiColon, _) if !need_semicolon => (),
                // { stmt } ???
                (_, _) if !need_semicolon => (),
                // stmt <error>
                (Token::LexError(err), pos) => return Err(err.into_err(*pos)),
                // stmt ???
                (_, pos) => {
                    // Semicolons are not optional between statements
                    return Err(PERR::MissingToken(
                        Token::SemiColon.into(),
                        "to terminate this statement".into(),
                    )
                    .into_err(*pos));
                }
            }
        }
        Ok((statements, functions.into_iter().map(|(_, v)| v).collect()))
    }
    /// Run the parser on an input stream, returning an AST.
    pub(crate) fn parse(
        &self,
        input: &mut TokenStream,
        scope: &Scope,
        optimization_level: OptimizationLevel,
    ) -> Result<AST, ParseError> {
        let (statements, lib) = self.parse_global_level(input)?;
        Ok(
            // Optimize AST
            optimize_into_ast(self, scope, statements, lib, optimization_level),
        )
    }
}
/// Map a `Dynamic` value to an expression.
///
/// Returns Some(expression) if conversion is successful. Otherwise None.
///
/// Only constant-like values convert: primitives map directly, while arrays
/// and object maps convert only if every contained value itself converts.
pub fn map_dynamic_to_expr(value: Dynamic, pos: Position) -> Option<Expr> {
    match value.0 {
        #[cfg(not(feature = "no_float"))]
        Union::Float(value) => Some(Expr::FloatConstant(Box::new(FloatWrapper(value, pos)))),
        Union::Unit(_) => Some(Expr::Unit(pos)),
        Union::Int(value) => Some(Expr::IntegerConstant(Box::new((value, pos)))),
        Union::Char(value) => Some(Expr::CharConstant(Box::new((value, pos)))),
        // `value` is matched by value out of `value.0`, so it is already
        // owned here — the previous `value.clone()` was a redundant copy.
        Union::Str(value) => Some(Expr::StringConstant(Box::new((value, pos)))),
        Union::Bool(true) => Some(Expr::True(pos)),
        Union::Bool(false) => Some(Expr::False(pos)),
        #[cfg(not(feature = "no_index"))]
        Union::Array(array) => {
            // Convert every element; the whole array converts only when all
            // elements do.
            let items: Vec<_> = array
                .into_iter()
                .map(|x| map_dynamic_to_expr(x, pos))
                .collect();
            if items.iter().all(Option::is_some) {
                Some(Expr::Array(Box::new((
                    items.into_iter().map(Option::unwrap).collect(),
                    pos,
                ))))
            } else {
                None
            }
        }
        #[cfg(not(feature = "no_object"))]
        Union::Map(map) => {
            // Same all-or-nothing rule for object maps, keyed entries intact.
            let items: Vec<_> = map
                .into_iter()
                .map(|(k, v)| ((k, pos), map_dynamic_to_expr(v, pos)))
                .collect();
            if items.iter().all(|(_, expr)| expr.is_some()) {
                Some(Expr::Map(Box::new((
                    items
                        .into_iter()
                        .map(|((k, pos), expr)| ((k, pos), expr.unwrap()))
                        .collect(),
                    pos,
                ))))
            } else {
                None
            }
        }
        // Anything else (functions, custom types, ...) is not a constant.
        _ => None,
    }
}
| 34.45824 | 126 | 0.501022 |
e4e8ac511c52c3f189bc913882d4a0de144f2382 | 1,682 | use super::*;
use std::io::{Read, Write};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use util::Error;
#[cfg(test)]
mod handshake_message_certificate_test;
/// Number of bytes used for each length field in the certificate message: a
/// 24-bit big-endian unsigned integer.
const HANDSHAKE_MESSAGE_CERTIFICATE_LENGTH_FIELD_SIZE: usize = 3;
/// The `Certificate` handshake message: a list of raw certificate bodies.
#[derive(PartialEq, Debug)]
pub struct HandshakeMessageCertificate {
    // Each entry is one certificate's raw bytes (presumably DER-encoded —
    // TODO confirm against the callers).
    certificate: Vec<Vec<u8>>,
}
impl HandshakeMessageCertificate {
    // Identifies this message type within the handshake protocol.
    fn handshake_type() -> HandshakeType {
        HandshakeType::Certificate
    }
    /// Serialize the certificate list to `writer`.
    ///
    /// Wire format: a 24-bit big-endian total payload length, followed by
    /// each certificate encoded as its own 24-bit big-endian length plus the
    /// raw certificate bytes.
    pub fn marshal<W: Write>(&self, writer: &mut W) -> Result<(), Error> {
        // The payload covers every per-certificate length field plus body.
        let mut payload_size = 0;
        for r in &self.certificate {
            payload_size += HANDSHAKE_MESSAGE_CERTIFICATE_LENGTH_FIELD_SIZE + r.len();
        }
        // Total Payload Size
        writer.write_u24::<BigEndian>(payload_size as u32)?;
        for r in &self.certificate {
            // Certificate Length
            writer.write_u24::<BigEndian>(r.len() as u32)?;
            // Certificate body
            writer.write_all(r)?;
        }
        Ok(())
    }
    /// Parse a certificate list from `reader` (the inverse of [`Self::marshal`]).
    ///
    /// Reads the 24-bit total payload length, then repeatedly reads
    /// length-prefixed certificate bodies until the payload is consumed.
    pub fn unmarshal<R: Read>(reader: &mut R) -> Result<Self, Error> {
        let mut certificate: Vec<Vec<u8>> = vec![];
        let payload_size = reader.read_u24::<BigEndian>()? as usize;
        // `offset` tracks consumed bytes, counting both the 3-byte length
        // field and the body of each entry — mirroring `marshal` above.
        let mut offset = 0;
        while offset < payload_size {
            let certificate_len = reader.read_u24::<BigEndian>()? as usize;
            offset += HANDSHAKE_MESSAGE_CERTIFICATE_LENGTH_FIELD_SIZE;
            let mut buf = vec![0; certificate_len];
            reader.read_exact(&mut buf)?;
            offset += certificate_len;
            certificate.push(buf);
        }
        Ok(HandshakeMessageCertificate { certificate })
    }
}
| 26.698413 | 86 | 0.614744 |
ef469c51da2d2f6fbb30666a7cd730622900e4d3 | 17,066 | use std::error::Error;
use std::io::{self};
use std::sync::{atomic::AtomicBool, atomic::Ordering, Arc, RwLock};
use super::events::{Event, Events};
use crate::network::{NetworkInfo, PacketInfo, PacketType};
use termion::{event::Key, raw::IntoRawMode};
use tui::{
backend::TermionBackend,
layout::{Alignment, Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Span, Spans},
widgets::{Block, Borders, List, ListItem, ListState, Paragraph},
Terminal,
};
#[allow(unused_imports)]
use pnet::packet::{
arp::ArpPacket, icmp::IcmpPacket, icmpv6::Icmpv6Packet, tcp::TcpPacket, udp::UdpPacket,
};
/// Main function which renders UI on the terminal
///
/// Draws three panes: the captured-packet list, a detail pane describing the
/// selected packet, and a one-line footer with capture statistics. Keyboard
/// events drive the UI: `Up`/`Down` move the selection (wrapping around),
/// `Tab` toggles focus between the two list panes, `Left` clears the packet
/// selection, and `q` quits.
///
/// # Errors
///
/// Returns an error if the terminal cannot be initialized or drawn, or if
/// the keyboard event channel fails.
pub fn draw_ui(
    net_info: Arc<RwLock<NetworkInfo>>,
    running: Arc<AtomicBool>,
) -> Result<(), Box<dyn Error>> {
    let stdout = io::stdout().into_raw_mode()?;
    let backend = TermionBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;

    let events = Events::new();

    // `true` while the packet list pane has keyboard focus; `false` for the
    // packet-information pane.
    let mut packets_state_selected = true;

    let mut packets_state = ListState::default();
    let mut packets_info_state = ListState::default();
    // Number of rows currently shown in the packet-information pane.
    let mut packets_info_len: usize = 0;

    while running.load(Ordering::Relaxed) {
        terminal.draw(|f| {
            // Setting the layout of the UI
            let chunks = Layout::default()
                .direction(Direction::Vertical)
                .constraints(
                    [
                        Constraint::Percentage(80),
                        Constraint::Percentage(15),
                        Constraint::Percentage(5),
                    ]
                    .as_ref(),
                )
                .split(f.size());

            // Header for packet capture view
            let header = Spans::from(Span::styled(
                get_packets_ui_header(),
                Style::default().fg(Color::Black).bg(Color::White),
            ));

            // Getting info about packets captured
            let items: Vec<ListItem> = net_info
                .read()
                .unwrap()
                .packets
                .iter()
                .map(|i| {
                    let ptype = get_packet_info(i);
                    ListItem::new(Spans::from(ptype))
                        .style(Style::default().fg(Color::White).bg(Color::Black))
                })
                .collect();

            let items = List::new(items)
                .block(
                    Block::default()
                        .title(header)
                        .borders(Borders::ALL)
                        .style(Style::default().bg(Color::Black)),
                )
                .highlight_style(Style::default().bg(Color::Red).add_modifier(Modifier::BOLD));

            // Rendering the packets that are captured
            f.render_stateful_widget(items, chunks[0], &mut packets_state);

            // Rendering logic for displaying packet information in the bottom window pane
            if let Some(i) = packets_state.selected() {
                // Guard against a selection that outruns the live packet
                // list (packets keep arriving between draws).
                if i < net_info.read().unwrap().packets.len() {
                    let items: Vec<ListItem> =
                        get_packet_description(&net_info.read().unwrap().packets[i])
                            .iter()
                            .map(|field| {
                                let field_val = field.to_string();
                                ListItem::new(Spans::from(field_val))
                                    .style(Style::default().fg(Color::White).bg(Color::Black))
                            })
                            .collect();
                    packets_info_len = items.len();
                    let items = List::new(items)
                        .block(
                            Block::default()
                                .title("Packet Information")
                                .borders(Borders::ALL)
                                .style(Style::default().bg(Color::Black)),
                        )
                        .highlight_style(
                            Style::default().bg(Color::Red).add_modifier(Modifier::BOLD),
                        );
                    f.render_stateful_widget(items, chunks[1], &mut packets_info_state);
                }
            } else {
                // No packet selected: render an empty information pane.
                let items = List::new(vec![])
                    .block(
                        Block::default()
                            .title("Packet Information")
                            .borders(Borders::ALL)
                            .style(Style::default().bg(Color::Black)),
                    )
                    .highlight_style(Style::default().bg(Color::Red).add_modifier(Modifier::BOLD));
                f.render_stateful_widget(items, chunks[1], &mut packets_info_state);
            }

            // Footer info rendering
            let footer = vec![Spans::from(vec![
                Span::raw(format!(
                    "Captured Packets: {} ",
                    net_info.read().unwrap().captured_packets
                )),
                Span::raw(format!(
                    "Dropped Packets: {} ",
                    net_info.read().unwrap().dropped_packets
                )),
            ])];

            let footer_para = Paragraph::new(footer)
                .block(Block::default())
                .style(Style::default().fg(Color::White).bg(Color::Black))
                .alignment(Alignment::Left);

            f.render_widget(footer_para, chunks[2]);
        })?;

        // Capture events from the keyboard
        match events.next()? {
            Event::Input(input) => match input {
                Key::Char('q') => {
                    terminal.clear()?;
                    running.store(false, Ordering::SeqCst);
                }
                Key::Left => {
                    packets_state.select(None);
                }
                Key::Down => {
                    if packets_state_selected {
                        // Wrap to the top once the last row is reached.
                        // `i + 1 >= len` (rather than `i >= len`) stops the
                        // selection from ever pointing one past the end.
                        let len = net_info.read().unwrap().packets.len();
                        let i = match packets_state.selected() {
                            Some(i) => {
                                if i + 1 >= len {
                                    0
                                } else {
                                    i + 1
                                }
                            }
                            None => 0,
                        };
                        packets_state.select(Some(i));
                    } else {
                        let i = match packets_info_state.selected() {
                            Some(i) => {
                                if i + 1 >= packets_info_len {
                                    0
                                } else {
                                    i + 1
                                }
                            }
                            None => 0,
                        };
                        packets_info_state.select(Some(i));
                    }
                }
                Key::Up => {
                    if packets_state_selected {
                        // `saturating_sub` keeps the wrap-around from
                        // underflowing (and panicking) while the capture
                        // list is still empty.
                        let len = net_info.read().unwrap().packets.len();
                        let i = match packets_state.selected() {
                            Some(i) => {
                                if i == 0 {
                                    len.saturating_sub(1)
                                } else {
                                    i - 1
                                }
                            }
                            None => 0,
                        };
                        packets_state.select(Some(i));
                    } else {
                        let i = match packets_info_state.selected() {
                            Some(i) => {
                                if i == 0 {
                                    packets_info_len.saturating_sub(1)
                                } else {
                                    i - 1
                                }
                            }
                            None => 0,
                        };
                        packets_info_state.select(Some(i));
                    }
                }
                Key::Char('\t') => {
                    packets_state_selected = !packets_state_selected;
                }
                _ => {}
            },
            Event::Tick => {}
        }
    }
    Ok(())
}
/// Get header of packet capture UI
///
/// Builds the fixed-width column title row shown above the packet list. The
/// column widths (20/20/10/6/20, left-aligned, space-separated) match the
/// per-packet rows produced by `get_packet_info`.
fn get_packets_ui_header() -> String {
    // Column titles paired with their display widths.
    let columns: [(&str, usize); 5] = [
        ("Source", 20),
        ("Destination", 20),
        ("Protocol", 10),
        ("Length", 6),
        ("Info", 20),
    ];
    columns
        .iter()
        .copied()
        .map(|(title, width)| format!("{:<width$}", title, width = width))
        .collect::<Vec<_>>()
        .join(" ")
}
/// Get brief packet info
///
/// Formats a single summary row for the packet list, matching the column
/// layout of `get_packets_ui_header`: source, destination, protocol name,
/// payload length, and a protocol-specific "info" column (ports for TCP/UDP,
/// operation for ARP, code for ICMP).
fn get_packet_info(packet: &PacketInfo) -> String {
    match packet.packet_type {
        PacketType::TCP => {
            let raw_packet = packet.packet_data.packet();
            let payload = packet.packet_data.payload();
            // "NA" when the IP layer was not captured for this packet.
            let source_ip = if let Some(ip) = packet.source_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let dest_ip = if let Some(ip) = packet.dest_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let tcp = TcpPacket::new(raw_packet);
            if let Some(tcp) = tcp {
                format!(
                    "{:<20} {:<20} {:<10} {:<6} {:<6}->{:<6}",
                    source_ip,
                    dest_ip,
                    "TCP",
                    payload.to_vec().len(),
                    tcp.get_source(),
                    tcp.get_destination()
                )
            } else {
                "TCP packet malformed".to_string()
            }
        }
        PacketType::UDP => {
            let raw_packet = packet.packet_data.packet();
            let payload = packet.packet_data.payload();
            let source_ip = if let Some(ip) = packet.source_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let dest_ip = if let Some(ip) = packet.dest_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let udp = UdpPacket::new(raw_packet);
            if let Some(udp) = udp {
                format!(
                    "{:<20} {:<20} {:<10} {:<6} {:<6}->{:<6}",
                    source_ip,
                    dest_ip,
                    "UDP",
                    payload.to_vec().len(),
                    udp.get_source(),
                    udp.get_destination()
                )
            } else {
                "UDP packet malformed".to_string()
            }
        }
        PacketType::ARP => {
            let raw_packet = packet.packet_data.packet();
            let payload = packet.packet_data.payload();
            let arp = ArpPacket::new(raw_packet);
            if let Some(arp) = arp {
                // ARP rows show hardware (MAC) addresses instead of IPs.
                format!(
                    "{:<20} {:<20} {:<10} {:<6} {:?}",
                    arp.get_sender_hw_addr(),
                    arp.get_target_hw_addr(),
                    "ARP",
                    payload.to_vec().len(),
                    arp.get_operation()
                )
            } else {
                "ARP malformed".to_string()
            }
        }
        PacketType::ICMP => {
            let raw_packet = packet.packet_data.packet();
            let payload = packet.packet_data.payload();
            let source_ip = if let Some(ip) = packet.source_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let dest_ip = if let Some(ip) = packet.dest_ip {
                ip.to_string()
            } else {
                "NA".to_string()
            };
            let icmp = IcmpPacket::new(raw_packet);
            // TODO: Improve print information based on ICMP Type
            if let Some(icmp) = icmp {
                format!(
                    "{:<20} {:<20} {:<10} {:<6} {:?}",
                    source_ip,
                    dest_ip,
                    "ICMP",
                    payload.to_vec().len(),
                    icmp.get_icmp_code()
                )
            } else {
                "ICMP packet malformed".to_string()
            }
        }
        // TODO: Print information for ICMP
        PacketType::ICMPv6 => "ICMPv6".to_string(),
    }
}
/// Get detailed packet description
///
/// Produces one line per interesting header field of the given packet for
/// display in the packet-information pane. Addresses that were not parsed
/// are rendered as "NA". ICMPv6 packets are not yet described.
fn get_packet_description(packet: &PacketInfo) -> Vec<String> {
    let mut pkt_desc: Vec<String> = vec![];
    match packet.packet_type {
        PacketType::TCP => {
            let raw_packet = packet.packet_data.packet();
            push_ip_fields(&mut pkt_desc, packet);
            let tcp = TcpPacket::new(raw_packet);
            if let Some(tcp) = tcp {
                pkt_desc.push(format!("Source Port: {}", tcp.get_source()));
                pkt_desc.push(format!("Destination Port: {}", tcp.get_destination()));
                pkt_desc.push(format!("Sequence Number: {}", tcp.get_sequence()));
                pkt_desc.push(format!(
                    "Acknowledgement Number: {}",
                    tcp.get_acknowledgement()
                ));
                pkt_desc.push(format!("Flags: {:b}", tcp.get_flags()));
                pkt_desc.push(format!("Window: {}", tcp.get_window()));
            }
        }
        PacketType::UDP => {
            let raw_packet = packet.packet_data.packet();
            push_ip_fields(&mut pkt_desc, packet);
            let udp = UdpPacket::new(raw_packet);
            if let Some(udp) = udp {
                pkt_desc.push(format!("Source Port: {}", udp.get_source()));
                pkt_desc.push(format!("Destination Port: {}", udp.get_destination()));
            }
        }
        PacketType::ARP => {
            // ARP carries its own addresses; no IP-layer fields here.
            let raw_packet = packet.packet_data.packet();
            let arp = ArpPacket::new(raw_packet);
            if let Some(arp) = arp {
                pkt_desc.push(format!("Hardware Type: {:?}", arp.get_hardware_type()));
                pkt_desc.push(format!("Protocol Type: {:?}", arp.get_protocol_type()));
                // TODO: Elaborate on the ARP option
                pkt_desc.push(format!("Operation: {:?}", arp.get_operation()));
                pkt_desc.push(format!(
                    "Sender Hardware Address: {}",
                    arp.get_sender_hw_addr()
                ));
                pkt_desc.push(format!(
                    "Target Hardware Address: {}",
                    arp.get_target_hw_addr()
                ));
                pkt_desc.push(format!(
                    "Sender IP Address: {}",
                    arp.get_sender_proto_addr()
                ));
                pkt_desc.push(format!(
                    "Target IP Address: {}",
                    arp.get_target_proto_addr()
                ));
            }
        }
        PacketType::ICMP => {
            let raw_packet = packet.packet_data.packet();
            push_ip_fields(&mut pkt_desc, packet);
            let icmp = IcmpPacket::new(raw_packet);
            // TODO: Expand description based on ICMP type
            if let Some(icmp) = icmp {
                pkt_desc.push(format!("ICMP Type: {:?}", icmp.get_icmp_type()));
                pkt_desc.push(format!("ICMP Code: {:?}", icmp.get_icmp_code()));
            }
        }
        // TODO: Packet description for ICMPv6 packets
        PacketType::ICMPv6 => pkt_desc.push("None".to_string()),
    };
    pkt_desc
}

/// Append "Source IP: ..." / "Destination IP: ..." lines to `pkt_desc`,
/// using "NA" when the corresponding address was not parsed. Extracted from
/// four duplicated if/else blocks in `get_packet_description`.
fn push_ip_fields(pkt_desc: &mut Vec<String>, packet: &PacketInfo) {
    let source = packet
        .source_ip
        .map_or_else(|| "NA".to_string(), |ip| ip.to_string());
    let dest = packet
        .dest_ip
        .map_or_else(|| "NA".to_string(), |ip| ip.to_string());
    pkt_desc.push(format!("Source IP: {}", source));
    pkt_desc.push(format!("Destination IP: {}", dest));
}
| 36.622318 | 99 | 0.421716 |
01a5691223bfc6ef4db924e6424432aa37970eb2 | 12,517 | use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{differing_macro_contexts, path_to_local, usage::is_potentially_mutated};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, PathSegment, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::Ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
declare_clippy_lint! {
    /// ### What it does
    /// Checks for calls of `unwrap[_err]()` that cannot fail because the
    /// value's variant was already verified with `is_some`/`is_ok` (or a
    /// negated `is_none`/`is_err`) in the enclosing `if` condition.
    ///
    /// ### Why is this bad?
    /// Using `if let` or `match` is more idiomatic.
    ///
    /// ### Example
    /// ```rust
    /// # let option = Some(0);
    /// # fn do_something_with(_x: usize) {}
    /// if option.is_some() {
    ///     do_something_with(option.unwrap())
    /// }
    /// ```
    ///
    /// Could be written:
    ///
    /// ```rust
    /// # let option = Some(0);
    /// # fn do_something_with(_x: usize) {}
    /// if let Some(value) = option {
    ///     do_something_with(value)
    /// }
    /// ```
    #[clippy::version = "pre 1.29.0"]
    pub UNNECESSARY_UNWRAP,
    complexity,
    "checks for calls of `unwrap[_err]()` that cannot fail"
}
declare_clippy_lint! {
    /// ### What it does
    /// Checks for calls of `unwrap[_err]()` that will always fail because the
    /// enclosing `if` condition proved the opposite variant.
    ///
    /// ### Why is this bad?
    /// If panicking is desired, an explicit `panic!()` should be used.
    ///
    /// ### Known problems
    /// This lint only checks `if` conditions not assignments.
    /// So something like `let x: Option<()> = None; x.unwrap();` will not be recognized.
    ///
    /// ### Example
    /// ```rust
    /// # let option = Some(0);
    /// # fn do_something_with(_x: usize) {}
    /// if option.is_none() {
    ///     do_something_with(option.unwrap())
    /// }
    /// ```
    ///
    /// This code will always panic. The if condition should probably be inverted.
    #[clippy::version = "pre 1.29.0"]
    pub PANICKING_UNWRAP,
    correctness,
    "checks for calls of `unwrap[_err]()` that will always fail"
}
/// Visitor that keeps track of which variables are unwrappable.
struct UnwrappableVariablesVisitor<'a, 'tcx> {
    /// Variables currently known to be checked, managed like a stack:
    /// entries are pushed on entering a conditional branch and truncated
    /// away when the branch is left (see `visit_branch`).
    unwrappables: Vec<UnwrapInfo<'tcx>>,
    /// Lint context used for type lookups and emitting diagnostics.
    cx: &'a LateContext<'tcx>,
}
/// What kind of unwrappable this is.
#[derive(Copy, Clone, Debug)]
enum UnwrappableKind {
    /// The checked expression has type `Option<T>`.
    Option,
    /// The checked expression has type `Result<T, E>`.
    Result,
}
impl UnwrappableKind {
    /// Pattern text matching the success variant of this kind
    /// (`Some(..)` for options, `Ok(..)` for results).
    fn success_variant_pattern(self) -> &'static str {
        if matches!(self, UnwrappableKind::Option) {
            "Some(..)"
        } else {
            "Ok(..)"
        }
    }

    /// Pattern text matching the failure variant of this kind
    /// (`None` for options, `Err(..)` for results).
    fn error_variant_pattern(self) -> &'static str {
        if matches!(self, UnwrappableKind::Option) {
            "None"
        } else {
            "Err(..)"
        }
    }
}
/// Contains information about whether a variable can be unwrapped.
#[derive(Copy, Clone, Debug)]
struct UnwrapInfo<'tcx> {
    /// The variable that is checked
    local_id: HirId,
    /// The if itself
    if_expr: &'tcx Expr<'tcx>,
    /// The check, like `x.is_ok()`
    check: &'tcx Expr<'tcx>,
    /// The check's name, like `is_ok`
    check_name: &'tcx PathSegment<'tcx>,
    /// The branch where the check takes place, like `if x.is_ok() { .. }`
    branch: &'tcx Expr<'tcx>,
    /// Whether `is_some()` or `is_ok()` was called (as opposed to `is_err()` or `is_none()`).
    safe_to_unwrap: bool,
    /// What kind of unwrappable this is.
    kind: UnwrappableKind,
    /// If the check is the entire condition (`if x.is_ok()`) or only a part of it (`foo() &&
    /// x.is_ok()`). Only an entire condition can be machine-rewritten to
    /// `if let`, so this gates the structured suggestion.
    is_entire_condition: bool,
}
/// Collects the information about unwrappable variables from an if condition
/// The `invert` argument tells us whether the condition is negated.
fn collect_unwrap_info<'tcx>(
    cx: &LateContext<'tcx>,
    if_expr: &'tcx Expr<'_>,
    expr: &'tcx Expr<'_>,
    branch: &'tcx Expr<'_>,
    invert: bool,
    is_entire_condition: bool,
) -> Vec<UnwrapInfo<'tcx>> {
    // True for `is_some`/`is_none` called on an `Option`.
    fn is_relevant_option_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
        is_type_diagnostic_item(cx, ty, sym::Option) && ["is_some", "is_none"].contains(&method_name)
    }
    // True for `is_ok`/`is_err` called on a `Result`.
    fn is_relevant_result_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
        is_type_diagnostic_item(cx, ty, sym::Result) && ["is_ok", "is_err"].contains(&method_name)
    }
    if let ExprKind::Binary(op, left, right) = &expr.kind {
        match (invert, op.node) {
            // `a && b` — or, by De Morgan, a negated `a || b` — means both
            // operands hold inside the branch, so info from both sides is
            // collected. Subexpressions are never the entire condition,
            // hence `false` for the last argument.
            (false, BinOpKind::And | BinOpKind::BitAnd) | (true, BinOpKind::Or | BinOpKind::BitOr) => {
                let mut unwrap_info = collect_unwrap_info(cx, if_expr, left, branch, invert, false);
                unwrap_info.append(&mut collect_unwrap_info(cx, if_expr, right, branch, invert, false));
                return unwrap_info;
            },
            _ => (),
        }
    } else if let ExprKind::Unary(UnOp::Not, expr) = &expr.kind {
        // `!cond` flips the meaning of every check found underneath.
        return collect_unwrap_info(cx, if_expr, expr, branch, !invert, false);
    } else {
        if_chain! {
            if let ExprKind::MethodCall(method_name, _, args, _) = &expr.kind;
            if let Some(local_id) = path_to_local(&args[0]);
            let ty = cx.typeck_results().expr_ty(&args[0]);
            let name = method_name.ident.as_str();
            if is_relevant_option_call(cx, ty, name) || is_relevant_result_call(cx, ty, name);
            then {
                assert!(args.len() == 1);
                // Whether the called check asserts the *success* variant.
                let unwrappable = match name.as_ref() {
                    "is_some" | "is_ok" => true,
                    "is_err" | "is_none" => false,
                    _ => unreachable!(),
                };
                // A success check under no negation — or a failure check
                // under a negation — makes unwrapping safe in this branch.
                let safe_to_unwrap = unwrappable != invert;
                let kind = if is_type_diagnostic_item(cx, ty, sym::Option) {
                    UnwrappableKind::Option
                } else {
                    UnwrappableKind::Result
                };
                return vec![
                    UnwrapInfo {
                        local_id,
                        if_expr,
                        check: expr,
                        check_name: method_name,
                        branch,
                        safe_to_unwrap,
                        kind,
                        is_entire_condition,
                    }
                ]
            }
        }
    }
    // Any other expression shape yields no unwrappability information.
    Vec::new()
}
impl<'a, 'tcx> UnwrappableVariablesVisitor<'a, 'tcx> {
    /// Records the unwrappable variables implied by `cond`, visits `branch`
    /// with that knowledge in scope, then restores the previous state so the
    /// facts do not leak outside the branch.
    fn visit_branch(
        &mut self,
        if_expr: &'tcx Expr<'_>,
        cond: &'tcx Expr<'_>,
        branch: &'tcx Expr<'_>,
        else_branch: bool,
    ) {
        let saved_depth = self.unwrappables.len();
        let cx = self.cx;
        // A variable that might be mutated in the condition or in the branch
        // can no longer be assumed unwrappable, so such entries are dropped.
        let stable_infos = collect_unwrap_info(cx, if_expr, cond, branch, else_branch, true)
            .into_iter()
            .filter(|info| {
                !is_potentially_mutated(info.local_id, cond, cx)
                    && !is_potentially_mutated(info.local_id, branch, cx)
            });
        self.unwrappables.extend(stable_infos);
        walk_expr(self, branch);
        // Facts pushed for this branch go out of scope here.
        self.unwrappables.truncate(saved_depth);
    }
}
impl<'a, 'tcx> Visitor<'tcx> for UnwrappableVariablesVisitor<'a, 'tcx> {
    type Map = Map<'tcx>;
    fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
        // Shouldn't lint when `expr` is in macro.
        if in_external_macro(self.cx.tcx.sess, expr.span) {
            return;
        }
        if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr) {
            // Visit the condition itself first; it may contain nested `if`s
            // or unwrap calls of its own.
            walk_expr(self, cond);
            // In the `then` branch the condition holds; in the `else`
            // branch it is negated (`else_branch == true`).
            self.visit_branch(expr, cond, then, false);
            if let Some(else_inner) = r#else {
                self.visit_branch(expr, cond, else_inner, true);
            }
        } else {
            // find `unwrap[_err]()` calls:
            if_chain! {
                if let ExprKind::MethodCall(method_name, _, [self_arg, ..], _) = expr.kind;
                if let Some(id) = path_to_local(self_arg);
                if [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name);
                let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name);
                if let Some(unwrappable) = self.unwrappables.iter()
                    .find(|u| u.local_id == id);
                // Span contexts should not differ with the conditional branch
                if !differing_macro_contexts(unwrappable.branch.span, expr.span);
                if !differing_macro_contexts(unwrappable.branch.span, unwrappable.check.span);
                then {
                    // A success-style call in a success-proven branch is
                    // redundant; the opposite combination always panics.
                    if call_to_unwrap == unwrappable.safe_to_unwrap {
                        let is_entire_condition = unwrappable.is_entire_condition;
                        let unwrappable_variable_name = self.cx.tcx.hir().name(unwrappable.local_id);
                        let suggested_pattern = if call_to_unwrap {
                            unwrappable.kind.success_variant_pattern()
                        } else {
                            unwrappable.kind.error_variant_pattern()
                        };

                        span_lint_and_then(
                            self.cx,
                            UNNECESSARY_UNWRAP,
                            expr.span,
                            &format!(
                                "called `{}` on `{}` after checking its variant with `{}`",
                                method_name.ident.name,
                                unwrappable_variable_name,
                                unwrappable.check_name.ident.as_str(),
                            ),
                            |diag| {
                                // Only a stand-alone check can be rewritten
                                // to `if let` mechanically.
                                if is_entire_condition {
                                    diag.span_suggestion(
                                        unwrappable.check.span.with_lo(unwrappable.if_expr.span.lo()),
                                        "try",
                                        format!(
                                            "if let {} = {}",
                                            suggested_pattern,
                                            unwrappable_variable_name,
                                        ),
                                        // We don't track how the unwrapped value is used inside the
                                        // block or suggest deleting the unwrap, so we can't offer a
                                        // fixable solution.
                                        Applicability::Unspecified,
                                    );
                                } else {
                                    diag.span_label(unwrappable.check.span, "the check is happening here");
                                    diag.help("try using `if let` or `match`");
                                }
                            },
                        );
                    } else {
                        span_lint_and_then(
                            self.cx,
                            PANICKING_UNWRAP,
                            expr.span,
                            &format!("this call to `{}()` will always panic",
                                method_name.ident.name),
                            |diag| { diag.span_label(unwrappable.check.span, "because of this check"); },
                        );
                    }
                }
            }
            walk_expr(self, expr);
        }
    }
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
    }
}
// Registers `Unwrap` as a single lint pass emitting both lints.
declare_lint_pass!(Unwrap => [PANICKING_UNWRAP, UNNECESSARY_UNWRAP]);
impl<'tcx> LateLintPass<'tcx> for Unwrap {
    /// Walks every function body with an `UnwrappableVariablesVisitor`,
    /// which emits the actual diagnostics.
    fn check_fn(
        &mut self,
        cx: &LateContext<'tcx>,
        kind: FnKind<'tcx>,
        decl: &'tcx FnDecl<'_>,
        body: &'tcx Body<'_>,
        span: Span,
        fn_id: HirId,
    ) {
        // Macro-expanded functions are skipped entirely.
        if !span.from_expansion() {
            let mut visitor = UnwrappableVariablesVisitor {
                cx,
                unwrappables: Vec::new(),
            };
            walk_fn(&mut visitor, kind, decl, body.id(), span, fn_id);
        }
    }
}
| 38.045593 | 107 | 0.51458 |
acdbf14af395ff8626bb7733c82166786d6f12ea | 37,112 | #![allow(non_snake_case)]
use super::super::ast::*;
use super::super::scanner::Token;
use super::super::symbol_table::*;
use super::super::visitors::*;
use crate::frame_c::utils::SystemHierarchy;
/// AST visitor that renders a Frame system as state-machine-cat (smcat)
/// source: a list of state declarations followed by transition edges.
pub struct SmcatVisitor {
    /// Compiler version captured at construction; not read by this visitor.
    _compiler_version: String,
    /// Accumulated smcat output.
    pub code: String,
    /// Current indentation level used by `dent()`.
    pub dent: usize,
    /// Name of the state currently being visited, if any.
    pub current_state_name_opt: Option<String>,
    /// Return type of the event handler currently being visited.
    current_event_ret_type: String,
    /// Symbol table for the system being compiled.
    arcanium: Arcanum,
    /// Naming configuration used when resolving event symbols.
    symbol_config: SymbolConfig,
    /// Formatting flag: true until the first event handler of a state is done.
    first_event_handler: bool,
    /// Name of the system, set when visiting the system node.
    system_name: String,
    /// Name of the machine's first state (target of the `initial` edge).
    first_state_name: String,
    /// Generated state-declaration section.
    states: String,
    /// Generated transition-edge section.
    transitions: String,
    /// Parent/child state hierarchy used to nest state declarations.
    system_hierarchy: SystemHierarchy,
    /// Event selector (e.g. `|tick|`) of the handler being visited; used as
    /// the default edge label.
    event_handler_msg: String,
}
impl SmcatVisitor {
    //* --------------------------------------------------------------------- *//
    /// Builds a new visitor over the given symbol table and state hierarchy.
    ///
    /// `_comments` is accepted for signature parity with the other Frame
    /// backends but is not used by the smcat generator.
    pub fn new(
        arcanium: Arcanum,
        system_hierarchy: SystemHierarchy,
        compiler_version: &str,
        _comments: Vec<Token>,
    ) -> SmcatVisitor {
        SmcatVisitor {
            _compiler_version: compiler_version.to_string(),
            code: String::new(),
            dent: 0,
            current_state_name_opt: None,
            current_event_ret_type: String::new(),
            arcanium,
            symbol_config: SymbolConfig::new(),
            first_event_handler: true,
            system_name: String::new(),
            first_state_name: String::new(),
            states: String::new(),
            transitions: String::new(),
            system_hierarchy,
            event_handler_msg: String::new(),
        }
    }
    //* --------------------------------------------------------------------- *//
    /// Returns the generated smcat source accumulated so far.
    pub fn get_code(&self) -> String {
        self.code.clone()
    }
    //* --------------------------------------------------------------------- *//
    /// Recursively emits smcat state declarations for `node_name` into
    /// `output`.
    ///
    /// The system node itself is not emitted; it only contributes its
    /// children at the top level. A state with children opens a
    /// `name { ... }` scope, a leaf state is emitted as `name,`. smcat
    /// requires the final entry of a list to end in `;`, so after a scope's
    /// children are written the last `,` is rewritten.
    fn generate_states(
        &self,
        node_name: &str,
        is_system_node: bool,
        indent: usize,
        output: &mut String,
    ) {
        let mut actual_indent = indent;
        let node = self.system_hierarchy.get_node(node_name).unwrap();
        let has_children = !node.children.is_empty();
        if !is_system_node {
            actual_indent += 1;
            if has_children {
                output.push_str(&format!("{}{} {{\n", self.specifiy_dent(indent), node_name));
            } else {
                output.push_str(&format!("{}{}, \n", self.specifiy_dent(indent), node_name));
            }
        }
        for child_node_name in &node.children {
            let child_node = self.system_hierarchy.get_node(&child_node_name).unwrap();
            self.generate_states(&child_node.name, false, actual_indent, output);
        }
        // Change the last comma to a semicolon to terminate the smcat list.
        if has_children {
            if let Some(location) = output.rfind(',') {
                output.replace_range(location..location + 1, ";")
            }
        }
        if !is_system_node && has_children {
            output.push_str(&format!("{}}},\n", self.specifiy_dent(indent)));
        }
    }
    //* --------------------------------------------------------------------- *//
    /// Entry point: walks the parsed system, filling `self.code`.
    pub fn run(&mut self, system_node: &SystemNode) {
        system_node.accept(self);
    }
    //* --------------------------------------------------------------------- *//
    /// Appends raw text to the output buffer.
    fn add_code(&mut self, s: &str) {
        self.code.push_str(s);
    }
    //* --------------------------------------------------------------------- *//
    /// Appends a newline followed by the current indentation.
    fn newline(&mut self) {
        self.code.push_str(&format!("\n{}", self.dent()));
    }
    //* --------------------------------------------------------------------- *//
    /// Indentation string for the current `self.dent` level.
    fn dent(&self) -> String {
        " ".repeat(self.dent)
    }
    //* --------------------------------------------------------------------- *//
    /// Indentation string for an explicit nesting level.
    /// NOTE(review): keeps the historical "specifiy" spelling in case code
    /// outside this impl refers to it.
    fn specifiy_dent(&self, dent: usize) -> String {
        " ".repeat(dent)
    }
    //* --------------------------------------------------------------------- *//
    /// Dispatches each statement of an event handler body.
    ///
    /// Variable declarations and plain expressions produce no smcat output;
    /// transitions, tests (which may contain transitions), state-stack
    /// operations and change-state statements are visited.
    fn visit_decl_stmts(&mut self, decl_stmt_types: &[DeclOrStmtType]) {
        for decl_stmt_t in decl_stmt_types.iter() {
            if let DeclOrStmtType::StmtT { stmt_t } = decl_stmt_t {
                match stmt_t {
                    StatementType::ExpressionStmt { .. } => {}
                    StatementType::TransitionStmt {
                        transition_statement,
                    } => {
                        transition_statement.accept(self);
                    }
                    StatementType::TestStmt { test_stmt_node } => {
                        test_stmt_node.accept(self);
                    }
                    StatementType::StateStackStmt {
                        state_stack_operation_statement_node,
                    } => {
                        state_stack_operation_statement_node.accept(self);
                    }
                    StatementType::ChangeStateStmt { change_state_stmt } => {
                        change_state_stmt.accept(self);
                    }
                    StatementType::NoStmt => {
                        // TODO: the grammar should not emit an empty
                        // statement; decide how this should be handled.
                        panic!("todo");
                    }
                }
            }
        }
    }
    //* --------------------------------------------------------------------- *//
    /// Emits a change-state edge from the current state to the referenced
    /// target state. Change-state edges are drawn grey to distinguish them
    /// from full transitions.
    fn generate_state_ref_change_state(
        &mut self,
        change_state_stmt_node: &ChangeStateStatementNode,
    ) {
        let target_state_name = match &change_state_stmt_node.state_context_t {
            StateContextType::StateRef { state_context_node } => {
                &state_context_node.state_ref_node.name
            }
            _ => panic!("TODO"),
        };
        // "??" marks an edge generated outside of any state context.
        let current_state = self
            .current_state_name_opt
            .clone()
            .unwrap_or_else(|| "??".to_string());
        // Edge label: the explicit transition label if present, otherwise
        // the event selector of the enclosing handler (e.g. `|tick|`).
        let label = match &change_state_stmt_node.label_opt {
            Some(label) => format!("{};", label),
            None => format!("{};", self.event_handler_msg),
        };
        let transition_code = format!(
            "{} => {} [color=\"grey\"] : {}\n",
            current_state,
            self.format_target_state_name(target_state_name),
            label
        );
        self.transitions.push_str(&transition_code);
    }
    //* --------------------------------------------------------------------- *//
    /// Emits a transition edge from the current state to the referenced
    /// target state, labeled with the transition label or, if absent, the
    /// enclosing handler's event selector.
    fn generate_state_ref_transition(&mut self, transition_statement: &TransitionStatementNode) {
        let target_state_name = match &transition_statement.target_state_context_t {
            StateContextType::StateRef { state_context_node } => {
                &state_context_node.state_ref_node.name
            }
            _ => panic!("TODO"),
        };
        let current_state = self
            .current_state_name_opt
            .clone()
            .unwrap_or_else(|| "??".to_string());
        let label = match &transition_statement.label_opt {
            Some(label) => label.clone(),
            None => self.event_handler_msg.clone(),
        };
        let transition_code = format!(
            "{} => {} : {};\n",
            current_state,
            self.format_target_state_name(target_state_name),
            label
        );
        self.transitions.push_str(&transition_code);
    }
    //* --------------------------------------------------------------------- *//
    /// Maps a Frame state name to its smcat node name. Currently the
    /// identity transformation, kept as a single seam for future escaping.
    fn format_target_state_name(&self, state_name: &str) -> String {
        state_name.to_string()
    }
    //* --------------------------------------------------------------------- *//
    /// Emits a transition targeting the deep-history pseudo-state for a
    /// state-stack pop target.
    // TODO: Review if this is correct handling. At least with regular statecharts,
    // each state with children can have a separate history that's used to determine
    // initial child state on reentry to parent state
    fn generate_state_stack_pop_transition(
        &mut self,
        transition_statement: &TransitionStatementNode,
    ) {
        let label = match &transition_statement.label_opt {
            Some(label) => label,
            None => &self.event_handler_msg,
        };
        // Fall back to "??" for consistency with the other edge generators
        // instead of panicking when no state context is set.
        let current_state = match &self.current_state_name_opt {
            Some(name) => name.clone(),
            None => "??".to_string(),
        };
        // .deephistory suffix overrides target state label with H* and sets shape to
        // circle
        self.transitions.push_str(&format!(
            "{} => H*.deephistory : {};\n",
            current_state, label
        ));
    }
}
//* --------------------------------------------------------------------- *//
impl AstVisitor for SmcatVisitor {
//* --------------------------------------------------------------------- *//
fn visit_system_node(&mut self, system_node: &SystemNode) -> AstVisitorReturnType {
self.system_name = system_node.name.clone();
// First state name needed for machinery.
// Don't generate if there isn't at least one state.
if let Some(first_state) = system_node.get_first_state() {
self.first_state_name = first_state.borrow().name.clone();
self.add_code("initial,\n");
self.transitions
.push_str(&format!("initial => \"{}\";\n", self.first_state_name));
}
if let Some(machine_block_node) = &system_node.machine_block_node_opt {
machine_block_node.accept(self);
}
self.add_code(&self.states.clone());
self.newline();
self.add_code(&self.transitions.clone());
AstVisitorReturnType::SystemNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_frame_messages_enum(
&mut self,
_interface_block_node: &InterfaceBlockNode,
) -> AstVisitorReturnType {
panic!("Error - visit_frame_messages_enum() only used in Rust.");
// AstVisitorReturnType::InterfaceBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_interface_parameters(
&mut self,
_interface_block_node: &InterfaceBlockNode,
) -> AstVisitorReturnType {
panic!("visit_interface_parameters() not valid for target language.");
// AstVisitorReturnType::InterfaceBlockNode {}
}
fn visit_interface_method_call_expression_node(
&mut self,
_interface_method_call_expr_node: &InterfaceMethodCallExprNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::InterfaceMethodCallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_interface_method_call_expression_node_to_string(
&mut self,
_interface_method_call_expr_node: &InterfaceMethodCallExprNode,
_output: &mut String,
) -> AstVisitorReturnType {
AstVisitorReturnType::InterfaceMethodCallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_interface_block_node(
&mut self,
_interface_block_node: &InterfaceBlockNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::InterfaceBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_interface_method_node(
&mut self,
_interface_method_node: &InterfaceMethodNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::InterfaceMethodNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_machine_block_node(
&mut self,
machine_block_node: &MachineBlockNode,
) -> AstVisitorReturnType {
let mut output = String::new();
let sys_name = self.system_name.clone();
let _system_node = self.system_hierarchy.get_system_node().unwrap();
self.generate_states(&sys_name, true, 0, &mut output);
self.states = output;
for state_node_rcref in &machine_block_node.states {
state_node_rcref.borrow().accept(self);
}
AstVisitorReturnType::MachineBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_actions_block_node(
&mut self,
_actions_block_node: &ActionsBlockNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::ActionBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_action_node_rust_trait(&mut self, _: &ActionsBlockNode) -> AstVisitorReturnType {
panic!("Error - visit_action_node_rust_trait() not implemented.");
// AstVisitorReturnType::ActionBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_actions_node_rust_impl(&mut self, _: &ActionsBlockNode) -> AstVisitorReturnType {
panic!("Error - visit_actions_node_rust_impl() not implemented.");
// AstVisitorReturnType::ActionBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_domain_block_node(
&mut self,
_domain_block_node: &DomainBlockNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::DomainBlockNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_state_node(&mut self, state_node: &StateNode) -> AstVisitorReturnType {
self.current_state_name_opt = Some(state_node.name.clone());
let _state_symbol = match self.arcanium.get_state(&state_node.name) {
Some(state_symbol) => state_symbol,
None => panic!("TODO"),
};
self.first_event_handler = true; // context for formatting
if !state_node.evt_handlers_rcref.is_empty() {
for evt_handler_node in &state_node.evt_handlers_rcref {
evt_handler_node.as_ref().borrow().accept(self);
}
}
match &state_node.dispatch_opt {
Some(_dispatch) => {}
None => {}
}
self.current_state_name_opt = None;
AstVisitorReturnType::StateNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_event_handler_node(
&mut self,
evt_handler_node: &EventHandlerNode,
) -> AstVisitorReturnType {
self.current_event_ret_type = evt_handler_node.get_event_ret_type();
if let MessageType::CustomMessage { message_node } = &evt_handler_node.msg_t {
self.event_handler_msg = format!("|{}|", message_node.name);
} else {
// AnyMessage ( ||* )
self.event_handler_msg = "||*".to_string();
}
if let MessageType::CustomMessage { message_node } = &evt_handler_node.msg_t {
let (_msg, _, _) = EventSymbol::get_event_msg(
&self.symbol_config,
&Some(evt_handler_node.state_name.clone()),
&message_node.name,
);
}
// Generate statements
self.visit_decl_stmts(&evt_handler_node.statements);
// this controls formatting here
self.first_event_handler = false;
self.current_event_ret_type = String::new();
AstVisitorReturnType::EventHandlerNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_event_handler_terminator_node(
&mut self,
_evt_handler_terminator_node: &TerminatorExpr,
) -> AstVisitorReturnType {
AstVisitorReturnType::EventHandlerTerminatorNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_statement_node(
&mut self,
_method_call_statement: &CallStmtNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallStatementNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_expression_node(&mut self, _method_call: &CallExprNode) -> AstVisitorReturnType {
AstVisitorReturnType::CallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_expression_node_to_string(
&mut self,
_method_call: &CallExprNode,
_output: &mut String,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_action_call_expression_node(
&mut self,
_action_call: &ActionCallExprNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::ActionCallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_action_call_expression_node_to_string(
&mut self,
_action_call: &ActionCallExprNode,
_output: &mut String,
) -> AstVisitorReturnType {
AstVisitorReturnType::ActionCallExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_expr_list_node(
&mut self,
_call_expr_list: &CallExprListNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallExprListNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_expr_list_node_to_string(
&mut self,
_call_expr_list: &CallExprListNode,
_output: &mut String,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallExprListNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_action_call_statement_node(
&mut self,
_action_call_stmt_node: &ActionCallStmtNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::ActionCallStatementNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_transition_statement_node(
&mut self,
transition_statement: &TransitionStatementNode,
) -> AstVisitorReturnType {
match &transition_statement.target_state_context_t {
StateContextType::StateRef { .. } => {
self.generate_state_ref_transition(transition_statement)
}
StateContextType::StateStackPop {} => {
self.generate_state_stack_pop_transition(transition_statement)
}
};
AstVisitorReturnType::CallStatementNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_state_ref_node(&mut self, state_ref: &StateRefNode) -> AstVisitorReturnType {
self.add_code(&state_ref.name);
AstVisitorReturnType::StateRefNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_change_state_statement_node(
&mut self,
change_state_stmt_node: &ChangeStateStatementNode,
) -> AstVisitorReturnType {
match &change_state_stmt_node.state_context_t {
StateContextType::StateRef { .. } => {
self.generate_state_ref_change_state(change_state_stmt_node)
}
StateContextType::StateStackPop {} => panic!("TODO - not implemented"),
};
AstVisitorReturnType::ChangeStateStmtNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_parameter_node(&mut self, _parameter_node: &ParameterNode) -> AstVisitorReturnType {
AstVisitorReturnType::ParameterNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_dispatch_node(&mut self, _dispatch_node: &DispatchNode) -> AstVisitorReturnType {
AstVisitorReturnType::DispatchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_test_statement_node(
&mut self,
test_stmt_node: &TestStatementNode,
) -> AstVisitorReturnType {
match &test_stmt_node.test_t {
TestType::BoolTest { bool_test_node } => {
bool_test_node.accept(self);
}
TestType::StringMatchTest {
string_match_test_node,
} => {
string_match_test_node.accept(self);
}
TestType::NumberMatchTest {
number_match_test_node,
} => {
number_match_test_node.accept(self);
}
}
AstVisitorReturnType::TestStatementNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_bool_test_node(&mut self, bool_test_node: &BoolTestNode) -> AstVisitorReturnType {
for branch_node in &bool_test_node.conditional_branch_nodes {
branch_node.expr_t.accept(self);
branch_node.accept(self);
}
// (':' bool_test_else_branch)?
if let Some(bool_test_else_branch_node) = &bool_test_node.else_branch_node_opt {
bool_test_else_branch_node.accept(self);
}
AstVisitorReturnType::BoolTestNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_chain_literal_statement_node(
&mut self,
_method_call_chain_literal_stmt_node: &CallChainLiteralStmtNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallChainLiteralStmtNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_chain_literal_expr_node(
&mut self,
_method_call_chain_expression_node: &CallChainLiteralExprNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::CallChainLiteralExprNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_call_chain_literal_expr_node_to_string(
&mut self,
_method_call_chain_expression_node: &CallChainLiteralExprNode,
_output: &mut String,
) -> AstVisitorReturnType {
panic!("TODO");
}
//* --------------------------------------------------------------------- *//
fn visit_bool_test_conditional_branch_node(
&mut self,
bool_test_true_branch_node: &BoolTestConditionalBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&bool_test_true_branch_node.statements);
AstVisitorReturnType::BoolTestConditionalBranchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_bool_test_else_branch_node(
&mut self,
bool_test_else_branch_node: &BoolTestElseBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&bool_test_else_branch_node.statements);
AstVisitorReturnType::BoolTestElseBranchNode {}
}
//* --------------------------------------------------------------------- *//
// Used in event string matching transitions
fn visit_string_match_test_node(
&mut self,
string_match_test_node: &StringMatchTestNode,
) -> AstVisitorReturnType {
for match_branch_node in &string_match_test_node.match_branch_nodes {
// TODO: use string_match_test_node.expr_t.accept(self) ?
match &string_match_test_node.expr_t {
ExprType::CallExprT {
call_expr_node: method_call_expr_node,
} => method_call_expr_node.accept(self),
ExprType::ActionCallExprT {
action_call_expr_node,
} => action_call_expr_node.accept(self),
ExprType::CallChainLiteralExprT {
call_chain_expr_node,
} => call_chain_expr_node.accept(self),
ExprType::VariableExprT { var_node: id_node } => id_node.accept(self),
_ => panic!("TODO"),
}
match_branch_node.accept(self);
}
// (':' string_test_else_branch)?
if let Some(string_match_else_branch_node) = &string_match_test_node.else_branch_node_opt {
string_match_else_branch_node.accept(self);
}
AstVisitorReturnType::StringMatchTestNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_string_match_test_match_branch_node(
&mut self,
string_match_test_match_branch_node: &StringMatchTestMatchBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&string_match_test_match_branch_node.statements);
AstVisitorReturnType::StringMatchTestMatchBranchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_string_match_test_else_branch_node(
&mut self,
string_match_test_else_branch_node: &StringMatchTestElseBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&string_match_test_else_branch_node.statements);
AstVisitorReturnType::StringMatchElseBranchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_string_match_test_pattern_node(
&mut self,
_string_match_test_else_branch_node: &StringMatchTestPatternNode,
) -> AstVisitorReturnType {
panic!("todo");
}
//-----------------------------------------------------//
fn visit_number_match_test_node(
&mut self,
number_match_test_node: &NumberMatchTestNode,
) -> AstVisitorReturnType {
for match_branch_node in &number_match_test_node.match_branch_nodes {
// self.add_code(&format!("{} (", if_or_else_if));
match &number_match_test_node.expr_t {
ExprType::CallExprT {
call_expr_node: method_call_expr_node,
} => method_call_expr_node.accept(self),
ExprType::ActionCallExprT {
action_call_expr_node,
} => action_call_expr_node.accept(self),
ExprType::CallChainLiteralExprT {
call_chain_expr_node,
} => call_chain_expr_node.accept(self),
ExprType::VariableExprT { var_node: id_node } => id_node.accept(self),
_ => panic!("TODO"),
}
let mut first_match = true;
for _match_number in &match_branch_node.number_match_pattern_nodes {
if first_match {
first_match = false;
} else {
match &number_match_test_node.expr_t {
ExprType::CallExprT {
call_expr_node: method_call_expr_node,
} => method_call_expr_node.accept(self),
ExprType::ActionCallExprT {
action_call_expr_node,
} => action_call_expr_node.accept(self),
ExprType::CallChainLiteralExprT {
call_chain_expr_node,
} => call_chain_expr_node.accept(self),
ExprType::VariableExprT { var_node: id_node } => id_node.accept(self),
_ => panic!("TODO"),
}
}
}
match_branch_node.accept(self);
}
if let Some(number_match_else_branch_node) = &number_match_test_node.else_branch_node_opt {
number_match_else_branch_node.accept(self);
}
AstVisitorReturnType::NumberMatchTestNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_number_match_test_match_branch_node(
&mut self,
number_match_test_match_branch_node: &NumberMatchTestMatchBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&number_match_test_match_branch_node.statements);
AstVisitorReturnType::NumberMatchTestMatchBranchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_number_match_test_else_branch_node(
&mut self,
number_match_test_else_branch_node: &NumberMatchTestElseBranchNode,
) -> AstVisitorReturnType {
self.visit_decl_stmts(&number_match_test_else_branch_node.statements);
AstVisitorReturnType::NumberMatchElseBranchNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_number_match_test_pattern_node(
&mut self,
_match_pattern_node: &NumberMatchTestPatternNode,
) -> AstVisitorReturnType {
AstVisitorReturnType::NumberMatchTestPatternNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_expression_list_node(&mut self, _expr_list: &ExprListNode) -> AstVisitorReturnType {
AstVisitorReturnType::ParentheticalExpressionNode {}
}
//* --------------------------------------------------------------------- *//
fn visit_expression_list_node_to_string(
&mut self,
_expr_list: &ExprListNode,
_output: &mut String,
) -> AstVisitorReturnType {
AstVisitorReturnType::ParentheticalExpressionNode {}
}
//* --------------------------------------------------------------------- *//
    /// Default no-op for literal expressions.
    fn visit_literal_expression_node(
        &mut self,
        _literal_expression_node: &LiteralExprNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::ParentheticalExpressionNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_literal_expression_node`; default no-op.
    fn visit_literal_expression_node_to_string(
        &mut self,
        _literal_expression_node: &LiteralExprNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::ParentheticalExpressionNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for identifier nodes.
    fn visit_identifier_node(&mut self, _identifier_node: &IdentifierNode) -> AstVisitorReturnType {
        AstVisitorReturnType::IdentifierNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_identifier_node`; default no-op.
    fn visit_identifier_node_to_string(
        &mut self,
        _identifier_node: &IdentifierNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::IdentifierNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for state-stack operations (push/pop).
    fn visit_state_stack_operation_node(
        &mut self,
        _state_stack_operation_node: &StateStackOperationNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::StateStackOperationNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_state_stack_operation_node`; default no-op.
    fn visit_state_stack_operation_node_to_string(
        &mut self,
        _state_stack_operation_node: &StateStackOperationNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::StateStackOperationNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for a state-stack operation used as a statement.
    fn visit_state_stack_operation_statement_node(
        &mut self,
        _state_stack_op_statement_node: &StateStackOperationStatementNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::StateStackOperationStatementNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for state-context nodes.
    fn visit_state_context_node(
        &mut self,
        _state_context_node: &StateContextNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::StateContextNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for frame-event parts (e.g. @, @||, @[...]).
    fn visit_frame_event_part(
        &mut self,
        _frame_event_part: &FrameEventPart,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::FrameEventExprType {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_frame_event_part`; default no-op.
    fn visit_frame_event_part_to_string(
        &mut self,
        _frame_event_part: &FrameEventPart,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::FrameEventExprType {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for action declarations.
    fn visit_action_decl_node(&mut self, _action_decl_node: &ActionNode) -> AstVisitorReturnType {
        AstVisitorReturnType::ActionDeclNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Action implementations are not supported by this visitor; panics if reached.
    fn visit_action_impl_node(&mut self, _action_decl_node: &ActionNode) -> AstVisitorReturnType {
        panic!("visit_action_impl_node() not implemented.");
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for domain-block variable declarations.
    fn visit_domain_variable_decl_node(
        &mut self,
        _variable_decl_node: &VariableDeclNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::VariableDeclNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for local variable declarations.
    fn visit_variable_decl_node(
        &mut self,
        _variable_decl_node: &VariableDeclNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::VariableDeclNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for variable reference expressions.
    // NOTE(review): returns the AssignmentExprNode variant rather than a
    // variable-specific one — confirm this is intended.
    fn visit_variable_expr_node(&mut self, _variable_node: &VariableNode) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_variable_expr_node`; default no-op.
    fn visit_variable_expr_node_to_string(
        &mut self,
        _variable_node: &VariableNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for variable statements.
    fn visit_variable_stmt_node(
        &mut self,
        _variable_stmt_node: &VariableStmtNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for assignment expressions.
    fn visit_assignment_expr_node(
        &mut self,
        _assignment_expr_node: &AssignmentExprNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_assignment_expr_node`; default no-op.
    fn visit_assignment_expr_node_to_string(
        &mut self,
        _assignment_expr_node: &AssignmentExprNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for assignment statements.
    fn visit_assignment_statement_node(
        &mut self,
        _assignment_stmt_node: &AssignmentStmtNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::AssignmentExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for unary expressions.
    fn visit_unary_expr_node(&mut self, _unary_expr_node: &UnaryExprNode) -> AstVisitorReturnType {
        AstVisitorReturnType::UnaryExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_unary_expr_node`; default no-op.
    fn visit_unary_expr_node_to_string(
        &mut self,
        _unary_expr_node: &UnaryExprNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::UnaryExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for binary expressions.
    fn visit_binary_expr_node(
        &mut self,
        _binary_expr_node: &BinaryExprNode,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::BinaryExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_binary_expr_node`; default no-op.
    fn visit_binary_expr_node_to_string(
        &mut self,
        _binary_expr_node: &BinaryExprNode,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::BinaryExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// Default no-op for operator tokens.
    fn visit_operator_type(&mut self, _operator_type: &OperatorType) -> AstVisitorReturnType {
        AstVisitorReturnType::BinaryExprNode {}
    }
//* --------------------------------------------------------------------- *//
    /// String-building variant of `visit_operator_type`; default no-op.
    fn visit_operator_type_to_string(
        &mut self,
        _operator_type: &OperatorType,
        _output: &mut String,
    ) -> AstVisitorReturnType {
        AstVisitorReturnType::BinaryExprNode {}
    }
}
| 34.912512 | 100 | 0.504769 |
0a447b8e2d1634be0575516004b548d2fdf55edb | 101 | pub mod connection_handler;
pub mod opt;
pub use self::connection_handler::*;
pub use self::opt::*;
| 16.833333 | 36 | 0.732673 |
e8cf067df71a7e6e2b82ba7429963ea3fdacd596 | 922 | // Copyright 2019 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use error_chain::error_chain;
// Crate-wide `Error`/`ErrorKind`/`Result` types generated by `error_chain`.
// Each `foreign_links` entry gets an automatic `From` conversion so `?` works
// with errors from the listed external crates.
error_chain! {
    foreign_links {
        GptMan(gptman::Error);
        HexDecode(hex::FromHexError);
        Io(std::io::Error);
        Reqwest(reqwest::Error);
        Url(url::ParseError);
        Nix(nix::Error);
        WalkDir(walkdir::Error);
        Parse(std::num::ParseIntError);
    }
}
| 31.793103 | 75 | 0.682213 |
76b5755f5d495cd1159d942388de76a835a5c2e7 | 1,115 | use std::env;
use std::error::Error;
use std::ffi::OsString;
use std::process;
fn run() -> Result<(), Box<dyn Error>> {
let file_path = get_first_arg()?;
let mut wtr = csv::Writer::from_path(file_path)?;
wtr.write_record(&[
"City",
"State",
"Population",
"Latitude",
"Longitude",
])?;
wtr.write_record(&[
"Davidsons Landing",
"AK",
"",
"65.2419444",
"-165.2716667",
])?;
wtr.write_record(&["Kenai", "AK", "7610", "60.5544444", "-151.2583333"])?;
wtr.write_record(&["Oakman", "AL", "", "33.7133333", "-87.3886111"])?;
wtr.flush()?;
Ok(())
}
/// Returns the first positional argument sent to this process. If there are no
/// positional arguments, then this returns an error.
/// Returns the first positional argument sent to this process. If there are no
/// positional arguments, then this returns an error.
fn get_first_arg() -> Result<OsString, Box<dyn Error>> {
    env::args_os()
        .nth(1)
        .ok_or_else(|| From::from("expected 1 argument, but got none"))
}
/// Entry point: run the CSV writer and exit with status 1 on any error.
fn main() {
    match run() {
        Ok(()) => {}
        Err(err) => {
            println!("{}", err);
            process::exit(1);
        }
    }
}
| 24.23913 | 79 | 0.544395 |
90a294a1487216f75aa45b77231c7a94b4aed455 | 999 | use rust_decimal::Decimal;
use thiserror::Error;
use crate::transaction::TransactionType;
/// Errors raised by the payments-transaction processing pipeline.
// The `#[error(...)]` attributes are the runtime `Display` messages; they are
// part of observable behavior and must not be edited casually.
#[derive(Debug, Error)]
pub(crate) enum Error {
    // CSV parse/serialize failure, forwarded transparently.
    #[error(transparent)]
    CSV(#[from] csv::Error),

    // A transaction referenced a client id with no account record.
    #[error("client `{0}` not found")]
    ClientNotFound(u16),

    // Withdrawal (or similar) exceeded the client's available balance.
    #[error("no funds available (requested {requested:?} from client {client:?} with {available:} available)")]
    NoFunds {
        client: u16,
        available: Decimal,
        requested: Decimal,
    },

    // Deposits/withdrawals require an amount field.
    #[error("deposit/withdrawal transaction has to specify amount")]
    WithoutAmount,

    // Disputes/resolves must not carry an amount field.
    #[error("dispute/resolve transaction must not specify amount")]
    WithAmount,

    // Account frozen after a chargeback; no further operations allowed.
    #[error("client's account locked")]
    ClientLocked,

    // Dispute/resolve/chargeback referred to an unknown transaction id.
    #[error("transaction not found")]
    TransactionNotFound(u32),

    // Only deposit/withdrawal transactions can be disputed.
    #[error("invalid transaction type `{0:?}`, only deposit/withdrawal can be referred")]
    InvalidTxType(TransactionType),

    // Resolve/chargeback on a transaction that was never disputed.
    #[error("transaction is not dissputed, cannot resolve/chargeback")]
    TxNotDisputed(u32),
}
| 25.615385 | 111 | 0.665666 |
fe90db2393474366f3c0ee6e2be6f19aef9d864c | 86 | use ghost::phantom;
// Zero-sized, type-tagged identifier: ghost's `#[phantom]` turns this into a
// PhantomData-like unit type that can be written as `NodeId::<T>` without
// owning a `T`. `?Sized` allows tagging with unsized types such as `str`.
#[phantom]
#[derive(Debug, Clone)]
pub struct NodeId<T: ?Sized>;
| 14.333333 | 29 | 0.686047 |
d5b38a635d613876389938dae71123a2eebc9e0d | 1,756 | use std::sync::mpsc;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
pub struct ThreadPool {
    workers: Vec<Worker>,
    // Wrapped in `Option` so `Drop` can take and drop the sender, closing the
    // channel and letting the workers' `recv()` loops terminate.
    sender: Option<mpsc::Sender<Job>>,
}

/// A unit of work submitted via `ThreadPool::execute`.
type Job = Box<dyn FnOnce() + Send + 'static>;

impl ThreadPool {
    /// Create a new ThreadPool.
    ///
    /// The size is the number of threads in the pool.
    ///
    /// # Panics
    ///
    /// The `new` function will panic if the size is zero.
    pub fn new(size: usize) -> ThreadPool {
        assert!(size > 0);

        let (sender, receiver) = mpsc::channel();

        // All workers share one receiver, guarded by a mutex.
        let receiver = Arc::new(Mutex::new(receiver));

        let mut workers = Vec::with_capacity(size);

        for id in 0..size {
            workers.push(Worker::new(id, Arc::clone(&receiver)));
        }

        ThreadPool {
            workers,
            sender: Some(sender),
        }
    }

    /// Submit a closure to be run on one of the pool's worker threads.
    pub fn execute<F>(&self, f: F)
    where
        F: FnOnce() + Send + 'static,
    {
        let job = Box::new(f);

        // `sender` is only `None` while the pool is being dropped, so this
        // `unwrap` cannot fail for a live pool.
        self.sender.as_ref().unwrap().send(job).unwrap();
    }
}

// ANCHOR: here
impl Drop for ThreadPool {
    fn drop(&mut self) {
        // BUGFIX: previously the sender stayed alive for the whole `drop`
        // body, so workers blocked in `recv()` forever and `join` deadlocked.
        // Dropping the sender first closes the channel; each worker then sees
        // `Err` from `recv()` and exits its loop.
        drop(self.sender.take());

        for worker in &mut self.workers {
            println!("Shutting down worker {}", worker.id);

            if let Some(thread) = worker.thread.take() {
                thread.join().unwrap();
            }
        }
    }
}
// ANCHOR_END: here

struct Worker {
    id: usize,
    thread: Option<thread::JoinHandle<()>>,
}

impl Worker {
    /// Spawn a thread that pulls jobs off the shared channel until it closes.
    fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Job>>>) -> Worker {
        let thread = thread::spawn(move || loop {
            // The temporary `MutexGuard` is released before the job runs, so
            // other workers can receive concurrently.
            let message = receiver.lock().unwrap().recv();

            match message {
                Ok(job) => {
                    println!("Worker {} got a job; executing.", id);
                    job();
                }
                Err(_) => {
                    // Channel closed: the pool is shutting down.
                    println!("Worker {} disconnected; shutting down.", id);
                    break;
                }
            }
        });

        Worker {
            id,
            thread: Some(thread),
        }
    }
}
| 21.414634 | 76 | 0.525057 |
e5c5aac216e350143d4b6e0c9903e860a01b60de | 45 | pub mod api;
pub mod utils;
pub use api::*;
| 9 | 15 | 0.644444 |
7a6117c955698431b5c2d66f54aeab0f6b0cf557 | 4,065 | use crate::jcli_app::transaction::Error;
use crate::jcli_app::utils::{io, key_parser::read_ed25519_secret_key_from_file};
use bech32::{self, ToBase32 as _};
use chain_core::property::Serialize as _;
use chain_impl_mockchain::{
account::SpendingCounter,
header::HeaderId,
transaction::{TransactionSignDataHash, Witness},
};
use std::{io::Write, path::PathBuf};
use structopt::StructOpt;
// CLI arguments for building a transaction witness. Note: the `///` doc
// comments below double as structopt help text, so they are runtime-visible
// strings and are deliberately left untouched here.
#[derive(StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub struct MkWitness {
    /// the Transaction ID of the witness to sign
    #[structopt(name = "TRANSACTION_ID")]
    pub sign_data_hash: TransactionSignDataHash,

    /// the file path to the file to write the witness in.
    /// If omitted it will be printed to the standard output.
    pub output: Option<PathBuf>,

    /// the type of witness to build: account, UTxO or Legacy UtxO
    #[structopt(long = "type", parse(try_from_str))]
    pub witness_type: WitnessType,

    /// the hash of the block0, the first block of the blockchain
    #[structopt(long = "genesis-block-hash", parse(try_from_str))]
    pub genesis_block_hash: HeaderId,

    /// value is mandatory is `--type=account' It is the counter for
    /// every time the account is being utilized.
    #[structopt(long = "account-spending-counter")]
    pub account_spending_counter: Option<u32>,

    /// the file path to the file to read the signing key from.
    /// If omitted it will be read from the standard input.
    pub secret: Option<PathBuf>,
}
/// The kind of witness to produce; parsed from the `--type` CLI flag
/// (`utxo`, `legacy-utxo` or `account`).
pub enum WitnessType {
    UTxO,
    OldUTxO,
    Account,
}
impl std::str::FromStr for WitnessType {
    type Err = &'static str;

    /// Parses the `--type` flag value into a `WitnessType`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "utxo" {
            Ok(WitnessType::UTxO)
        } else if s == "legacy-utxo" {
            Ok(WitnessType::OldUTxO)
        } else if s == "account" {
            Ok(WitnessType::Account)
        } else {
            Err("Invalid witness type, expected `utxo', `legacy-utxo' or `account'")
        }
    }
}
impl MkWitness {
    /// Builds the requested witness type over the transaction sign-data hash
    /// and writes it (bech32-encoded) to `self.output` or stdout.
    pub fn exec(self) -> Result<(), Error> {
        let witness = match self.witness_type {
            WitnessType::UTxO => {
                let secret_key = read_ed25519_secret_key_from_file(&self.secret)?;
                Witness::new_utxo(&self.genesis_block_hash, &self.sign_data_hash, |d| {
                    secret_key.sign(d)
                })
            }
            WitnessType::OldUTxO => {
                let secret_key = read_ed25519_secret_key_from_file(&self.secret)?;
                // NOTE(review): the final `&[0; 32]` argument looks like a
                // placeholder chain-code — confirm against the Witness API.
                Witness::new_old_utxo(
                    &self.genesis_block_hash,
                    &self.sign_data_hash,
                    |d| (secret_key.to_public(), secret_key.sign(d)),
                    &[0; 32],
                )
            }
            WitnessType::Account => {
                // The spending counter is required for account witnesses.
                let account_spending_counter = self
                    .account_spending_counter
                    .ok_or(Error::MakeWitnessAccountCounterMissing)
                    .map(SpendingCounter::from)?;

                let secret_key = read_ed25519_secret_key_from_file(&self.secret)?;
                Witness::new_account(
                    &self.genesis_block_hash,
                    &self.sign_data_hash,
                    &account_spending_counter,
                    |d| secret_key.sign(d),
                )
            }
        };

        self.write_witness(&witness)
    }

    /// Serializes `witness`, bech32-encodes it with the `witness` HRP, and
    /// writes one line to the chosen output (stdout when `output` is `None`).
    fn write_witness(&self, witness: &Witness) -> Result<(), Error> {
        let mut writer =
            io::open_file_write(&self.output).map_err(|source| Error::WitnessFileWriteFailed {
                source,
                path: self.output.clone().unwrap_or_default(),
            })?;
        let bytes = witness
            .serialize_as_vec()
            .map_err(Error::WitnessFileSerializationFailed)?;

        let base32 = bytes.to_base32();
        let bech32 = bech32::encode("witness", &base32)?;
        writeln!(writer, "{}", bech32).map_err(|source| Error::WitnessFileWriteFailed {
            source,
            path: self.output.clone().unwrap_or_default(),
        })
    }
}
| 35.043103 | 94 | 0.588684 |
e5e4f21cb1e860f932e33069a49f8e8c81aae3cf | 1,832 | // https://leetcode-cn.com/problems/get-watched-videos-by-your-friends/
// Runtime: 28 ms
// Memory Usage: 3.1 MB
use std::collections::{HashMap, VecDeque};
/// BFS outward from person `id`; collects the videos watched by people at
/// exactly `level` hops, ordered by ascending frequency then lexicographically.
pub fn watched_videos_by_friends(
    watched_videos: Vec<Vec<String>>,
    friends: Vec<Vec<i32>>,
    id: i32,
    level: i32,
) -> Vec<String> {
    let people = watched_videos.len();
    let mut seen = vec![false; people];
    let start = id as usize;
    seen[start] = true;

    let mut frontier: VecDeque<(usize, i32)> = VecDeque::new();
    frontier.push_back((start, 0));

    // video title -> number of level-distance friends who watched it
    let mut counts: HashMap<String, usize> = HashMap::new();
    while let Some((person, depth)) = frontier.pop_front() {
        if depth == level {
            // Exactly `level` hops away: tally this person's videos.
            for title in &watched_videos[person] {
                *counts.entry(title.clone()).or_insert(0) += 1;
            }
        } else {
            // Still closer than `level`: expand to unvisited friends.
            for &friend in &friends[person] {
                let next = friend as usize;
                if !seen[next] {
                    seen[next] = true;
                    frontier.push_back((next, depth + 1));
                }
            }
        }
    }

    // Sorting (count, title) pairs yields frequency order with alphabetical
    // tie-breaking, since titles are unique keys.
    let mut ranked: Vec<(usize, String)> = counts
        .into_iter()
        .map(|(title, n)| (n, title))
        .collect();
    ranked.sort_unstable();
    ranked.into_iter().map(|(_, title)| title).collect()
}
// hash_table string breadth_first_search
// Exercises both sample cases from LeetCode 1311 (level 1 and level 2).
#[test]
fn test2_1311() {
    use leetcode_prelude::{vec2, vec2_string, vec_string};
    assert_eq!(
        watched_videos_by_friends(
            vec2_string![["A", "B"], ["C"], ["B", "C"], ["D"]],
            vec2![[1, 2], [0, 3], [0, 3], [1, 2]],
            0,
            1
        ),
        vec_string!["B", "C"]
    );
    assert_eq!(
        watched_videos_by_friends(
            vec2_string![["A", "B"], ["C"], ["B", "C"], ["D"]],
            vec2![[1, 2], [0, 3], [0, 3], [1, 2]],
            0,
            2
        ),
        vec_string!["D"]
    );
}
| 29.079365 | 71 | 0.486354 |
148c5b34855b33b89eedd8531f50836560d534b8 | 1,344 | #![feature(asm)]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
/// Rounds `n` up to the next power of two using the classic bit-smearing
/// trick, written in (legacy, pre-2020) inline asm: decrement, OR in all
/// right-shifts by 1/2/4/8/16 to set every bit below the MSB, then increment.
// NOTE(review): assumes n > 0; n == 0 would wrap on the `dec` — confirm callers.
unsafe fn next_power_of_2(n: u32) -> u32 {
    let mut tmp = n;
    asm!("dec $0" : "+rm"(tmp) :: "cc");
    let mut shift = 1_u32;
    while shift <= 16 {
        // Each pass ORs `tmp >> shift` into `tmp`; `shift` doubles via `shl`.
        asm!(
            "shr %cl, $2
             or $2, $0
             shl $$1, $1"
            : "+&rm"(tmp), "+{ecx}"(shift) : "r"(tmp) : "cc"
        );
    }
    asm!("inc $0" : "+rm"(tmp) :: "cc");
    return tmp;
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
/// Smoke-tests the asm helper, then exercises legacy-asm operand constraints
/// (output-only `=r`, read-write `+r`, early-clobber `+&r`).
pub fn main() {
    unsafe {
        assert_eq!(64, next_power_of_2(37));
        assert_eq!(2147483648, next_power_of_2(2147483647));
    }

    let mut y: isize = 5;
    let x: isize;
    unsafe {
        // Treat the output as initialization.
        // Computes x = (y << 3) + 7 and writes the same value back into y.
        asm!(
            "shl $2, $1
             add $3, $1
             mov $1, $0"
            : "=r"(x), "+r"(y) : "i"(3_usize), "ir"(7_usize) : "cc"
        );
    }
    assert_eq!(x, 47);
    assert_eq!(y, 47);

    let mut x = x + 1;
    assert_eq!(x, 48);

    unsafe {
        // Assignment to mutable.
        // Early clobber "&":
        // Forbids the use of a single register by both operands.
        asm!("shr $$2, $1; add $1, $0" : "+&r"(x) : "r"(x) : "cc");
    }
    assert_eq!(x, 60);
}
// No-op entry point on non-x86 targets, where the asm above is not compiled.
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
pub fn main() {}
| 24.436364 | 67 | 0.459821 |
03f3564d017c5e54326552672c874ab9415862a4 | 1,396 | use std::any::Any;
use wasm_bindgen::prelude::*;
use js_sys::Object;
use crate::functions::*;
// A JS-callable, boxed Rust callback with no arguments or return value.
pub type Function = Closure<dyn FnMut()>;
// Mirrors VS Code's `Command` shape (a human-readable title plus the command
// id to invoke). `getter_with_clone` lets JS read the owned String fields.
#[wasm_bindgen(getter_with_clone)]
pub struct Command {
    pub title: String,
    pub command: String
}
// Namespace-only unit struct grouping wrappers over the VS Code extension API.
pub struct VSCode;
impl VSCode {
    /// Shows an information message with no buttons; resolves to the JS value
    /// returned by `vscode.window.showInformationMessage`.
    // Cleanup: awaiting the JsFuture directly is equivalent to the previous
    // `let fut = ...; let result = fut.await?; return Ok(result);` and matches
    // the style of `show_information_message_1` below.
    pub async fn show_information_message_0(message: &str) -> Result<JsValue, JsValue> {
        wasm_bindgen_futures::JsFuture::from(show_information_message_0(message)).await
    }

    /// Shows an information message with one button labelled `btn_1`; resolves
    /// to the user's selection (or `undefined` if dismissed — TODO confirm).
    pub async fn show_information_message_1(message: &str, btn_1: &str) -> Result<JsValue, JsValue> {
        wasm_bindgen_futures::JsFuture::from(show_information_message_1(message, btn_1)).await
    }
}
// #[wasm_bindgen]
// extern "C" {
// #[wasm_bindgen(extends = Object, typescript_type = "Thenable<any>")]
// #[derive(Clone, Debug)]
// pub type Disposable;
// #[wasm_bindgen(method, js_namespace= ["vscode", "Disposable"], js_name = "isDisposable")]
// pub fn is_disposable(obj: &JsValue);
// }
// FFI binding for a JS `Thenable<any>` (promise-like) object, exposing its
// `then(onFulfilled, onRejected)` method to Rust.
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(extends = Object, typescript_type = "Thenable<any>")]
    #[derive(Clone, Debug)]
    pub type Thenable;

    #[wasm_bindgen(method, js_name = "then")]
    pub fn then(this: &Thenable, onFulfilled: &Closure<dyn FnMut(JsValue)>, onRejected: &Closure<dyn FnMut(JsValue)>) -> Thenable;
}
| 27.372549 | 130 | 0.672636 |
7933ed3b998fb04b679f790a621c2c52f5c9dcec | 11,518 | // These tests require the thread-local scoped dispatcher, which only works when
// we have a standard library. The behaviour being tested should be the same
// with the standard lib disabled.
//
// The alternative would be for each of these tests to be defined in a separate
// file, which is :(
#![cfg(feature = "std")]
#[macro_use]
extern crate tracing;
mod support;
use self::support::*;
use tracing::{
collect::with_default,
field::{debug, display},
Level,
};
// Generates a test asserting that an event with only field assignments (no
// message) records exactly those fields; `$e` is the `answer` field's value.
macro_rules! event_without_message {
    ($name:ident: $e:expr) => {
        #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
        #[test]
        fn $name() {
            let (collector, handle) = collector::mock()
                .event(
                    event::mock().with_fields(
                        field::mock("answer")
                            .with_value(&42)
                            .and(
                                field::mock("to_question")
                                    .with_value(&"life, the universe, and everything"),
                            )
                            .only(),
                    ),
                )
                .done()
                .run_with_handle();

            with_default(collector, || {
                info!(
                    answer = $e,
                    to_question = "life, the universe, and everything"
                );
            });

            handle.assert_finished();
        }
    };
}
// Instantiate the macro for plain, Wrapping, and NonZero integer values.
event_without_message! {event_without_message: 42}
event_without_message! {wrapping_event_without_message: std::num::Wrapping(42)}
event_without_message! {nonzeroi32_event_without_message: std::num::NonZeroI32::new(42).unwrap()}
// needs API breakage
//event_without_message!{nonzerou128_event_without_message: std::num::NonZeroU128::new(42).unwrap()}
// A formatted event message is recorded as the implicit `message` field.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn event_with_message() {
    let (collector, handle) = collector::mock()
        .event(event::mock().with_fields(field::mock("message").with_value(
            &tracing::field::debug(format_args!("hello from my event! yak shaved = {:?}", true)),
        )))
        .done()
        .run_with_handle();

    with_default(collector, || {
        debug!("hello from my event! yak shaved = {:?}", true);
    });

    handle.assert_finished();
}
// Fields and a format-args message (with named/positional args) coexist;
// shorthand `question` captures the local of the same name.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn message_without_delims() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("answer")
                    .with_value(&42)
                    .and(field::mock("question").with_value(&"life, the universe, and everything"))
                    .and(
                        field::mock("message").with_value(&tracing::field::debug(format_args!(
                            "hello from my event! tricky? {:?}!",
                            true
                        ))),
                    )
                    .only(),
            ),
        )
        .done()
        .run_with_handle();

    with_default(collector, || {
        let question = "life, the universe, and everything";
        debug!(answer = 42, question, "hello from {where}! tricky? {:?}!", true, where = "my event");
    });

    handle.assert_finished();
}
// Same as above but with a plain string literal message (no format args).
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn string_message_without_delims() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("answer")
                    .with_value(&42)
                    .and(field::mock("question").with_value(&"life, the universe, and everything"))
                    .and(
                        field::mock("message").with_value(&tracing::field::debug(format_args!(
                            "hello from my event"
                        ))),
                    )
                    .only(),
            ),
        )
        .done()
        .run_with_handle();

    with_default(collector, || {
        let question = "life, the universe, and everything";
        debug!(answer = 42, question, "hello from my event");
    });

    handle.assert_finished();
}
// Exercises target, level, braced field block, and a formatted message all in
// a single `event!` invocation.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn one_with_everything() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock()
                .with_fields(
                    field::mock("message")
                        .with_value(&tracing::field::debug(format_args!(
                            "{:#x} make me one with{what:.>20}",
                            4_277_009_102u64,
                            what = "everything"
                        )))
                        .and(field::mock("foo").with_value(&666))
                        .and(field::mock("bar").with_value(&false))
                        .only(),
                )
                .at_level(Level::ERROR)
                .with_target("whatever"),
        )
        .done()
        .run_with_handle();

    with_default(collector, || {
        event!(
            target: "whatever",
            Level::ERROR,
            { foo = 666, bar = false },
            "{:#x} make me one with{what:.>20}", 4_277_009_102u64, what = "everything"
        );
    });

    handle.assert_finished();
}
// A value moved into the event (an owned String via `display`) is recorded.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn moved_field() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("foo")
                    .with_value(&display("hello from my event"))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();
    with_default(collector, || {
        let from = "my event";
        event!(Level::INFO, foo = display(format!("hello from {}", from)))
    });

    handle.assert_finished();
}
// Dotted field names (`foo.bar`) are treated as literal field names.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn dotted_field_name() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("foo.bar")
                    .with_value(&true)
                    .and(field::mock("foo.baz").with_value(&false))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();
    with_default(collector, || {
        event!(Level::INFO, foo.bar = true, foo.baz = false);
    });

    handle.assert_finished();
}
// A borrowed value can be recorded and the borrow ends with the event, so the
// source can be mutated afterwards.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn borrowed_field() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("foo")
                    .with_value(&display("hello from my event"))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();

    with_default(collector, || {
        let from = "my event";
        let mut message = format!("hello from {}", from);
        event!(Level::INFO, foo = display(&message));
        message.push_str(", which happened!");
    });

    handle.assert_finished();
}
// Struct fields can be moved out individually and then the struct as a whole;
// disabled under the `log` feature, which would move values earlier.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
// If emitting log instrumentation, this gets moved anyway, breaking the test.
#[cfg(not(feature = "log"))]
fn move_field_out_of_struct() {
    use tracing::field::debug;

    #[derive(Debug)]
    struct Position {
        x: f32,
        y: f32,
    }

    let pos = Position {
        x: 3.234,
        y: -1.223,
    };
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("x")
                    .with_value(&debug(3.234))
                    .and(field::mock("y").with_value(&debug(-1.223)))
                    .only(),
            ),
        )
        .event(event::mock().with_fields(field::mock("position").with_value(&debug(&pos))))
        .done()
        .run_with_handle();

    with_default(collector, || {
        let pos = Position {
            x: 3.234,
            y: -1.223,
        };
        debug!(x = debug(pos.x), y = debug(pos.y));
        debug!(target: "app_events", { position = debug(pos) }, "New position");
    });
    handle.assert_finished();
}
// The `%value` shorthand records a field via its `Display` impl.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn display_shorthand() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("my_field")
                    .with_value(&display("hello world"))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();
    with_default(collector, || {
        event!(Level::TRACE, my_field = %"hello world");
    });

    handle.assert_finished();
}
// The `?value` shorthand records a field via its `Debug` impl.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn debug_shorthand() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("my_field")
                    .with_value(&debug("hello world"))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();
    with_default(collector, || {
        event!(Level::TRACE, my_field = ?"hello world");
    });

    handle.assert_finished();
}
// `%` and `?` shorthands can be mixed within one event.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn both_shorthands() {
    let (collector, handle) = collector::mock()
        .event(
            event::mock().with_fields(
                field::mock("display_field")
                    .with_value(&display("hello world"))
                    .and(field::mock("debug_field").with_value(&debug("hello world")))
                    .only(),
            ),
        )
        .done()
        .run_with_handle();
    with_default(collector, || {
        event!(Level::TRACE, display_field = %"hello world", debug_field = ?"hello world");
    });

    handle.assert_finished();
}
// `parent:` sets an explicit parent span for an event.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn explicit_child() {
    let (collector, handle) = collector::mock()
        .new_span(span::mock().named("foo"))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .done()
        .run_with_handle();

    with_default(collector, || {
        let foo = span!(Level::TRACE, "foo");
        event!(parent: foo.id(), Level::TRACE, "bar");
    });

    handle.assert_finished();
}
// The explicit `parent:` form works through every level-shorthand macro.
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)]
#[test]
fn explicit_child_at_levels() {
    let (collector, handle) = collector::mock()
        .new_span(span::mock().named("foo"))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .event(event::mock().with_explicit_parent(Some("foo")))
        .done()
        .run_with_handle();

    with_default(collector, || {
        let foo = span!(Level::TRACE, "foo");
        trace!(parent: foo.id(), "a");
        debug!(parent: foo.id(), "b");
        info!(parent: foo.id(), "c");
        warn!(parent: foo.id(), "d");
        error!(parent: foo.id(), "e");
    });

    handle.assert_finished();
}
| 30.632979 | 101 | 0.519708 |
ed144a7f597bafbdbca6cb584afe0abe23ad9576 | 1,628 | use crate::ast::CodeGen;
use crate::ast::statements::assign_object::AssignObject;
use crate::ast::statements::break_object::BreakObject;
use crate::ast::statements::call_object::CallObject;
use crate::ast::statements::if_object::IfObject;
use crate::ast::statements::let_object::LetObject;
use crate::ast::statements::loop_object::LoopObject;
use crate::ast::statements::ret_object::RetObject;
use crate::error::error_token::ErrorToken;
///
/// `StatementObject` is an object which represents a statement of sources.
///
#[derive(Debug,PartialEq)]
pub enum StatementObject {
    // Variable assignment, e.g. `x = expr`.
    AssignObject(Box<AssignObject>),
    // Break out of the innermost loop.
    BreakObject(Box<BreakObject>),
    // Function call used as a statement.
    CallObject(Box<CallObject>),
    // Conditional with optional else.
    IfObject(Box<IfObject>),
    // Variable declaration/binding.
    LetObject(Box<LetObject>),
    // Loop construct.
    LoopObject(Box<LoopObject>),
    // Return from the enclosing function.
    RetObject(Box<RetObject>)
}
impl<'ctx> StatementObject {
    ///
    /// Generate the part of the abstract syntax tree.
    ///
    // Dispatches to the boxed node's own `codegen`; every variant's node type
    // exposes the same `codegen(&CodeGen) -> Result<(), ErrorToken>` shape.
    pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {
        match self {
            StatementObject::AssignObject(obj) => obj.codegen(gen),
            StatementObject::BreakObject(obj) => obj.codegen(gen),
            StatementObject::CallObject(obj) => obj.codegen(gen),
            StatementObject::IfObject(obj) => obj.codegen(gen),
            StatementObject::LetObject(obj) => obj.codegen(gen),
            StatementObject::LoopObject(obj) => obj.codegen(gen),
            StatementObject::RetObject(obj) => obj.codegen(gen)
        }
    }
}
pub mod assign_object;
pub mod break_object;
pub mod call_object;
pub mod if_object;
pub mod let_object;
pub mod loop_object;
pub mod ret_object;
| 33.22449 | 75 | 0.689189 |
79c89b199186f482b8f7513daf7939adcb017700 | 10,727 | /*
* Copyright (C) 2015 Benjamin Fry <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//! Query struct for looking up resource records
use std::fmt;
use std::fmt::{Display, Formatter};
use crate::error::*;
use crate::rr::dns_class::DNSClass;
use crate::rr::domain::Name;
use crate::rr::record_type::RecordType;
use crate::serialize::binary::*;
#[cfg(feature = "mdns")]
/// From [RFC 6762](https://tools.ietf.org/html/rfc6762#section-5.4)
/// ```text
// To avoid large floods of potentially unnecessary responses in these
// cases, Multicast DNS defines the top bit in the class field of a DNS
// question as the unicast-response bit.
/// ```
const MDNS_UNICAST_RESPONSE: u16 = 1 << 15;
/// Query struct for looking up resource records, basically a resource record without RDATA.
///
/// [RFC 1035, DOMAIN NAMES - IMPLEMENTATION AND SPECIFICATION, November 1987](https://tools.ietf.org/html/rfc1035)
///
/// ```text
/// 4.1.2. Question section format
///
/// The question section is used to carry the "question" in most queries,
/// i.e., the parameters that define what is being asked. The section
/// contains QDCOUNT (usually 1) entries, each of the following format:
///
/// 1 1 1 1 1 1
/// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
/// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
/// | |
/// / QNAME / ZNAME /
/// / /
/// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
/// | QTYPE / ZTYPE |
/// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
/// | QCLASS / ZCLASS |
/// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
///
/// ```
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Query {
    // QNAME: the domain name being queried.
    name: Name,
    // QTYPE: record type being requested (defaults to A).
    query_type: RecordType,
    // QCLASS: query class (defaults to IN).
    query_class: DNSClass,
    // mDNS-only unicast-response bit (top bit of the class field on the wire).
    #[cfg(feature = "mdns")]
    mdns_unicast_response: bool,
}
impl Default for Query {
    /// Return a default query with an empty name and A, IN for the query_type and query_class
    fn default() -> Self {
        Self {
            name: Name::new(),
            query_type: RecordType::A,
            query_class: DNSClass::IN,
            // mDNS unicast-response is off by default.
            #[cfg(feature = "mdns")]
            mdns_unicast_response: false,
        }
    }
}
impl Query {
    /// Return a default query with an empty name and A, IN for the query_type and query_class
    pub fn new() -> Self {
        Self::default()
    }

    /// Create a new query from name and type, class defaults to IN
    #[allow(clippy::self_named_constructors)]
    pub fn query(name: Name, query_type: RecordType) -> Self {
        Self {
            name,
            query_type,
            query_class: DNSClass::IN,
            #[cfg(feature = "mdns")]
            mdns_unicast_response: false,
        }
    }

    /// replaces name with the new name
    pub fn set_name(&mut self, name: Name) -> &mut Self {
        self.name = name;
        self
    }

    /// Specify the RecordType being queried
    pub fn set_query_type(&mut self, query_type: RecordType) -> &mut Self {
        self.query_type = query_type;
        self
    }

    /// Specify the DNS class of the Query, almost always IN
    pub fn set_query_class(&mut self, query_class: DNSClass) -> &mut Self {
        self.query_class = query_class;
        self
    }

    /// Changes mDNS unicast-response bit
    /// See [RFC 6762](https://tools.ietf.org/html/rfc6762#section-5.4)
    #[cfg(feature = "mdns")]
    #[cfg_attr(docsrs, doc(cfg(feature = "mdns")))]
    pub fn set_mdns_unicast_response(&mut self, flag: bool) -> &mut Self {
        self.mdns_unicast_response = flag;
        self
    }

    /// ```text
    /// QNAME           a domain name represented as a sequence of labels, where
    ///                 each label consists of a length octet followed by that
    ///                 number of octets.  The domain name terminates with the
    ///                 zero length octet for the null label of the root.  Note
    ///                 that this field may be an odd number of octets; no
    ///                 padding is used.
    /// ```
    pub fn name(&self) -> &Name {
        &self.name
    }

    /// ```text
    /// QTYPE           a two octet code which specifies the type of the query.
    ///                 The values for this field include all codes valid for a
    ///                 TYPE field, together with some more general codes which
    ///                 can match more than one type of RR.
    /// ```
    pub fn query_type(&self) -> RecordType {
        self.query_type
    }

    /// ```text
    /// QCLASS          a two octet code that specifies the class of the query.
    ///                 For example, the QCLASS field is IN for the Internet.
    /// ```
    pub fn query_class(&self) -> DNSClass {
        self.query_class
    }

    /// Returns if the mDNS unicast-response bit is set or not
    /// See [RFC 6762](https://tools.ietf.org/html/rfc6762#section-5.4)
    #[cfg(feature = "mdns")]
    #[cfg_attr(docsrs, doc(cfg(feature = "mdns")))]
    pub fn mdns_unicast_response(&self) -> bool {
        self.mdns_unicast_response
    }

    /// Consumes `Query` and returns it's components
    pub fn into_parts(self) -> QueryParts {
        self.into()
    }
}
/// Consumes `Query` giving public access to fields of `Query` so they can
/// be destructured and taken by value.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct QueryParts {
    /// QNAME
    pub name: Name,
    /// QTYPE
    pub query_type: RecordType,
    /// QCLASS
    pub query_class: DNSClass,
    /// mDNS unicast-response bit set or not
    #[cfg(feature = "mdns")]
    #[cfg_attr(docsrs, doc(cfg(feature = "mdns")))]
    pub mdns_unicast_response: bool,
}
impl From<Query> for QueryParts {
    fn from(q: Query) -> Self {
        // cfg_if is used so the destructuring pattern matches the exact set of
        // fields `Query` has under each feature combination (the
        // `mdns_unicast_response` field only exists with the `mdns` feature).
        cfg_if::cfg_if! {
            if #[cfg(feature = "mdns")] {
                let Query {
                    name,
                    query_type,
                    query_class,
                    mdns_unicast_response,
                } = q;
            } else {
                let Query {
                    name,
                    query_type,
                    query_class,
                } = q;
            }
        }
        Self {
            name,
            query_type,
            query_class,
            #[cfg(feature = "mdns")]
            mdns_unicast_response,
        }
    }
}
impl BinEncodable for Query {
    // Emits the question section wire format: QNAME, QTYPE, then QCLASS.
    fn emit(&self, encoder: &mut BinEncoder<'_>) -> ProtoResult<()> {
        self.name.emit(encoder)?;
        self.query_type.emit(encoder)?;
        #[cfg(not(feature = "mdns"))]
        self.query_class.emit(encoder)?;
        #[cfg(feature = "mdns")]
        {
            // For mDNS, the unicast-response flag is OR'ed into the high bit
            // of the QCLASS field (RFC 6762 §5.4).
            if self.mdns_unicast_response {
                encoder.emit_u16(u16::from(self.query_class()) | MDNS_UNICAST_RESPONSE)?;
            } else {
                self.query_class.emit(encoder)?;
            }
        }
        Ok(())
    }
}
impl<'r> BinDecodable<'r> for Query {
    // Reads the question section wire format: QNAME, QTYPE, then QCLASS,
    // splitting out the mDNS unicast-response bit when the feature is enabled.
    fn read(decoder: &mut BinDecoder<'r>) -> ProtoResult<Self> {
        let name = Name::read(decoder)?;
        let query_type = RecordType::read(decoder)?;
        #[cfg(feature = "mdns")]
        let mut mdns_unicast_response = false;
        #[cfg(not(feature = "mdns"))]
        let query_class = DNSClass::read(decoder)?;
        #[cfg(feature = "mdns")]
        let query_class = {
            let query_class_value =
                decoder.read_u16()?.unverified(/*DNSClass::from_u16 will verify the value*/);
            // High bit carries the unicast-response flag; mask it off before
            // interpreting the remaining bits as a DNSClass.
            if query_class_value & MDNS_UNICAST_RESPONSE > 0 {
                mdns_unicast_response = true;
                DNSClass::from_u16(query_class_value & !MDNS_UNICAST_RESPONSE)?
            } else {
                DNSClass::from_u16(query_class_value)?
            }
        };
        Ok(Self {
            name,
            query_type,
            query_class,
            #[cfg(feature = "mdns")]
            mdns_unicast_response,
        })
    }
}
impl Display for Query {
    // Renders the query roughly like a zone-file question line; the mdns
    // build additionally shows the unicast-response flag.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        #[cfg(not(feature = "mdns"))]
        {
            write!(
                f,
                "{name} {class} {ty}",
                name = self.name,
                class = self.query_class,
                ty = self.query_type,
            )
        }
        #[cfg(feature = "mdns")]
        {
            write!(
                f,
                "{name} {class} {ty}; mdns_unicast_response: {mdns}",
                name = self.name,
                class = self.query_class,
                ty = self.query_type,
                mdns = self.mdns_unicast_response
            )
        }
    }
}
#[test]
#[allow(clippy::needless_update)]
// Round-trip test: a query emitted through BinEncoder must decode back to an
// equal value via BinDecoder.
fn test_read_and_emit() {
    let expect = Query {
        name: Name::from_ascii("WWW.example.com").unwrap(),
        query_type: RecordType::AAAA,
        query_class: DNSClass::IN,
        ..Query::default()
    };
    let mut byte_vec: Vec<u8> = Vec::with_capacity(512);
    {
        let mut encoder = BinEncoder::new(&mut byte_vec);
        expect.emit(&mut encoder).unwrap();
    }
    let mut decoder = BinDecoder::new(&byte_vec);
    let got = Query::read(&mut decoder).unwrap();
    assert_eq!(got, expect);
}
#[cfg(feature = "mdns")]
#[test]
// Verifies that the unicast-response flag is encoded as the high bit of the
// QCLASS field on the wire, and is stripped back out on decode.
fn test_mdns_unicast_response_bit_handling() {
    // Offset of QCLASS in the encoded buffer: root name (1 byte) + QTYPE (2 bytes).
    const QCLASS_OFFSET: usize = 1 /* empty name */ +
        std::mem::size_of::<u16>() /* query_type */;
    let mut query = Query::new();
    query.set_mdns_unicast_response(true);
    let mut vec_bytes: Vec<u8> = Vec::with_capacity(512);
    {
        let mut encoder = BinEncoder::new(&mut vec_bytes);
        query.emit(&mut encoder).unwrap();
        // 0x8001 = unicast-response bit (0x8000) | DNSClass::IN (0x0001).
        let query_class_slice = encoder.slice_of(QCLASS_OFFSET, QCLASS_OFFSET + 2);
        assert_eq!(query_class_slice, &[0x80, 0x01]);
    }
    let mut decoder = BinDecoder::new(&vec_bytes);
    let got = Query::read(&mut decoder).unwrap();
    assert_eq!(got.query_class(), DNSClass::IN);
    assert!(got.mdns_unicast_response());
}
| 31.18314 | 115 | 0.541531 |
e463e5814fbcef2d7ab3c7d0b6ca6f3cae94fccc | 215 |
Popuni šemu tako da zbir broja na početku svake strelice i broja na strelici bude jednak broju na kraju strelice.
@center@ @mycanvas()@
| 14.333333 | 114 | 0.497674 |
fc855153dc128a9aba80b2b512c37c603e9f7b91 | 4,557 | #[doc = "Register `HBN_SRAM` reader"]
pub struct R(crate::R<HBN_SRAM_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<HBN_SRAM_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<HBN_SRAM_SPEC>> for R {
fn from(reader: crate::R<HBN_SRAM_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `HBN_SRAM` writer"]
pub struct W(crate::W<HBN_SRAM_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<HBN_SRAM_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<HBN_SRAM_SPEC>> for W {
fn from(writer: crate::W<HBN_SRAM_SPEC>) -> Self {
W(writer)
}
}
// Generated per-field reader/writer proxies. Each writer masks its single bit
// into the shared register value at the field's bit position.
#[doc = "Field `retram_slp` reader - "]
pub struct RETRAM_SLP_R(crate::FieldReader<bool, bool>);
impl RETRAM_SLP_R {
    pub(crate) fn new(bits: bool) -> Self {
        RETRAM_SLP_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for RETRAM_SLP_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `retram_slp` writer - "]
pub struct RETRAM_SLP_W<'a> {
    w: &'a mut W,
}
impl<'a> RETRAM_SLP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // retram_slp occupies bit 7 of the register.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Field `retram_ret` reader - "]
pub struct RETRAM_RET_R(crate::FieldReader<bool, bool>);
impl RETRAM_RET_R {
    pub(crate) fn new(bits: bool) -> Self {
        RETRAM_RET_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for RETRAM_RET_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `retram_ret` writer - "]
pub struct RETRAM_RET_W<'a> {
    w: &'a mut W,
}
impl<'a> RETRAM_RET_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // retram_ret occupies bit 6 of the register.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// Field accessors on the register reader/writer plus the register spec type
// implementing the generic Readable/Writable/Resettable traits.
impl R {
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn retram_slp(&self) -> RETRAM_SLP_R {
        RETRAM_SLP_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn retram_ret(&self) -> RETRAM_RET_R {
        RETRAM_RET_R::new(((self.bits >> 6) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn retram_slp(&mut self) -> RETRAM_SLP_W {
        RETRAM_SLP_W { w: self }
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn retram_ret(&mut self) -> RETRAM_RET_W {
        RETRAM_RET_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "HBN_SRAM.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [hbn_sram](index.html) module"]
pub struct HBN_SRAM_SPEC;
impl crate::RegisterSpec for HBN_SRAM_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [hbn_sram::R](R) reader structure"]
impl crate::Readable for HBN_SRAM_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [hbn_sram::W](W) writer structure"]
impl crate::Writable for HBN_SRAM_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets HBN_SRAM to value 0"]
impl crate::Resettable for HBN_SRAM_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 29.211538 | 398 | 0.58284 |
64e0f112db431cf747a9c2228b7c7d8e7432438b | 446 | use std::env;
use std::process;
use minigrep::Config;
// NOTE: the ANCHOR/ANCHOR_END comments below are mdbook include markers used
// by the Rust book build; keep them exactly as written.
// ANCHOR: here
fn main() {
    let config = Config::build(env::args()).unwrap_or_else(|err| {
        eprintln!("Problem parsing arguments: {err}");
        process::exit(1);
    });

    // --snip--
    // ANCHOR_END: here

    if let Err(e) = minigrep::run(config) {
        eprintln!("Application error: {e}");
        process::exit(1);
    }
    // ANCHOR: here
}
// ANCHOR_END: here
| 18.583333 | 66 | 0.560538 |
641bd15ea76082a46d93137ae838ca73ce7d091b | 2,468 | use crate::consts::PRIMES as primes;
use crate::elliptic_curve::{scalar_mul_for_factorization, EllipticCurve};
use crate::gf::GF;
use crate::traits::{Factorizer, Factors};
use num::bigint::BigInt;
use num_bigint::RandBigInt;
use num_traits::Zero;
/// Elliptic-curve factorization method (ECM) driver over the modulus `n`.
pub struct ECM {
    // The composite number to factor.
    n: BigInt,
}
impl ECM {
    /// Creates a new factorizer for `n`, storing an owned copy of the modulus.
    pub fn new(n: &BigInt) -> Self {
        Self { n: n.clone() }
    }
}
impl Factorizer for ECM {
    /// Runs the free-standing `factorize` routine on the stored modulus.
    fn factorize(&self) -> Option<Factors> {
        let modulus = &self.n;
        factorize(modulus)
    }
}
// NOTE(review): this function appears to be work-in-progress — the outer
// `loop` has no `return`/`break`, so despite the `Option<Factors>` signature
// it never produces a value; the commented-out block below looks like the
// intended point-addition search. TODO confirm before relying on it.
fn factorize(n: &BigInt) -> Option<Factors> {
    let n = n.clone();
    let mut rng = rand::thread_rng();
    let low = BigInt::from(2u64);
    let high = n.clone();
    // Smoothness bound for the stage-1 scalar multiple.
    let l = 242u64;
    loop {
        // Pick a random curve y^2 = x^3 + a*x + b over GF(n) through a random
        // point (x0, y0); b is derived so the point lies on the curve.
        let f = GF::GF(&n);
        let x0 = f.new(&rng.gen_bigint_range(&low, &high));
        let y0 = f.new(&rng.gen_bigint_range(&low, &high));
        let a = f.new(&rng.gen_bigint_range(&low, &high));
        let b = y0.clone().pow(&BigInt::from(2u32))?
            - x0.clone().pow(&BigInt::from(3u32))?
            - a.clone() * x0.clone();
        let e = EllipticCurve::new(&f, &a.value, &b.value);
        let g = e.new_point(&x0.value, &y0.value);
        for p in primes {
            // Raise p to the largest power not exceeding the bound l.
            let mut m: u64 = p.clone();
            //let mut pg; // Previous g
            let mut cg = (BigInt::from(2u32) * g.clone())?; // Current g
            while m.clone() * p.clone() <= l {
                m *= p.clone();
            }
            let xg = scalar_mul_for_factorization(BigInt::from(m), g.clone());
            println!("{p} | {xg:?}");
            //for k in 1..=m {
            //    pg = cg.clone();
            //    cg = (cg.clone() + g.clone())?;
            //    println!("{k}/{m}");
            //    if cg == e.o() {
            //        let m = pg.x() - g.x();
            //        let q = n.gcd(&m);
            //        println!("{k} | pg: {pg} cg: {cg} m: {m} found: {q} n: {n}");
            //        if q != BigInt::one() && q != n {
            //            println!("FOUND!!!!: q: {q}");
            //            break;
            //        }
            //    }
            //}
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bi;
    use num::bigint::BigInt;
    #[test]
    // Smoke test: run ECM on a small composite and print the result.
    // (No assertion — `factorize` currently loops; see note above.)
    fn test_ecm() {
        //let n = bi!("121439531096594251777", 10);
        //let n = bi!("455839", 10);
        let n = bi!("835791", 10);
        let ff = ECM::new(&n);
        let f = ff.factorize();
        println!("f: {f:?}");
    }
}
| 28.367816 | 89 | 0.448947 |
db5408db30e42e8879aa25e5a8b3b9edf6234f1a | 1,679 | fn print(count: &mut usize, id: usize, layout: &layout::tree::LayoutR) {
*count += 1;
debug_println!("result: {:?} {:?} {:?}", *count, id, layout);
}
/// Builds a three-node layout tree (absolute 1920x1024 root, a 100x100 child,
/// and a 10-wide grandchild with a 10px start margin), then computes the
/// layout, reporting each node through the `print` callback.
pub fn compute() {
    let mut layout_tree = layout::tree::LayoutTree::default();
    // Root node (id 1) — absolutely positioned, full screen.
    layout_tree.insert(
        1,
        0,
        0,
        layout::idtree::InsertType::Back,
        layout::style::Style {
            position_type: layout::style::PositionType::Absolute,
            size: layout::geometry::Size {
                width: layout::style::Dimension::Points(1920.0),
                height: layout::style::Dimension::Points(1024.0),
            },
            ..Default::default()
        },
    );
    // Child node (id 2) — fixed 100x100 box.
    layout_tree.insert(
        2,
        1,
        0,
        layout::idtree::InsertType::Back,
        layout::style::Style {
            size: layout::geometry::Size {
                width: layout::style::Dimension::Points(100f32),
                height: layout::style::Dimension::Points(100f32),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Grandchild node (id 3) — 10 wide, with a 10px margin at the start edge.
    layout_tree.insert(
        3,
        2,
        0,
        layout::idtree::InsertType::Back,
        layout::style::Style {
            size: layout::geometry::Size {
                width: layout::style::Dimension::Points(10f32),
                ..Default::default()
            },
            margin: layout::geometry::Rect {
                start: layout::style::Dimension::Points(10f32),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    layout_tree.compute(print, &mut 0);
}
| 31.092593 | 73 | 0.473496 |
e4e45d5e201b29e2061a75f8db780edae623fb6b | 7,668 | use hal::backend::FastHashMap;
use std::ffi::CStr;
use std::sync::Mutex;
use std::{mem, ptr};
use d3d12;
use winapi::shared::minwindef::{FALSE, TRUE};
use winapi::shared::{dxgiformat, dxgitype, winerror};
use winapi::um::d3d12::*;
use winapi::Interface;
use native::{self, descriptor, pso};
/// A cached blit pipeline: the compiled PSO plus the root signature it was
/// created against.
#[derive(Clone, Debug)]
pub struct BlitPipe {
    pub pipeline: native::PipelineState,
    pub signature: native::RootSignature,
}
impl BlitPipe {
    // Safety: releases the underlying D3D12 COM objects; the caller must
    // ensure no other clone of this BlitPipe is still in use afterwards.
    pub unsafe fn destroy(&self) {
        self.pipeline.destroy();
        self.signature.destroy();
    }
}
// Information to pass to the shader
// (pushed as root constants; layout must match the HLSL cbuffer in blit.hlsl).
#[repr(C)]
#[derive(Debug)]
pub struct BlitData {
    // Source rectangle origin in normalized texture coordinates.
    pub src_offset: [f32; 2],
    // Source rectangle extent in normalized texture coordinates.
    pub src_extent: [f32; 2],
    // Source array layer to sample.
    pub layer: f32,
    // Source mip level to sample.
    pub level: f32,
}
/// Cache key for blit pipelines: (destination format, sampler filter).
pub type BlitKey = (dxgiformat::DXGI_FORMAT, d3d12::D3D12_FILTER);
// Pipeline cache keyed by BlitKey.
type BlitMap = FastHashMap<BlitKey, BlitPipe>;
/// Internal service pipelines (currently 2D color blits), lazily created and
/// cached per (format, filter) key.
#[derive(Debug)]
pub(crate) struct ServicePipes {
    pub(crate) device: native::Device,
    // Lazily-populated cache of 2D color blit pipelines.
    blits_2d_color: Mutex<BlitMap>,
}
impl ServicePipes {
    /// Creates an empty pipeline cache for `device`.
    pub fn new(device: native::Device) -> Self {
        ServicePipes {
            device,
            blits_2d_color: Mutex::new(FastHashMap::default()),
        }
    }
    // Safety: destroys every cached pipeline; must not be called while any
    // returned BlitPipe clone is still in use.
    pub unsafe fn destroy(&self) {
        let blits = self.blits_2d_color.lock().unwrap();
        for (_, pipe) in &*blits {
            pipe.destroy();
        }
    }
    /// Returns the cached blit pipeline for `key`, creating it on first use.
    pub fn get_blit_2d_color(&self, key: BlitKey) -> BlitPipe {
        let mut blits = self.blits_2d_color.lock().unwrap();
        blits
            .entry(key)
            .or_insert_with(|| self.create_blit_2d_color(key))
            .clone()
    }
    // Builds a graphics PSO that samples a 2D texture (SRV at t0, static
    // sampler at s0, BlitData as root constants at b0) and writes to a render
    // target of `dst_format` using the vs_blit_2d/ps_blit_2d shaders.
    fn create_blit_2d_color(&self, (dst_format, filter): BlitKey) -> BlitPipe {
        // Root signature: one SRV table + root constants sized to BlitData.
        let descriptor_range = [descriptor::DescriptorRange::new(
            descriptor::DescriptorRangeType::SRV,
            1,
            native::descriptor::Binding {
                register: 0,
                space: 0,
            },
            0,
        )];
        let root_parameters = [
            descriptor::RootParameter::descriptor_table(
                descriptor::ShaderVisibility::All,
                &descriptor_range,
            ),
            descriptor::RootParameter::constants(
                descriptor::ShaderVisibility::All,
                native::descriptor::Binding {
                    register: 0,
                    space: 0,
                },
                // Number of 32-bit root constants = BlitData size in dwords.
                (mem::size_of::<BlitData>() / 4) as _,
            ),
        ];
        // One static sampler with the requested filter, clamped addressing.
        let static_samplers = [descriptor::StaticSampler::new(
            descriptor::ShaderVisibility::PS,
            native::descriptor::Binding {
                register: 0,
                space: 0,
            },
            filter,
            [
                d3d12::D3D12_TEXTURE_ADDRESS_MODE_CLAMP,
                d3d12::D3D12_TEXTURE_ADDRESS_MODE_CLAMP,
                d3d12::D3D12_TEXTURE_ADDRESS_MODE_CLAMP,
            ],
            0.0,
            0,
            d3d12::D3D12_COMPARISON_FUNC_ALWAYS,
            descriptor::StaticBorderColor::TransparentBlack,
            0.0 .. d3d12::D3D12_FLOAT32_MAX,
        )];
        let ((signature_raw, error), _hr) = native::RootSignature::serialize(
            descriptor::RootSignatureVersion::V1_0,
            &root_parameters,
            &static_samplers,
            descriptor::RootSignatureFlags::empty(),
        );
        if !error.is_null() {
            error!("D3D12SerializeRootSignature error: {:?}", unsafe {
                error.as_c_str().to_str().unwrap()
            });
            unsafe { error.destroy() };
        }
        let (signature, _hr) = self.device.create_root_signature(signature_raw, 0);
        unsafe { signature_raw.destroy() };
        // Compile the VS/PS entry points from the embedded HLSL source.
        let shader_src = include_bytes!("../shaders/blit.hlsl");
        // TODO: check results
        let ((vs, _), _hr_vs) = pso::Shader::compile(
            shader_src,
            unsafe { CStr::from_bytes_with_nul_unchecked(b"vs_5_0\0") },
            unsafe { CStr::from_bytes_with_nul_unchecked(b"vs_blit_2d\0") },
            pso::ShaderCompileFlags::empty(),
        );
        let ((ps, _), _hr_ps) = pso::Shader::compile(
            shader_src,
            unsafe { CStr::from_bytes_with_nul_unchecked(b"ps_5_0\0") },
            unsafe { CStr::from_bytes_with_nul_unchecked(b"ps_blit_2d\0") },
            pso::ShaderCompileFlags::empty(),
        );
        // Single render target in the destination format; blending disabled.
        let mut rtvs = [dxgiformat::DXGI_FORMAT_UNKNOWN; 8];
        rtvs[0] = dst_format;
        let dummy_target = D3D12_RENDER_TARGET_BLEND_DESC {
            BlendEnable: FALSE,
            LogicOpEnable: FALSE,
            SrcBlend: D3D12_BLEND_ZERO,
            DestBlend: D3D12_BLEND_ZERO,
            BlendOp: D3D12_BLEND_OP_ADD,
            SrcBlendAlpha: D3D12_BLEND_ZERO,
            DestBlendAlpha: D3D12_BLEND_ZERO,
            BlendOpAlpha: D3D12_BLEND_OP_ADD,
            LogicOp: D3D12_LOGIC_OP_CLEAR,
            RenderTargetWriteMask: D3D12_COLOR_WRITE_ENABLE_ALL as _,
        };
        let render_targets = [dummy_target; 8];
        // Full PSO description: no input layout (fullscreen triangle generated
        // in the VS), no depth/stencil, single-sample.
        let pso_desc = d3d12::D3D12_GRAPHICS_PIPELINE_STATE_DESC {
            pRootSignature: signature.as_mut_ptr(),
            VS: *pso::Shader::from_blob(vs),
            PS: *pso::Shader::from_blob(ps),
            GS: *pso::Shader::null(),
            DS: *pso::Shader::null(),
            HS: *pso::Shader::null(),
            StreamOutput: d3d12::D3D12_STREAM_OUTPUT_DESC {
                pSODeclaration: ptr::null(),
                NumEntries: 0,
                pBufferStrides: ptr::null(),
                NumStrides: 0,
                RasterizedStream: 0,
            },
            BlendState: d3d12::D3D12_BLEND_DESC {
                AlphaToCoverageEnable: FALSE,
                IndependentBlendEnable: FALSE,
                RenderTarget: render_targets,
            },
            SampleMask: !0,
            RasterizerState: D3D12_RASTERIZER_DESC {
                FillMode: D3D12_FILL_MODE_SOLID,
                CullMode: D3D12_CULL_MODE_NONE,
                FrontCounterClockwise: TRUE,
                DepthBias: 0,
                DepthBiasClamp: 0.0,
                SlopeScaledDepthBias: 0.0,
                DepthClipEnable: FALSE,
                MultisampleEnable: FALSE,
                ForcedSampleCount: 0,
                AntialiasedLineEnable: FALSE,
                ConservativeRaster: D3D12_CONSERVATIVE_RASTERIZATION_MODE_OFF,
            },
            DepthStencilState: unsafe { mem::zeroed() },
            InputLayout: d3d12::D3D12_INPUT_LAYOUT_DESC {
                pInputElementDescs: ptr::null(),
                NumElements: 0,
            },
            IBStripCutValue: d3d12::D3D12_INDEX_BUFFER_STRIP_CUT_VALUE_DISABLED,
            PrimitiveTopologyType: D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,
            NumRenderTargets: 1,
            RTVFormats: rtvs,
            DSVFormat: dxgiformat::DXGI_FORMAT_UNKNOWN,
            SampleDesc: dxgitype::DXGI_SAMPLE_DESC {
                Count: 1,
                Quality: 0,
            },
            NodeMask: 0,
            CachedPSO: d3d12::D3D12_CACHED_PIPELINE_STATE {
                pCachedBlob: ptr::null(),
                CachedBlobSizeInBytes: 0,
            },
            Flags: d3d12::D3D12_PIPELINE_STATE_FLAG_NONE,
        };
        let mut pipeline = native::PipelineState::null();
        let hr = unsafe {
            self.device.CreateGraphicsPipelineState(
                &pso_desc,
                &d3d12::ID3D12PipelineState::uuidof(),
                pipeline.mut_void(),
            )
        };
        assert_eq!(hr, winerror::S_OK);
        BlitPipe {
            pipeline,
            signature,
        }
    }
}
| 32.769231 | 83 | 0.552817 |
d7d871e24d5b42deb9392e4e1a057db2c0310f27 | 14,544 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::common_utils::common::macros::{fx_err_and_bail, with_line};
use anyhow::Error;
use fidl_fuchsia_power as fpower;
use fidl_fuchsia_power_test as spower;
use fuchsia_component::client::connect_to_protocol;
use fuchsia_syslog::macros::{fx_log_err, fx_log_warn};
use fuchsia_zircon::Status;
use parking_lot::RwLock;
use serde_json::Value;
use std::time::Duration;
// Inner state guarded by the facade's RwLock; holds the (optional) FIDL proxy.
#[derive(Debug)]
struct InnerBatterySimulatorFacade {
    proxy: Option<spower::BatterySimulatorProxy>,
}
/// SL4F facade wrapping the `fuchsia.power.test.BatterySimulator` protocol.
#[derive(Debug)]
pub struct BatterySimulatorFacade {
    inner: RwLock<InnerBatterySimulatorFacade>,
}
// Log tag used by the fx_err_and_bail!/with_line! macros.
static TAG: &str = "BatterySimulatorFacade";
impl BatterySimulatorFacade {
    /// Creates a facade with no proxy; call `init_proxy` before use.
    pub fn new() -> Self {
        BatterySimulatorFacade { inner: RwLock::new(InnerBatterySimulatorFacade { proxy: None }) }
    }
    /// Initialize proxy to perform changes
    /// # Arguments
    /// * 'given_proxy' - An optional proxy for testing purposes
    pub fn init_proxy(
        &self,
        given_proxy: Option<spower::BatterySimulatorProxy>,
    ) -> Result<(), Error> {
        if given_proxy.is_none() {
            let proxy = self.create_proxy()?;
            self.inner.write().proxy = Some(proxy);
        } else {
            self.inner.write().proxy = given_proxy;
        }
        Ok(())
    }
    /// Create proxy to BatterySimulator to perform changes
    pub fn create_proxy(&self) -> Result<spower::BatterySimulatorProxy, Error> {
        match connect_to_protocol::<spower::BatterySimulatorMarker>() {
            Ok(service) => Ok(service),
            Err(err) => fx_err_and_bail!(
                &with_line!(TAG),
                format_err!("Failed to create battery simulator proxy: {:?}", err)
            ),
        }
    }
    /// Checks if the Facade proxy has been initialized
    pub fn check_proxy(&self) -> Result<(), Error> {
        if self.inner.read().proxy.as_ref().is_none() {
            bail!("Facade proxy has not been initialized");
        }
        Ok(())
    }
    /// Set Time Remaining to given value represented in seconds
    /// # Arguments
    /// * 'time_remaining' - A json object with 'message' as the key and an integer as a value
    /// representing time in seconds.
    // TODO(fxbug.dev/48702): Check type conversion
    pub fn set_time_remaining(&self, time_remaining: Value) -> Result<(), Error> {
        self.check_proxy()?;
        let seconds: u64 = match time_remaining["message"].as_u64() {
            Some(v) => v,
            None => bail!("Unable to get seconds"),
        };
        // NOTE(review): the second argument of Duration::new is nanoseconds,
        // despite this variable's name; the value is 0, so it is harmless.
        let microseconds = 0;
        let duration = Duration::new(seconds, microseconds);
        match self.inner.read().proxy.clone() {
            // FIDL expects the remaining time in nanoseconds.
            Some(p) => p.set_time_remaining(duration.as_nanos() as i64)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Set Battery Status to given value
    /// # Arguments
    /// * 'battery_status' - A json object with 'message' as the key and an string as a value
    /// representing the battery status value
    pub fn set_battery_status(&self, battery_status: Value) -> Result<(), Error> {
        self.check_proxy()?;
        let value: &str = match battery_status["message"].as_str() {
            Some(v) => &v,
            None => bail!("Unable to get battery status"),
        };
        // Map the string constant to the FIDL enum; reject anything else.
        let res = match value {
            "UNKNOWN" => fpower::BatteryStatus::Unknown,
            "OK" => fpower::BatteryStatus::Ok,
            "NOT_AVAILABLE" => fpower::BatteryStatus::NotAvailable,
            "NOT_PRESENT" => fpower::BatteryStatus::NotPresent,
            _ => fx_err_and_bail!(&with_line!(TAG), format_err!("Battery Status not valid")),
        };
        match self.inner.read().proxy.clone() {
            Some(p) => p.set_battery_status(res)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Set Charge Status to given value represented as a string
    /// # Arguments
    /// * 'charge_status' - A json object with 'message' as the key and an string as a value
    /// representing the charge status value
    pub fn set_charge_status(&self, charge_status: Value) -> Result<(), Error> {
        self.check_proxy()?;
        let value: &str = match charge_status["message"].as_str() {
            Some(v) => &v,
            None => bail!("Unable to get charge status"),
        };
        let res = match value {
            "UNKNOWN" => fpower::ChargeStatus::Unknown,
            "NOT_CHARGING" => fpower::ChargeStatus::NotCharging,
            "CHARGING" => fpower::ChargeStatus::Charging,
            "DISCHARGING" => fpower::ChargeStatus::Discharging,
            "FULL" => fpower::ChargeStatus::Full,
            _ => fx_err_and_bail!(&with_line!(TAG), format_err!("Charge Status not valid")),
        };
        match self.inner.read().proxy.clone() {
            Some(p) => p.set_charge_status(res)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Set Level Status to given value represented as a string
    /// # Arguments
    /// * 'level_status' - A json object with 'message' as the key and an string as a value
    /// representing the level status value
    pub fn set_level_status(&self, level_status: Value) -> Result<(), Error> {
        self.check_proxy()?;
        let value: &str = match level_status["message"].as_str() {
            Some(v) => &v,
            None => bail!("Unable to get level status"),
        };
        let res = match value {
            "UNKNOWN" => fpower::LevelStatus::Unknown,
            "OK" => fpower::LevelStatus::Ok,
            "WARNING" => fpower::LevelStatus::Warning,
            "LOW" => fpower::LevelStatus::Low,
            "CRITICAL" => fpower::LevelStatus::Critical,
            _ => fx_err_and_bail!(&with_line!(TAG), format_err!("Level Status not valid")),
        };
        match self.inner.read().proxy.clone() {
            Some(p) => p.set_level_status(res)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Set Charge Source to given value represented
    /// # Arguments
    /// * 'charge_source' - A json object with 'message' as the key and an string as a value
    /// representing the charge source value
    pub fn set_charge_source(&self, charge_source: Value) -> Result<(), Error> {
        self.check_proxy()?;
        let value: &str = match &charge_source["message"].as_str() {
            Some(v) => &v,
            None => bail!("Unable to get charge source"),
        };
        let res = match value {
            "UNKNOWN" => fpower::ChargeSource::Unknown,
            "NONE" => fpower::ChargeSource::None,
            "AC_ADAPTER" => fpower::ChargeSource::AcAdapter,
            "USB" => fpower::ChargeSource::Usb,
            "WIRELESS" => fpower::ChargeSource::Wireless,
            _ => fx_err_and_bail!(&with_line!(TAG), format_err!("Charge Source not valid")),
        };
        match self.inner.read().proxy.clone() {
            Some(p) => p.set_charge_source(res)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Set Battery Percentage to given value
    /// # Arguments
    /// * 'battery_percentage' - A json object with 'message' as the key and an integer as a value
    /// representing the battery percentage
    pub fn set_battery_percentage(&self, battery_percentage: Value) -> Result<(), Error> {
        self.check_proxy()?;
        // NOTE(review): this relies on 'message' being a JSON number;
        // a JSON string would stringify with quotes and fail to parse.
        let percent: f32 = match battery_percentage["message"].to_string().parse() {
            Ok(v) => v,
            Err(e) => bail!("Unable to get battery percentage {}", e),
        };
        // Reject values outside the valid 0..=100 percentage range.
        let battery_percentage_lower_bound = 0.0;
        let battery_percentage_upper_bound = 100.0;
        if percent < battery_percentage_lower_bound || percent > battery_percentage_upper_bound {
            fx_err_and_bail!(
                &with_line!(TAG),
                format_err!("Battery Percentage not between 0 and 100")
            )
        }
        match self.inner.read().proxy.clone() {
            Some(p) => p.set_battery_percentage(percent)?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Disconnect Real Battery
    pub fn disconnect_real_battery(&self) -> Result<(), Error> {
        self.check_proxy()?;
        match self.inner.read().proxy.clone() {
            Some(p) => p.disconnect_real_battery()?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Reconnect Real Battery
    pub fn reconnect_real_battery(&self) -> Result<(), Error> {
        self.check_proxy()?;
        match self.inner.read().proxy.clone() {
            Some(p) => p.reconnect_real_battery()?,
            None => bail!("Proxy not set"),
        };
        Ok(())
    }
    /// Returns the simulated battery info
    ///
    /// Returns `Ok(None)` when the simulator channel is already closed
    /// (PEER_CLOSED), since the target may not have battery support.
    pub async fn get_simulated_battery_info(&self) -> Result<Option<fpower::BatteryInfo>, Error> {
        self.check_proxy()?;
        match self.inner.read().proxy.clone() {
            Some(p) => match p.get_battery_info().await {
                Ok(battery_info) => Ok(Some(battery_info)),
                Err(fidl::Error::ClientChannelClosed { status: Status::PEER_CLOSED, .. }) => {
                    fx_log_warn!("Battery Simulator not available.");
                    Ok(None)
                }
                Err(e) => fx_err_and_bail!(
                    &with_line!(TAG),
                    format_err!("Couldn't get BatteryInfo {}", e)
                ),
            },
            None => bail!("Proxy not set"),
        }
    }
    /// Returns a boolean value indicating if the device is simulating the battery state
    ///
    /// Returns `Ok(None)` when the simulator channel is already closed
    /// (PEER_CLOSED), since the target may not have battery support.
    pub async fn get_simulating_state(&self) -> Result<Option<bool>, Error> {
        self.check_proxy()?;
        match self.inner.read().proxy.clone() {
            Some(p) => match p.is_simulating().await {
                Ok(simulation_state) => Ok(Some(simulation_state)),
                Err(fidl::Error::ClientChannelClosed { status: Status::PEER_CLOSED, .. }) => {
                    fx_log_warn!("Battery Simulator not available.");
                    Ok(None)
                }
                Err(e) => fx_err_and_bail!(
                    &with_line!(TAG),
                    format_err!("Couldn't get simulation state {}", e)
                ),
            },
            None => bail!("Proxy not set"),
        }
    }
}
mod tests {
use super::*;
use serde_json::json;
#[fuchsia_async::run_singlethreaded(test)]
async fn test_disconnect() {
// Launch battery manager
// Get Proxy and create Facade
let proxy = connect_to_protocol::<spower::BatterySimulatorMarker>().unwrap();
let facade = BatterySimulatorFacade::new();
let init_result = facade.init_proxy(Some(proxy));
assert!(init_result.is_ok(), "Failed to initialize proxy");
// Disconnect
let res = facade.disconnect_real_battery();
assert!(res.is_ok(), "Failed to disconnect");
// Check if simulating
let simulation_state = facade.get_simulating_state().await;
// When getting the state back, note that the DUT may not include
// battery support, so it may be empty. This is not a test failure.
match simulation_state.unwrap() {
Some(state) => {
assert_eq!(state, true);
// Reconnect real battery
let res = facade.reconnect_real_battery();
assert!(res.is_ok(), "Failed to reconnect");
}
None => fx_log_warn!("No battery state provided, skipping check"),
}
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_set_battery_percentage() {
// Launch battery manager
// Get Proxy and create Facade
let proxy = connect_to_protocol::<spower::BatterySimulatorMarker>().unwrap();
let facade = BatterySimulatorFacade::new();
let init_result = facade.init_proxy(Some(proxy));
assert!(init_result.is_ok(), "Failed to initialize proxy");
// Disconnect
let res = facade.disconnect_real_battery();
assert!(res.is_ok(), "Failed to disconnect");
// Set Battery Percentage
let res = facade.set_battery_percentage(json!({"message": 12}));
assert!(res.is_ok(), "Failed to set battery percentage");
// Get BatteryInfo
let battery_info = facade.get_simulated_battery_info().await;
assert!(battery_info.is_ok(), "Failed to get battery info");
// When getting the battery info back, note that the DUT may not include
// battery support, so info may be empty. This is not a test failure.
match battery_info.unwrap() {
Some(info) => {
assert_eq!(info.level_percent.unwrap(), 12.0);
// Reconnect real battery
let res = facade.reconnect_real_battery();
assert!(res.is_ok(), "Failed to reconnect");
}
None => fx_log_warn!("No battery info provided, skipping check"),
}
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_set_charge_source() {
// Launch battery manager
// Get Proxy and create Facade
let proxy = connect_to_protocol::<spower::BatterySimulatorMarker>().unwrap();
let facade = BatterySimulatorFacade::new();
let init_result = facade.init_proxy(Some(proxy));
assert!(init_result.is_ok(), "Failed to initialize proxy");
// Disconnect
let res = facade.disconnect_real_battery();
assert!(res.is_ok(), "Failed to disconnect");
// Set Charge Source
let res = facade.set_charge_source(json!({"message": "WIRELESS"}));
assert!(res.is_ok(), "Failed to set battery percentage");
// Get BatteryInfo
let battery_info = facade.get_simulated_battery_info().await;
assert!(battery_info.is_ok(), "Failed to get battery info");
match battery_info.unwrap() {
Some(info) => {
assert_eq!(info.charge_source.unwrap(), fpower::ChargeSource::Wireless);
// Reconnect real battery
let res = facade.reconnect_real_battery();
assert!(res.is_ok(), "Failed to reconnect");
}
None => fx_log_warn!("No battery info provided, skipping check"),
}
}
}
| 40.625698 | 98 | 0.580102 |
9cb28cde4659f8c312b4da014faecbbbe6ac0be1 | 9,792 | //! # Authority
//! A module that provides governance features, including dispatching calls on
//! behalf of other accounts and scheduling dispatchables.
//!
//! - [`Trait`](./trait.Trait.html)
//! - [`Call`](./enum.Call.html)
//! - [`Module`](./struct.Module.html)
//!
//! ## Overview
//!
//! Two functionalities are provided by this module:
//! - schedule a dispatchable
//! - dispatch a call on behalf of other origins
#![cfg_attr(not(feature = "std"), no_std)]
// Disable the following three lints since they originate from an external macro
#![allow(clippy::string_lit_as_bytes)]
#![allow(clippy::boxed_local)]
#![allow(clippy::borrowed_box)]
use codec::{Decode, Encode};
use frame_support::{
decl_error, decl_event, decl_module, decl_storage,
dispatch::PostDispatchInfo,
traits::{
schedule::{DispatchTime, Named as ScheduleNamed, Priority},
EnsureOrigin, Get, IsType, OriginTrait,
},
weights::GetDispatchInfo,
Parameter,
};
use sp_runtime::{
traits::{CheckedSub, Dispatchable, Saturating},
DispatchError, DispatchResult, RuntimeDebug,
};
use sp_std::prelude::*;
mod mock;
mod tests;
/// A delayed origin. Can only be dispatched via `dispatch_as` with a delay.
#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)]
pub struct DelayedOrigin<BlockNumber, Origin> {
    /// Number of blocks that this call has been delayed.
    pub delay: BlockNumber,
    /// The initial origin.
    pub origin: Box<Origin>,
}
/// Ensure the origin have a minimum amount of delay.
pub struct EnsureDelayed<Delay, Inner, BlockNumber>(sp_std::marker::PhantomData<(Delay, Inner, BlockNumber)>);
impl<
        O: Into<Result<DelayedOrigin<BlockNumber, O>, O>> + From<DelayedOrigin<BlockNumber, O>>,
        Delay: Get<BlockNumber>,
        Inner: EnsureOrigin<O>,
        BlockNumber: PartialOrd,
    > EnsureOrigin<O> for EnsureDelayed<Delay, Inner, BlockNumber>
{
    type Success = Inner::Success;

    /// Accept the origin only when it is a `DelayedOrigin` whose recorded
    /// delay is at least `Delay`; the `Inner` origin check is then applied
    /// to the wrapped origin. Note that on an insufficient delay the
    /// *unwrapped* inner origin is handed back as `Err`.
    fn try_origin(o: O) -> Result<Self::Success, O> {
        o.into().and_then(|delayed_origin| {
            if delayed_origin.delay >= Delay::get() {
                Inner::try_origin(*delayed_origin.origin)
            } else {
                Err(*delayed_origin.origin)
            }
        })
    }

    // A generically-valid benchmarking origin cannot be constructed here,
    // so this is deliberately left unimplemented.
    #[cfg(feature = "runtime-benchmarks")]
    fn successful_origin() -> O {
        unimplemented!()
    }
}
/// Origin for the authority module.
pub type Origin<T> = DelayedOrigin<<T as frame_system::Trait>::BlockNumber, <T as Trait>::PalletsOrigin>;
/// Config for orml-authority
pub trait AuthorityConfig<Origin, PalletsOrigin, BlockNumber> {
/// Check if the `origin` is allow to schedule a dispatchable call with a
/// given `priority`.
fn check_schedule_dispatch(origin: Origin, priority: Priority) -> DispatchResult;
/// Check if the `origin` is allow to fast track a scheduled task that
/// initially created by `initial_origin`. `new_delay` is number of blocks
/// this dispatchable will be dispatched from now after fast track.
fn check_fast_track_schedule(
origin: Origin,
initial_origin: &PalletsOrigin,
new_delay: BlockNumber,
) -> DispatchResult;
/// Check if the `origin` is allow to delay a scheduled task that initially
/// created by `inital_origin`.
fn check_delay_schedule(origin: Origin, initial_origin: &PalletsOrigin) -> DispatchResult;
/// Check if the `origin` is allow to cancel a scheduled task that initially
/// created by `inital_origin`.
fn check_cancel_schedule(origin: Origin, initial_origin: &PalletsOrigin) -> DispatchResult;
}
/// Represent an origin that can be dispatched by other origins with permission
/// check.
pub trait AsOriginId<Origin, PalletsOrigin> {
/// Convert into `PalletsOrigin`
fn into_origin(self) -> PalletsOrigin;
/// Check if the `origin` is allow to dispatch call on behalf of this
/// origin.
fn check_dispatch_from(&self, origin: Origin) -> DispatchResult;
}
type CallOf<T> = <T as Trait>::Call;
/// The schedule task index type.
pub type ScheduleTaskIndex = u32;
/// orml-authority configuration trait.
pub trait Trait: frame_system::Trait {
/// The overarching event type.
type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;
/// The outer origin type.
type Origin: From<DelayedOrigin<Self::BlockNumber, <Self as Trait>::PalletsOrigin>>
+ IsType<<Self as frame_system::Trait>::Origin>
+ OriginTrait<PalletsOrigin = Self::PalletsOrigin>;
/// The caller origin, overarching type of all pallets origins.
type PalletsOrigin: Parameter + Into<<Self as frame_system::Trait>::Origin>;
/// The aggregated call type.
type Call: Parameter
+ Dispatchable<Origin = <Self as frame_system::Trait>::Origin, PostInfo = PostDispatchInfo>
+ GetDispatchInfo;
/// The Scheduler.
type Scheduler: ScheduleNamed<Self::BlockNumber, <Self as Trait>::Call, Self::PalletsOrigin>;
/// The type represent origin that can be dispatched by other origins.
type AsOriginId: Parameter + AsOriginId<<Self as frame_system::Trait>::Origin, Self::PalletsOrigin>;
/// Additional permission config.
type AuthorityConfig: AuthorityConfig<<Self as frame_system::Trait>::Origin, Self::PalletsOrigin, Self::BlockNumber>;
}
decl_error! {
pub enum Error for Module<T: Trait> {
/// Calculation overflow.
Overflow,
/// Failed to schedule a task.
FailedToSchedule,
/// Failed to cancel a task.
FailedToCancel,
}
}
decl_storage! {
trait Store for Module<T: Trait> as Authority {
/// Track the next task ID.
pub NextTaskIndex get(fn next_task_index): ScheduleTaskIndex;
}
}
decl_event! {
pub enum Event<T> where
<T as Trait>::PalletsOrigin,
<T as frame_system::Trait>::BlockNumber,
{
/// A call is dispatched. [result]
Dispatched(DispatchResult),
/// A call is scheduled. [origin, index]
Scheduled(PalletsOrigin, ScheduleTaskIndex),
/// A scheduled call is fast tracked. [origin, index, when]
FastTracked(PalletsOrigin, ScheduleTaskIndex, BlockNumber),
/// A scheduled call is delayed. [origin, index, when]
Delayed(PalletsOrigin, ScheduleTaskIndex, BlockNumber),
/// A scheduled call is cancelled. [origin, index]
Cancelled(PalletsOrigin, ScheduleTaskIndex),
}
}
decl_module! {
pub struct Module<T: Trait> for enum Call where origin: <T as frame_system::Trait>::Origin {
type Error = Error<T>;
fn deposit_event() = default;
/// Dispatch a dispatchable on behalf of other origin
#[weight = (call.get_dispatch_info().weight + 10_000, call.get_dispatch_info().class)]
pub fn dispatch_as(
origin,
as_origin: T::AsOriginId,
call: Box<CallOf<T>>,
) {
as_origin.check_dispatch_from(origin)?;
let e = call.dispatch(as_origin.into_origin().into());
Self::deposit_event(RawEvent::Dispatched(e.map(|_| ()).map_err(|e| e.error)));
}
/// Schdule a dispatchable to be dispatched at later block.
/// This is the only way to dispatch a call with `DelayedOrigin`.
#[weight = 0]
pub fn schedule_dispatch(
origin,
when: DispatchTime<T::BlockNumber>,
priority: Priority,
with_delayed_origin: bool,
call: Box<CallOf<T>>,
) {
T::AuthorityConfig::check_schedule_dispatch(origin.clone(), priority)?;
let id = NextTaskIndex::mutate(|id| -> sp_std::result::Result<ScheduleTaskIndex, DispatchError> {
let current_id = *id;
*id = id.checked_add(1).ok_or(Error::<T>::Overflow)?;
Ok(current_id)
})?;
let now = frame_system::Module::<T>::block_number();
let delay = match when {
DispatchTime::At(x) => x.checked_sub(&now).ok_or(Error::<T>::Overflow)?,
DispatchTime::After(x) => x
};
let schedule_origin = if with_delayed_origin {
let origin: <T as Trait>::Origin = From::from(origin);
let origin: <T as Trait>::Origin = From::from(DelayedOrigin::<T::BlockNumber, T::PalletsOrigin> {
delay,
origin: Box::new(origin.caller().clone())
});
origin
} else {
<T as Trait>::Origin::from(origin)
};
let pallets_origin = schedule_origin.caller().clone();
T::Scheduler::schedule_named(
Encode::encode(&(&pallets_origin, id)),
when,
None,
priority,
pallets_origin.clone(),
*call,
).map_err(|_| Error::<T>::FailedToSchedule)?;
Self::deposit_event(RawEvent::Scheduled(pallets_origin, id));
}
/// Fast track a scheduled dispatchable.
#[weight = 0]
pub fn fast_track_scheduled_dispatch(
origin,
initial_origin: T::PalletsOrigin,
task_id: ScheduleTaskIndex,
when: DispatchTime<T::BlockNumber>,
) {
let now = frame_system::Module::<T>::block_number();
let new_delay = match when {
DispatchTime::At(x) => x.checked_sub(&now).ok_or(Error::<T>::Overflow)?,
DispatchTime::After(x) => x
};
T::AuthorityConfig::check_fast_track_schedule(origin, &initial_origin, new_delay)?;
let now = frame_system::Module::<T>::block_number();
let when = match when {
DispatchTime::At(x) => x,
DispatchTime::After(x) => now.saturating_add(x)
};
// TODO: depends https://github.com/paritytech/substrate/issues/6774
Self::deposit_event(RawEvent::FastTracked(initial_origin, task_id, when));
}
/// Delay a scheduled dispatchable.
#[weight = 0]
pub fn delay_scheduled_dispatch(
origin,
initial_origin: T::PalletsOrigin,
task_id: ScheduleTaskIndex,
_additional_delay: T::BlockNumber,
) {
T::AuthorityConfig::check_delay_schedule(origin, &initial_origin)?;
// TODO: depends https://github.com/paritytech/substrate/issues/6774
Self::deposit_event(RawEvent::Delayed(initial_origin, task_id, 0.into()));
}
/// Cancel a scheduled dispatchable.
#[weight = 0]
pub fn cancel_scheduled_dispatch(
origin,
initial_origin: T::PalletsOrigin,
task_id: ScheduleTaskIndex,
) {
T::AuthorityConfig::check_cancel_schedule(origin, &initial_origin)?;
T::Scheduler::cancel_named((&initial_origin, task_id).encode()).map_err(|_| Error::<T>::FailedToCancel)?;
Self::deposit_event(RawEvent::Cancelled(initial_origin, task_id));
}
}
}
| 31.68932 | 118 | 0.707618 |
fbb56b26fd2114b1b7c6ccd5a27ab94d662b28f1 | 746 | //! Definition of the Empty combinator, a future that's never ready.
use core::marker;
use {Future, Poll, Async};
/// A future which is never resolved.
///
/// This future can be created with the `empty` function.
//
// `PhantomData` carries the `T`/`E` type parameters without storing any value,
// so `Empty` is zero-sized.
#[derive(Debug)]
#[must_use = "futures do nothing unless polled"]
pub struct Empty<T, E> {
    _data: marker::PhantomData<(T, E)>,
}
/// Creates a future which never resolves, representing a computation that never
/// finishes.
///
/// The returned future will forever return `Async::NotReady`; polling it never
/// completes, never errors and never panics.
pub fn empty<T, E>() -> Empty<T, E> {
    Empty { _data: marker::PhantomData }
}
impl<T, E> Future for Empty<T, E> {
    type Item = T;
    type Error = E;

    // Always "not ready". No task notification is arranged anywhere in this
    // body, so an executor that polls this future will park it forever.
    fn poll(&mut self) -> Poll<T, E> {
        Ok(Async::NotReady)
    }
}
| 23.3125 | 80 | 0.643432 |
dd104fcc0ee399ceb1b9e068ddf1c11096f8466b | 5,672 | use std::cell::RefCell;
use std::collections::HashMap;
use wasm_bindgen::prelude::*;
use web_sys::*;
static TEXTURED_QUAD_VS: &'static str = include_str!("./textured-quad-vertex.glsl");
static TEXTURED_QUAD_FS: &'static str = include_str!("./textured-quad-fragment.glsl");
static MESH_SKINNED_VS: &'static str = include_str!("./mesh-skinned-vertex.glsl");
static MESH_SKINNED_FS: &'static str = include_str!("./mesh-skinned-fragment.glsl");
static MESH_NON_SKINNED_VS: &'static str = include_str!("./mesh-non-skinned-vertex.glsl");
static MESH_NON_SKINNED_FS: &'static str = include_str!("./mesh-non-skinned-fragment.glsl");
static WATER_VS: &'static str = include_str!("./water-vertex.glsl");
static WATER_FS: &'static str = include_str!("./water-fragment.glsl");
/// Powers retrieving and using our shaders
pub struct ShaderSystem {
programs: HashMap<ShaderKind, Shader>,
active_program: RefCell<ShaderKind>,
}
impl ShaderSystem {
    /// Create a new ShaderSystem
    ///
    /// Compiles and links all four shader programs up front, then makes the
    /// textured-quad program active both on the GPU (`gl.use_program`) and in
    /// the `active_program` cache so the two start out in sync.
    pub fn new(gl: &WebGlRenderingContext) -> ShaderSystem {
        let mut programs = HashMap::new();

        // A shader that fails to compile/link is a programming error in the
        // bundled GLSL, so the unwraps panic early at startup.
        let water_shader = Shader::new(&gl, WATER_VS, WATER_FS).unwrap();
        let non_skinned_shader =
            Shader::new(&gl, MESH_NON_SKINNED_VS, MESH_NON_SKINNED_FS).unwrap();
        let skinned_mesh_shader = Shader::new(&gl, MESH_SKINNED_VS, MESH_SKINNED_FS).unwrap();
        let textured_quad_shader = Shader::new(&gl, TEXTURED_QUAD_VS, TEXTURED_QUAD_FS).unwrap();

        let active_program = RefCell::new(ShaderKind::TexturedQuad);
        gl.use_program(Some(&textured_quad_shader.program));

        programs.insert(ShaderKind::Water, water_shader);
        programs.insert(ShaderKind::NonSkinnedMesh, non_skinned_shader);
        programs.insert(ShaderKind::SkinnedMesh, skinned_mesh_shader);
        programs.insert(ShaderKind::TexturedQuad, textured_quad_shader);

        ShaderSystem {
            programs,
            active_program,
        }
    }

    /// Get one of our Shader's
    pub fn get_shader(&self, shader_kind: &ShaderKind) -> Option<&Shader> {
        self.programs.get(shader_kind)
    }

    /// Use a shader program. We cache the last used shader program to avoid unnecessary
    /// calls to the GPU.
    pub fn use_program(&self, gl: &WebGlRenderingContext, shader_kind: ShaderKind) {
        // Already active — skip the redundant `gl.use_program` call.
        if *self.active_program.borrow() == shader_kind {
            return;
        }

        gl.use_program(Some(&self.programs.get(&shader_kind).unwrap().program));
        *self.active_program.borrow_mut() = shader_kind;
    }
}
/// Identifiers for our different shaders
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
pub enum ShaderKind {
Water,
NonSkinnedMesh,
SkinnedMesh,
TexturedQuad,
}
/// One per ShaderKind
pub struct Shader {
pub program: WebGlProgram,
uniforms: RefCell<HashMap<String, WebGlUniformLocation>>,
}
impl Shader {
    /// Create a new Shader program from a vertex and fragment shader.
    ///
    /// Compiles both stages, links them into a program and initialises an
    /// empty uniform-location cache.
    fn new(
        gl: &WebGlRenderingContext,
        vert_shader: &str,
        frag_shader: &str,
    ) -> Result<Shader, JsValue> {
        let vert_shader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vert_shader)?;
        let frag_shader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, frag_shader)?;
        let program = link_program(&gl, &vert_shader, &frag_shader)?;

        Ok(Shader {
            program,
            uniforms: RefCell::new(HashMap::new()),
        })
    }

    /// Get the location of a uniform.
    ///
    /// The location is queried from the GPU on first use and cached, so
    /// repeated retrievals avoid another round trip to the shader program.
    ///
    /// # Panics
    ///
    /// Panics if the uniform does not exist in the program.
    pub fn get_uniform_location(
        &self,
        gl: &WebGlRenderingContext,
        uniform_name: &str,
    ) -> Option<WebGlUniformLocation> {
        let mut uniforms = self.uniforms.borrow_mut();
        // Entry API: a single map lookup instead of the original
        // contains-check + insert + get, and the panic message is only
        // built on an actual cache miss (the original `.expect(&format!(..))`
        // allocated the message string on every call).
        let location = uniforms.entry(uniform_name.to_string()).or_insert_with(|| {
            gl.get_uniform_location(&self.program, uniform_name)
                .unwrap_or_else(|| panic!("Uniform '{}' not found", uniform_name))
        });
        Some(location.clone())
    }
}
/// Compile a single shader stage (vertex or fragment) from GLSL source.
///
/// On success returns the compiled shader; on failure returns the driver's
/// info log (or a generic message when no log is available).
fn compile_shader(
    gl: &WebGlRenderingContext,
    shader_type: u32,
    source: &str,
) -> Result<WebGlShader, String> {
    let shader = match gl.create_shader(shader_type) {
        Some(shader) => shader,
        None => return Err("Could not create shader".to_string()),
    };

    gl.shader_source(&shader, source);
    gl.compile_shader(&shader);

    let compiled_ok = gl
        .get_shader_parameter(&shader, WebGlRenderingContext::COMPILE_STATUS)
        .as_bool()
        .unwrap_or(false);

    if compiled_ok {
        Ok(shader)
    } else {
        let log = gl
            .get_shader_info_log(&shader)
            .unwrap_or_else(|| "Unknown error creating shader".to_string());
        Err(log)
    }
}
/// Link a vertex and fragment shader into a complete shader program.
///
/// On success returns the linked program; on failure returns the driver's
/// info log (or a generic message when no log is available).
fn link_program(
    gl: &WebGlRenderingContext,
    vert_shader: &WebGlShader,
    frag_shader: &WebGlShader,
) -> Result<WebGlProgram, String> {
    let program = match gl.create_program() {
        Some(program) => program,
        None => return Err("Unable to create shader program".to_string()),
    };

    gl.attach_shader(&program, &vert_shader);
    gl.attach_shader(&program, &frag_shader);
    gl.link_program(&program);

    let linked_ok = gl
        .get_program_parameter(&program, WebGlRenderingContext::LINK_STATUS)
        .as_bool()
        .unwrap_or(false);

    if linked_ok {
        Ok(program)
    } else {
        let log = gl
            .get_program_info_log(&program)
            .unwrap_or_else(|| "Unknown error creating program".to_string());
        Err(log)
    }
}
| 33.169591 | 100 | 0.658145 |
75feed4656d8bcafba34c73c6107157211bdeeec | 1,997 | use std::io;
use std::io::prelude::*;
/// Entry point: parse the puzzle input and print the answer for `part`.
///
/// Only parts 1 and 2 are implemented; any other part number panics with
/// `unimplemented!`.
pub fn solve(input: impl BufRead, part: u8) -> io::Result<()> {
    let parsed = parse(input);
    let answer = if part == 1 {
        part_1(parsed)
    } else if part == 2 {
        part_2(parsed.1)
    } else {
        unimplemented!()
    };
    println!("{}", answer);
    Ok(())
}
/// Parse the puzzle input: line 1 is the earliest departure timestamp,
/// line 2 is a comma-separated list of bus IDs where any non-numeric entry
/// (the puzzle's "x") means "no constraint" and becomes `None`.
///
/// Fix: the original stripped the newline with `pop()`, which deletes the
/// last digit when the input has no trailing newline and leaves a stray
/// `\r` on CRLF input; `trim_end` handles both correctly.
///
/// # Panics
///
/// Panics if reading fails or the first line is not an integer.
fn parse(mut input: impl BufRead) -> (u64, Vec<Option<u64>>) {
    let mut line = String::new();
    input
        .read_line(&mut line)
        .expect("failed to read the departure-time line");
    let earliest_time = line
        .trim_end()
        .parse()
        .expect("first line must be an integer timestamp");

    line.clear();
    input
        .read_line(&mut line)
        .expect("failed to read the bus-list line");
    let buses = line
        .trim_end()
        .split(',')
        .map(|id| id.parse::<u64>().ok())
        .collect();

    (earliest_time, buses)
}
/// Part 1: find the bus with the shortest wait after `earliest` and return
/// the bus ID multiplied by the wait time.
///
/// Fix: the wait is `(b - earliest % b) % b` — the original `b - earliest % b`
/// reported a full period `b` instead of `0` when a bus departs exactly at
/// `earliest`.
///
/// # Panics
///
/// Panics if the list contains no bus (every entry is `None`).
fn part_1((earliest, buses): (u64, Vec<Option<u64>>)) -> u64 {
    let (bus, wait) = buses
        .into_iter()
        .flatten()
        .map(|b| (b, (b - earliest % b) % b))
        .min_by_key(|&(_, wait)| wait)
        .expect("at least one bus is required");
    bus * wait
}
/// Part 2: find the earliest timestamp `t` such that every listed bus `b` at
/// list offset `i` departs at `t + i` (i.e. `(t + i) % b == 0`).
///
/// Sieve approach: handle buses one at a time, keeping a candidate timestamp
/// and the combined period of all buses satisfied so far.
fn part_2(buses: Vec<Option<u64>>) -> u64 {
    let mut timestamp = 0u64;
    let mut step = 1u64;

    for (offset, bus) in buses
        .iter()
        .enumerate()
        .filter_map(|(i, b)| b.map(|b| (i as u64, b)))
    {
        // Advance by the current combined period until this bus also fits;
        // earlier buses stay satisfied because `step` is a multiple of each.
        while (timestamp + offset) % bus != 0 {
            timestamp += step;
        }
        // Combined period should be the LCM; plain multiplication is a cheap
        // LCM that assumes the bus IDs are pairwise coprime (prime in AoC).
        step *= bus;
    }

    timestamp
}
#[cfg(test)]
mod tests {
    use super::*;

    // Worked example from the AoC 2020 day-13 problem statement.
    const EXAMPLE: &str = "939
7,13,x,x,59,x,31,19
";

    #[test]
    fn part_1_example() {
        let input = parse(io::Cursor::new(EXAMPLE));
        // Bus 59 leaves at t=944, a 5-minute wait: 59 * 5 = 295.
        assert_eq!(part_1(input), 295);
    }

    #[test]
    fn part_2_example() {
        let input = parse(io::Cursor::new(EXAMPLE));
        // Earliest timestamp where each bus departs at its list offset.
        assert_eq!(part_2(input.1), 1068781);
    }
}
| 22.693182 | 82 | 0.500751 |
7a486254f90cd89f21df488a7b0ec9d0b9b2bd40 | 18,832 | // Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
//! Scheduler which schedules the execution of `storage::Command`s.
//!
//! There is one scheduler for each store. It receives commands from clients, executes them against
//! the MVCC layer storage engine.
//!
//! Logically, the data organization hierarchy from bottom to top is row -> region -> store ->
//! database. But each region is replicated onto N stores for reliability, the replicas form a Raft
//! group, one of which acts as the leader. When the client read or write a row, the command is
//! sent to the scheduler which is on the region leader's store.
//!
//! Scheduler runs in a single-thread event loop, but command executions are delegated to a pool of
//! worker thread.
//!
//! Scheduler keeps track of all the running commands and uses latches to ensure serialized access
//! to the overlapping rows involved in concurrent commands. But note that scheduler only ensures
//! serialized access to the overlapping rows at command level, but a transaction may consist of
//! multiple commands, therefore conflicts may happen at transaction level. Transaction semantics
//! is ensured by the transaction protocol implemented in the client library, which is transparent
//! to the scheduler.
use std::fmt::{self, Debug, Display, Formatter};
use std::u64;
use kvproto::kvrpcpb::CommandPri;
use prometheus::HistogramTimer;
use crate::storage::kv::Result as EngineResult;
use crate::storage::Key;
use crate::storage::{Command, Engine, Error as StorageError, StorageCb};
use tikv_util::collections::HashMap;
use tikv_util::worker::{self, Runnable};
use super::super::metrics::*;
use super::latch::{Latches, Lock};
use super::process::{execute_callback, Executor, ProcessResult, Task};
use super::sched_pool::*;
use super::Error;
pub const CMD_BATCH_SIZE: usize = 256;
/// Message types for the scheduler event loop.
pub enum Msg {
RawCmd {
cmd: Command,
cb: StorageCb,
},
ReadFinished {
cid: u64,
pr: ProcessResult,
tag: &'static str,
},
WriteFinished {
cid: u64,
pr: ProcessResult,
result: EngineResult<()>,
tag: &'static str,
},
FinishedWithErr {
cid: u64,
err: Error,
tag: &'static str,
},
}
/// Debug for messages.
impl Debug for Msg {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Delegate to the Display impl so both formats agree.
        write!(f, "{}", self)
    }
}
/// Display for messages: a short, human-readable tag per variant, with the
/// command id (`cid`) where one is available.
impl Display for Msg {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            Msg::RawCmd { cmd, .. } => write!(f, "RawCmd {:?}", cmd),
            Msg::ReadFinished { cid, .. } => write!(f, "ReadFinished [cid={}]", cid),
            Msg::WriteFinished { cid, .. } => write!(f, "WriteFinished [cid={}]", cid),
            Msg::FinishedWithErr { cid, .. } => write!(f, "FinishedWithErr [cid={}]", cid),
        }
    }
}
// It stores context of a task.
struct TaskContext {
lock: Lock,
cb: StorageCb,
write_bytes: usize,
tag: &'static str,
// How long it waits on latches.
latch_timer: Option<HistogramTimer>,
// Total duration of a command.
_cmd_timer: HistogramTimer,
}
impl TaskContext {
    /// Builds the bookkeeping context for a newly enqueued command.
    fn new(lock: Lock, cb: StorageCb, cmd: &Command) -> TaskContext {
        // Only write commands contribute to the pending-write-bytes flow
        // control; read commands count as zero.
        let write_bytes = if lock.is_write_lock() {
            cmd.write_bytes()
        } else {
            0
        };

        TaskContext {
            lock,
            cb,
            write_bytes,
            tag: cmd.tag(),
            // Started here, dropped in `on_schedule` — measures latch wait.
            latch_timer: Some(
                SCHED_LATCH_HISTOGRAM_VEC
                    .with_label_values(&[cmd.tag()])
                    .start_coarse_timer(),
            ),
            // Dropped together with the TaskContext — measures the total
            // lifetime of the command.
            _cmd_timer: SCHED_HISTOGRAM_VEC
                .with_label_values(&[cmd.tag()])
                .start_coarse_timer(),
        }
    }

    /// Called once all latches are acquired; dropping the latch timer records
    /// how long the command waited on latches.
    fn on_schedule(&mut self) {
        self.latch_timer.take();
    }
}
/// Scheduler which schedules the execution of `storage::Command`s.
pub struct Scheduler<E: Engine> {
engine: E,
/// cid -> Task
pending_tasks: HashMap<u64, Task>,
// cid -> TaskContext
task_contexts: HashMap<u64, TaskContext>,
// actual scheduler to schedule the execution of commands
scheduler: worker::Scheduler<Msg>,
// cmd id generator
id_alloc: u64,
// write concurrency control
latches: Latches,
// TODO: Dynamically calculate this value according to processing
/// speed of recent write requests.
sched_pending_write_threshold: usize,
// worker pool
worker_pool: SchedPool<E>,
// high priority commands and system commands will be delivered to this pool
high_priority_pool: SchedPool<E>,
// used to control write flow
running_write_bytes: usize,
}
impl<E: Engine> Scheduler<E> {
/// Creates a scheduler.
pub fn new(
engine: E,
scheduler: worker::Scheduler<Msg>,
concurrency: usize,
worker_pool_size: usize,
sched_pending_write_threshold: usize,
) -> Self {
Scheduler {
engine: engine.clone(),
// TODO: GC these two maps.
pending_tasks: Default::default(),
task_contexts: Default::default(),
scheduler,
id_alloc: 0,
latches: Latches::new(concurrency),
sched_pending_write_threshold,
worker_pool: SchedPool::new(engine.clone(), worker_pool_size, "sched-worker-pool"),
high_priority_pool: SchedPool::new(
engine.clone(),
std::cmp::max(1, worker_pool_size / 2),
"sched-high-pri-pool",
),
running_write_bytes: 0,
}
}
/// Generates the next command ID.
fn gen_id(&mut self) -> u64 {
self.id_alloc += 1;
self.id_alloc
}
fn dequeue_task(&mut self, cid: u64) -> Task {
let task = self.pending_tasks.remove(&cid).unwrap();
assert_eq!(task.cid, cid);
task
}
fn enqueue_task(&mut self, task: Task, callback: StorageCb) {
let cid = task.cid;
let tctx = {
let cmd = task.cmd();
let lock = self.gen_lock(cmd);
TaskContext::new(lock, callback, cmd)
};
self.running_write_bytes += tctx.write_bytes;
SCHED_WRITING_BYTES_GAUGE.set(self.running_write_bytes as i64);
if self.pending_tasks.insert(cid, task).is_some() {
panic!("command cid={} shouldn't exist", cid);
}
SCHED_CONTEX_GAUGE.set(self.pending_tasks.len() as i64);
if self.task_contexts.insert(cid, tctx).is_some() {
panic!("TaskContext cid={} shouldn't exist", cid);
}
}
fn dequeue_task_context(&mut self, cid: u64) -> TaskContext {
let tctx = self.task_contexts.remove(&cid).unwrap();
self.running_write_bytes -= tctx.write_bytes;
SCHED_WRITING_BYTES_GAUGE.set(self.running_write_bytes as i64);
SCHED_CONTEX_GAUGE.set(self.pending_tasks.len() as i64);
tctx
}
fn fetch_executor(&self, priority: CommandPri, is_sys_cmd: bool) -> Executor<E> {
let pool = if priority == CommandPri::High || is_sys_cmd {
self.high_priority_pool.clone()
} else {
self.worker_pool.clone()
};
let scheduler = self.scheduler.clone();
Executor::new(scheduler, pool)
}
/// Event handler for new command.
///
/// This method will try to acquire all the necessary latches. If all the necessary latches are
/// acquired, the method initiates a get snapshot operation for furthur processing; otherwise,
/// the method adds the command to the waiting queue(s). The command will be handled later in
/// `try_to_wake_up` when its turn comes.
///
/// Note that once a command is ready to execute, the snapshot is always up-to-date during the
/// execution because 1) all the conflicting commands (if any) must be in the waiting queues;
/// 2) there may be non-conflicitng commands running concurrently, but it doesn't matter.
fn schedule_command(&mut self, cmd: Command, callback: StorageCb) {
let cid = self.gen_id();
debug!("received new command"; "cid" => cid, "cmd" => %cmd);
let tag = cmd.tag();
let priority_tag = cmd.priority_tag();
let task = Task::new(cid, cmd);
// TODO: enqueue_task should return an reference of the tctx.
self.enqueue_task(task, callback);
self.try_to_wake_up(cid);
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "new"])
.inc();
SCHED_COMMANDS_PRI_COUNTER_VEC
.with_label_values(&[priority_tag])
.inc();
}
/// Tries to acquire all the necessary latches. If all the necessary latches are acquired,
/// the method initiates a get snapshot operation for furthur processing.
fn try_to_wake_up(&mut self, cid: u64) {
let wake = if let Some(tctx) = self.acquire_lock(cid) {
tctx.on_schedule();
true
} else {
false
};
if wake {
self.get_snapshot(cid);
}
}
fn too_busy(&self) -> bool {
fail_point!("txn_scheduler_busy", |_| true);
self.running_write_bytes >= self.sched_pending_write_threshold
}
fn on_receive_new_cmd(&mut self, cmd: Command, callback: StorageCb) {
// write flow control
if cmd.need_flow_control() && self.too_busy() {
SCHED_TOO_BUSY_COUNTER_VEC
.with_label_values(&[cmd.tag()])
.inc();
execute_callback(
callback,
ProcessResult::Failed {
err: StorageError::SchedTooBusy,
},
);
return;
}
self.schedule_command(cmd, callback);
}
/// Initiates an async operation to get a snapshot from the storage engine, then posts a
/// `SnapshotFinished` message back to the event loop when it finishes.
fn get_snapshot(&mut self, cid: u64) {
let task = self.dequeue_task(cid);
let tag = task.tag;
let ctx = task.context().clone();
let executor = self.fetch_executor(task.priority(), task.cmd().is_sys_cmd());
let cb = Box::new(move |(cb_ctx, snapshot)| {
executor.execute(cb_ctx, snapshot, task);
});
if let Err(e) = self.engine.async_snapshot(&ctx, cb) {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "async_snapshot_err"])
.inc();
error!("engine async_snapshot failed"; "err" => ?e);
self.finish_with_err(cid, e.into());
} else {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "snapshot"])
.inc();
}
}
/// Calls the callback with an error.
fn finish_with_err(&mut self, cid: u64, err: Error) {
debug!("command finished with error"; "cid" => cid);
let tctx = self.dequeue_task_context(cid);
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tctx.tag, "error"])
.inc();
let pr = ProcessResult::Failed {
err: StorageError::from(err),
};
execute_callback(tctx.cb, pr);
self.release_lock(&tctx.lock, cid);
}
/// Event handler for the success of read.
///
/// If a next command is present, continues to execute; otherwise, delivers the result to the
/// callback.
fn on_read_finished(&mut self, cid: u64, pr: ProcessResult, tag: &str) {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "read_finish"])
.inc();
debug!("read command finished"; "cid" => cid);
let tctx = self.dequeue_task_context(cid);
if let ProcessResult::NextCommand { cmd } = pr {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "next_cmd"])
.inc();
self.schedule_command(cmd, tctx.cb);
} else {
execute_callback(tctx.cb, pr);
}
self.release_lock(&tctx.lock, cid);
}
/// Event handler for the success of write.
fn on_write_finished(
&mut self,
cid: u64,
pr: ProcessResult,
result: EngineResult<()>,
tag: &str,
) {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "write_finish"])
.inc();
debug!("write finished for command"; "cid" => cid);
let tctx = self.dequeue_task_context(cid);
let pr = match result {
Ok(()) => pr,
Err(e) => ProcessResult::Failed {
err: crate::storage::Error::from(e),
},
};
if let ProcessResult::NextCommand { cmd } = pr {
SCHED_STAGE_COUNTER_VEC
.with_label_values(&[tag, "next_cmd"])
.inc();
self.schedule_command(cmd, tctx.cb);
} else {
execute_callback(tctx.cb, pr);
}
self.release_lock(&tctx.lock, cid);
}
/// Generates the lock for a command.
///
/// Basically, read-only commands require no latches, write commands require latches hashed
/// by the referenced keys.
fn gen_lock(&self, cmd: &Command) -> Lock {
gen_command_lock(&self.latches, cmd)
}
/// Tries to acquire all the required latches for a command.
///
/// Returns `Some(TaskContext)` if successful; returns `None` otherwise.
fn acquire_lock(&mut self, cid: u64) -> Option<&mut TaskContext> {
let tctx = self.task_contexts.get_mut(&cid).unwrap();
if self.latches.acquire(&mut tctx.lock, cid) {
Some(tctx)
} else {
None
}
}
/// Releases all the latches held by a command.
fn release_lock(&mut self, lock: &Lock, cid: u64) {
let wakeup_list = self.latches.release(lock, cid);
for wcid in wakeup_list {
self.try_to_wake_up(wcid);
}
}
}
impl<E: Engine> Runnable<Msg> for Scheduler<E> {
    /// Drains a batch of scheduler messages and dispatches each one to the
    /// matching event handler; this runs on the single-threaded event loop.
    fn run_batch(&mut self, msgs: &mut Vec<Msg>) {
        for msg in msgs.drain(..) {
            match msg {
                Msg::RawCmd { cmd, cb } => self.on_receive_new_cmd(cmd, cb),
                Msg::ReadFinished { cid, tag, pr } => self.on_read_finished(cid, pr, tag),
                Msg::WriteFinished {
                    cid,
                    tag,
                    pr,
                    result,
                } => self.on_write_finished(cid, pr, result, tag),
                Msg::FinishedWithErr { cid, err, .. } => self.finish_with_err(cid, err),
            }
        }
    }
}
/// Builds the latch `Lock` for a command: write commands latch every key they
/// touch; all remaining commands fall through to an empty lock (no latches).
fn gen_command_lock(latches: &Latches, cmd: &Command) -> Lock {
    match *cmd {
        Command::Prewrite { ref mutations, .. } => {
            // Latch the key of every mutation in the prewrite.
            let keys: Vec<&Key> = mutations.iter().map(|x| x.key()).collect();
            latches.gen_lock(&keys)
        }
        Command::ResolveLock { ref key_locks, .. } => {
            // Latch the key of each (key, lock) pair being resolved.
            let keys: Vec<&Key> = key_locks.iter().map(|x| &x.0).collect();
            latches.gen_lock(&keys)
        }
        Command::Commit { ref keys, .. } | Command::Rollback { ref keys, .. } => {
            latches.gen_lock(keys)
        }
        Command::Cleanup { ref key, .. } => latches.gen_lock(&[key]),
        Command::Pause { ref keys, .. } => latches.gen_lock(keys),
        // Everything else (reads such as ScanLock / MvccByKey / ...) takes
        // no latches.
        _ => Lock::new(vec![]),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::storage::mvcc;
use crate::storage::txn::latch::*;
use crate::storage::{Command, Key, Mutation, Options};
use kvproto::kvrpcpb::Context;
use tikv_util::collections::HashMap;
#[test]
fn test_command_latches() {
let mut temp_map = HashMap::default();
temp_map.insert(10, 20);
let readonly_cmds = vec![
Command::ScanLock {
ctx: Context::new(),
max_ts: 5,
start_key: None,
limit: 0,
},
Command::ResolveLock {
ctx: Context::new(),
txn_status: temp_map.clone(),
scan_key: None,
key_locks: vec![],
},
Command::MvccByKey {
ctx: Context::new(),
key: Key::from_raw(b"k"),
},
Command::MvccByStartTs {
ctx: Context::new(),
start_ts: 25,
},
];
let write_cmds = vec![
Command::Prewrite {
ctx: Context::new(),
mutations: vec![Mutation::Put((Key::from_raw(b"k"), b"v".to_vec()))],
primary: b"k".to_vec(),
start_ts: 10,
options: Options::default(),
},
Command::Commit {
ctx: Context::new(),
keys: vec![Key::from_raw(b"k")],
lock_ts: 10,
commit_ts: 20,
},
Command::Cleanup {
ctx: Context::new(),
key: Key::from_raw(b"k"),
start_ts: 10,
},
Command::Rollback {
ctx: Context::new(),
keys: vec![Key::from_raw(b"k")],
start_ts: 10,
},
Command::ResolveLock {
ctx: Context::new(),
txn_status: temp_map.clone(),
scan_key: None,
key_locks: vec![(
Key::from_raw(b"k"),
mvcc::Lock::new(mvcc::LockType::Put, b"k".to_vec(), 10, 20, None),
)],
},
];
let mut latches = Latches::new(1024);
let write_locks: Vec<Lock> = write_cmds
.into_iter()
.enumerate()
.map(|(id, cmd)| {
let mut lock = gen_command_lock(&latches, &cmd);
assert_eq!(latches.acquire(&mut lock, id as u64), id == 0);
lock
})
.collect();
for (id, cmd) in readonly_cmds.iter().enumerate() {
let mut lock = gen_command_lock(&latches, cmd);
assert!(latches.acquire(&mut lock, id as u64));
}
// acquire/release locks one by one.
let max_id = write_locks.len() as u64 - 1;
for (id, mut lock) in write_locks.into_iter().enumerate() {
let id = id as u64;
if id != 0 {
assert!(latches.acquire(&mut lock, id));
}
let unlocked = latches.release(&lock, id);
if id as u64 == max_id {
assert!(unlocked.is_empty());
} else {
assert_eq!(unlocked, vec![id + 1]);
}
}
}
}
| 32.923077 | 99 | 0.556181 |
e4f939c151f3ef1564b3e35de6ca619fc04a54de | 4,183 | use crate::deriving::path_std;
use crate::deriving::generic::*;
use crate::deriving::generic::ty::*;
use syntax::ast::{self, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span;
/// Expands `#[derive(Ord)]`: generates an `impl ::std::cmp::Ord` containing a
/// single `#[inline]` `cmp` method whose body is built by `cs_cmp`.
pub fn expand_deriving_ord(cx: &mut ExtCtxt<'_>,
                           span: Span,
                           mitem: &MetaItem,
                           item: &Annotatable,
                           push: &mut dyn FnMut(Annotatable)) {
    // `#[inline]` attribute to attach to the generated `cmp` method.
    let inline = cx.meta_word(span, Symbol::intern("inline"));
    let attrs = vec![cx.attribute(span, inline)];
    let trait_def = TraitDef {
        span,
        attributes: Vec::new(),
        path: path_std!(cx, cmp::Ord),
        additional_bounds: Vec::new(),
        generics: LifetimeBounds::empty(),
        is_unsafe: false,
        supports_unions: false,
        methods: vec![MethodDef {
            name: "cmp",
            generics: LifetimeBounds::empty(),
            explicit_self: borrowed_explicit_self(),
            args: vec![(borrowed_self(), "other")],
            ret_ty: Literal(path_std!(cx, cmp::Ordering)),
            attributes: attrs,
            is_unsafe: false,
            // Fieldless variants with equal discriminants compare equal, so
            // they can share a single match arm in the generated code.
            unify_fieldless_variants: true,
            combine_substructure: combine_substructure(Box::new(|a, b, c| {
                cs_cmp(a, b, c)
            })),
        }],
        associated_types: Vec::new(),
    };

    trait_def.expand(cx, mitem, item, push)
}
/// Builds the expression `<tag0>.cmp(&<tag1>)` from the two variant-tag
/// identifiers — used to order values belonging to different enum variants.
pub fn ordering_collapsed(cx: &mut ExtCtxt<'_>,
                          span: Span,
                          self_arg_tags: &[ast::Ident])
                          -> P<ast::Expr> {
    let lft = cx.expr_ident(span, self_arg_tags[0]);
    // `cmp` takes its argument by reference, hence the addr-of wrapper.
    let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
    cx.expr_method_call(span, lft, cx.ident_of("cmp"), vec![rgt])
}
/// Generate the body of the derived `Ord::cmp` method for `substr`.
///
/// Field comparisons are chained so that the first `Ordering` that is not
/// `Equal` short-circuits the result; variants of different discriminants
/// fall through to a tag comparison via `ordering_collapsed`.
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
    // Gensym'd so the match binder cannot shadow user identifiers.
    let test_id = cx.ident_of("cmp").gensym();
    let equals_path = cx.path_global(span, cx.std_path(&["cmp", "Ordering", "Equal"]));
    let cmp_path = cx.std_path(&["cmp", "Ord", "cmp"]);
    // Builds:
    //
    // match ::std::cmp::Ord::cmp(&self_field1, &other_field1) {
    //     ::std::cmp::Ordering::Equal =>
    //         match ::std::cmp::Ord::cmp(&self_field2, &other_field2) {
    //             ::std::cmp::Ordering::Equal => {
    //                 ...
    //             }
    //             cmp => cmp
    //         },
    //     cmp => cmp
    // }
    //
    cs_fold(// foldr nests the if-elses correctly, leaving the first field
            // as the outermost one, and the last as the innermost.
            false,
            |cx, span, old, self_f, other_fs| {
                // match new {
                //     ::std::cmp::Ordering::Equal => old,
                //     cmp => cmp
                // }
                let new = {
                    // `derive` always pairs exactly one "other" value with self.
                    let other_f = match (other_fs.len(), other_fs.get(0)) {
                        (1, Some(o_f)) => o_f,
                        _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
                    };
                    let args = vec![
                        cx.expr_addr_of(span, self_f),
                        cx.expr_addr_of(span, other_f.clone()),
                    ];
                    cx.expr_call_global(span, cmp_path.clone(), args)
                };
                let eq_arm = cx.arm(span,
                                    vec![cx.pat_path(span, equals_path.clone())],
                                    old);
                let neq_arm = cx.arm(span,
                                     vec![cx.pat_ident(span, test_id)],
                                     cx.expr_ident(span, test_id));
                cx.expr_match(span, new, vec![eq_arm, neq_arm])
            },
            // Base case: zero fields compare Equal.
            cx.expr_path(equals_path.clone()),
            // Different variants: compare the discriminant tags instead.
            Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
                if self_args.len() != 2 {
                    cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
                } else {
                    ordering_collapsed(cx, span, tag_tuple)
                }
            }),
            cx,
            span,
            substr)
}
| 35.151261 | 89 | 0.49223 |
de75b7b4a9d2ff4f07b27ea7917a4501eb908bcb | 11,243 | use crate::{
border::{Border, BorderBuilder},
brush::{Brush, GradientPoint},
core::{algebra::Vector2, color::Color, math::Rect, pool::Handle},
draw::DrawingContext,
message::{
DecoratorMessage, MessageData, MessageDirection, UiMessage, UiMessageData, WidgetMessage,
},
node::UINode,
widget::Widget,
BuildContext, Control, NodeHandleMapping, UserInterface, BRUSH_BRIGHT, BRUSH_LIGHT,
BRUSH_LIGHTER, BRUSH_LIGHTEST, COLOR_DARKEST, COLOR_LIGHTEST,
};
use std::ops::{Deref, DerefMut};
/// A visual element that changes its appearance by listening specific events.
/// It can has "pressed", "hover", "selected" or normal appearance:
///
/// `Pressed` - enables on mouse down message.
/// `Selected` - whether decorator selected or not.
/// `Hovered` - mouse is over decorator.
/// `Normal` - not selected, pressed, hovered.
///
/// This element is widely used to provide some generic visual behaviour for various
/// widgets. For example it used to decorate button, items in items control.
#[derive(Clone)]
pub struct Decorator<M: MessageData, C: Control<M, C>> {
border: Border<M, C>,
normal_brush: Brush,
hover_brush: Brush,
pressed_brush: Brush,
selected_brush: Brush,
disabled_brush: Brush,
is_selected: bool,
pressable: bool,
}
// Deref to the inner border's Widget so the decorator can be used anywhere a
// plain widget is expected.
impl<M: MessageData, C: Control<M, C>> Deref for Decorator<M, C> {
    type Target = Widget<M, C>;
    fn deref(&self) -> &Self::Target {
        &self.border
    }
}
impl<M: MessageData, C: Control<M, C>> DerefMut for Decorator<M, C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.border
    }
}
impl<M: MessageData, C: Control<M, C>> Control<M, C> for Decorator<M, C> {
    // Layout, drawing and updating are all delegated verbatim to the inner border;
    // only message handling below adds decorator-specific behaviour.
    fn resolve(&mut self, node_map: &NodeHandleMapping<M, C>) {
        self.border.resolve(node_map)
    }
    fn measure_override(
        &self,
        ui: &UserInterface<M, C>,
        available_size: Vector2<f32>,
    ) -> Vector2<f32> {
        self.border.measure_override(ui, available_size)
    }
    fn arrange_override(&self, ui: &UserInterface<M, C>, final_size: Vector2<f32>) -> Vector2<f32> {
        self.border.arrange_override(ui, final_size)
    }
    fn arrange(&self, ui: &UserInterface<M, C>, final_rect: &Rect<f32>) {
        self.border.arrange(ui, final_rect)
    }
    fn is_measure_valid(&self, ui: &UserInterface<M, C>) -> bool {
        self.border.is_measure_valid(ui)
    }
    fn is_arrange_valid(&self, ui: &UserInterface<M, C>) -> bool {
        self.border.is_arrange_valid(ui)
    }
    fn measure(&self, ui: &UserInterface<M, C>, available_size: Vector2<f32>) {
        self.border.measure(ui, available_size);
    }
    fn draw(&self, drawing_context: &mut DrawingContext) {
        self.border.draw(drawing_context)
    }
    fn update(&mut self, dt: f32) {
        self.border.update(dt)
    }
    /// Reacts to decorator configuration messages and to mouse events routed
    /// through this widget (or its descendants), updating the background brush
    /// to reflect the current visual state.
    fn handle_routed_message(
        &mut self,
        ui: &mut UserInterface<M, C>,
        message: &mut UiMessage<M, C>,
    ) {
        // Let the border react first; decorator logic runs on top of it.
        self.border.handle_routed_message(ui, message);
        match &message.data() {
            UiMessageData::Decorator(msg) => match msg {
                // Selection change: repaint with the selected/normal brush,
                // but only when the flag actually changed.
                &DecoratorMessage::Select(value) => {
                    if self.is_selected != value {
                        self.is_selected = value;
                        if self.is_selected {
                            ui.send_message(WidgetMessage::background(
                                self.handle(),
                                MessageDirection::ToWidget,
                                self.selected_brush.clone(),
                            ));
                        } else {
                            ui.send_message(WidgetMessage::background(
                                self.handle(),
                                MessageDirection::ToWidget,
                                self.normal_brush.clone(),
                            ));
                        }
                    }
                }
                // Brush updates: store the new brush and repaint immediately
                // only if the corresponding state is currently active.
                DecoratorMessage::HoverBrush(brush) => {
                    self.hover_brush = brush.clone();
                    if self.is_mouse_directly_over {
                        ui.send_message(WidgetMessage::background(
                            self.handle(),
                            MessageDirection::ToWidget,
                            self.hover_brush.clone(),
                        ));
                    }
                }
                DecoratorMessage::NormalBrush(brush) => {
                    self.normal_brush = brush.clone();
                    if !self.is_selected && !self.is_mouse_directly_over {
                        ui.send_message(WidgetMessage::background(
                            self.handle(),
                            MessageDirection::ToWidget,
                            self.normal_brush.clone(),
                        ));
                    }
                }
                // Pressed brush is only ever shown transiently on MouseDown,
                // so storing it is enough — no immediate repaint.
                DecoratorMessage::PressedBrush(brush) => {
                    self.pressed_brush = brush.clone();
                }
                DecoratorMessage::SelectedBrush(brush) => {
                    self.selected_brush = brush.clone();
                    if self.is_selected {
                        ui.send_message(WidgetMessage::background(
                            self.handle(),
                            MessageDirection::ToWidget,
                            self.selected_brush.clone(),
                        ));
                    }
                }
            },
            UiMessageData::Widget(msg) => {
                // Mouse events count when aimed at this decorator or any child.
                if message.destination() == self.handle()
                    || self.has_descendant(message.destination(), ui)
                {
                    match msg {
                        // Leaving restores selected/normal appearance.
                        WidgetMessage::MouseLeave => {
                            if self.is_selected {
                                ui.send_message(WidgetMessage::background(
                                    self.handle(),
                                    MessageDirection::ToWidget,
                                    self.selected_brush.clone(),
                                ));
                            } else {
                                ui.send_message(WidgetMessage::background(
                                    self.handle(),
                                    MessageDirection::ToWidget,
                                    self.normal_brush.clone(),
                                ));
                            }
                        }
                        WidgetMessage::MouseEnter => {
                            ui.send_message(WidgetMessage::background(
                                self.handle(),
                                MessageDirection::ToWidget,
                                self.hover_brush.clone(),
                            ));
                        }
                        // Pressed look only when the decorator is pressable.
                        WidgetMessage::MouseDown { .. } if self.pressable => {
                            ui.send_message(WidgetMessage::background(
                                self.handle(),
                                MessageDirection::ToWidget,
                                self.pressed_brush.clone(),
                            ));
                        }
                        // Release restores selected/normal (regardless of `pressable`).
                        WidgetMessage::MouseUp { .. } => {
                            if self.is_selected {
                                ui.send_message(WidgetMessage::background(
                                    self.handle(),
                                    MessageDirection::ToWidget,
                                    self.selected_brush.clone(),
                                ));
                            } else {
                                ui.send_message(WidgetMessage::background(
                                    self.handle(),
                                    MessageDirection::ToWidget,
                                    self.normal_brush.clone(),
                                ));
                            }
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }
    fn remove_ref(&mut self, handle: Handle<UINode<M, C>>) {
        self.border.remove_ref(handle)
    }
}
/// Builder for [`Decorator`]; wraps a `BorderBuilder` and lets each state brush
/// be overridden. Unset brushes fall back to theme defaults in `build`.
pub struct DecoratorBuilder<M: MessageData, C: Control<M, C>> {
    border_builder: BorderBuilder<M, C>,
    normal_brush: Option<Brush>,
    hover_brush: Option<Brush>,
    pressed_brush: Option<Brush>,
    selected_brush: Option<Brush>,
    disabled_brush: Option<Brush>,
    pressable: bool,
}
impl<M: MessageData, C: Control<M, C>> DecoratorBuilder<M, C> {
    /// Starts a builder with no brush overrides; decorators are pressable by default.
    pub fn new(border_builder: BorderBuilder<M, C>) -> Self {
        Self {
            border_builder,
            normal_brush: None,
            hover_brush: None,
            pressed_brush: None,
            selected_brush: None,
            disabled_brush: None,
            pressable: true,
        }
    }
    /// Sets the brush used in the normal (idle) state.
    pub fn with_normal_brush(mut self, brush: Brush) -> Self {
        self.normal_brush = Some(brush);
        self
    }
    /// Sets the brush used while the mouse hovers the decorator.
    pub fn with_hover_brush(mut self, brush: Brush) -> Self {
        self.hover_brush = Some(brush);
        self
    }
    /// Sets the brush used while the mouse button is held down.
    pub fn with_pressed_brush(mut self, brush: Brush) -> Self {
        self.pressed_brush = Some(brush);
        self
    }
    /// Sets the brush used while the decorator is selected.
    pub fn with_selected_brush(mut self, brush: Brush) -> Self {
        self.selected_brush = Some(brush);
        self
    }
    /// Sets the brush intended for the disabled state.
    pub fn with_disabled_brush(mut self, brush: Brush) -> Self {
        self.disabled_brush = Some(brush);
        self
    }
    /// Enables/disables the pressed visual reaction on mouse down.
    pub fn with_pressable(mut self, pressable: bool) -> Self {
        self.pressable = pressable;
        self
    }
    /// Builds the decorator node, filling unset brushes with theme defaults and,
    /// if the border has no explicit foreground, installing the default
    /// top-light/bottom-dark gradient.
    pub fn build(mut self, ui: &mut BuildContext<M, C>) -> Handle<UINode<M, C>> {
        let normal_brush = self.normal_brush.unwrap_or(BRUSH_LIGHT);
        if self.border_builder.widget_builder.foreground.is_none() {
            self.border_builder.widget_builder.foreground = Some(Brush::LinearGradient {
                from: Vector2::new(0.5, 0.0),
                to: Vector2::new(0.5, 1.0),
                stops: vec![
                    GradientPoint {
                        stop: 0.0,
                        color: COLOR_LIGHTEST,
                    },
                    GradientPoint {
                        stop: 0.25,
                        color: COLOR_LIGHTEST,
                    },
                    GradientPoint {
                        stop: 1.0,
                        color: COLOR_DARKEST,
                    },
                ],
            });
        }
        let mut border = self.border_builder.build_border();
        // Start in the normal state.
        border.set_background(normal_brush.clone());
        let node = UINode::Decorator(Decorator {
            border,
            normal_brush,
            hover_brush: self.hover_brush.unwrap_or(BRUSH_LIGHTER),
            pressed_brush: self.pressed_brush.unwrap_or(BRUSH_LIGHTEST),
            selected_brush: self.selected_brush.unwrap_or(BRUSH_BRIGHT),
            disabled_brush: self
                .disabled_brush
                .unwrap_or_else(|| Brush::Solid(Color::opaque(50, 50, 50))),
            is_selected: false,
            pressable: self.pressable,
        });
        ui.add_node(node)
    }
}
| 36.035256 | 100 | 0.479054 |
c11c828d2e339eeb0df512a4175cabcdf40015d9 | 2,622 | use std::borrow::Cow;
use crate::{
attributes::{self, get_pyo3_options, is_attribute_ident, take_attributes, NameAttribute},
deprecations::Deprecations,
};
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{
ext::IdentExt,
parse::{Parse, ParseStream},
spanned::Spanned,
Result,
};
/// A Rust `const` item exposed to Python, together with its pyo3 attributes.
pub struct ConstSpec {
    // Name of the const on the Rust side.
    pub rust_ident: syn::Ident,
    pub attributes: ConstAttributes,
}
impl ConstSpec {
    /// Python-side name: the `#[pyo3(name = "...")]` override if given,
    /// otherwise the Rust identifier with any raw-ident `r#` prefix stripped.
    pub fn python_name(&self) -> Cow<Ident> {
        if let Some(name) = &self.attributes.name {
            Cow::Borrowed(&name.0)
        } else {
            Cow::Owned(self.rust_ident.unraw())
        }
    }
    /// Null-terminated Python name (as required by the CPython C API).
    pub fn null_terminated_python_name(&self) -> TokenStream {
        let name = format!("{}\0", self.python_name());
        quote!({#name})
    }
}
/// Attributes recognized on a const: `#[classattr]` and `#[pyo3(name = "...")]`.
pub struct ConstAttributes {
    // True when `#[classattr]` was present.
    pub is_class_attr: bool,
    // Optional `#[pyo3(name = "...")]` rename.
    pub name: Option<NameAttribute>,
    pub deprecations: Deprecations,
}
/// The options accepted inside `#[pyo3(...)]` on a const (currently only `name`).
pub enum PyO3ConstAttribute {
    Name(NameAttribute),
}
impl Parse for PyO3ConstAttribute {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(attributes::kw::name) {
            input.parse().map(PyO3ConstAttribute::Name)
        } else {
            // lookahead.error() lists the expected tokens for a good message.
            Err(lookahead.error())
        }
    }
}
impl ConstAttributes {
    /// Extracts pyo3-relevant attributes from `attrs`, removing the ones that
    /// were consumed (`#[classattr]`, `#[pyo3(...)]`) and leaving all others in
    /// place for the compiler to see.
    ///
    /// Errors on `#[classattr]` with arguments or a duplicated `name`.
    pub fn from_attrs(attrs: &mut Vec<syn::Attribute>) -> syn::Result<Self> {
        let mut attributes = ConstAttributes {
            is_class_attr: false,
            name: None,
            deprecations: Deprecations::new(),
        };
        // The closure returns Ok(true) to consume an attribute, Ok(false) to keep it.
        take_attributes(attrs, |attr| {
            if is_attribute_ident(attr, "classattr") {
                ensure_spanned!(
                    attr.tokens.is_empty(),
                    attr.span() => "`#[classattr]` does not take any arguments"
                );
                attributes.is_class_attr = true;
                Ok(true)
            } else if let Some(pyo3_attributes) = get_pyo3_options(attr)? {
                for pyo3_attr in pyo3_attributes {
                    match pyo3_attr {
                        PyO3ConstAttribute::Name(name) => attributes.set_name(name)?,
                    }
                }
                Ok(true)
            } else {
                Ok(false)
            }
        })?;
        Ok(attributes)
    }
    /// Records the `name` option, rejecting a second occurrence.
    fn set_name(&mut self, name: NameAttribute) -> Result<()> {
        ensure_spanned!(
            self.name.is_none(),
            name.0.span() => "`name` may only be specified once"
        );
        self.name = Some(name);
        Ok(())
    }
}
| 26.755102 | 93 | 0.54958 |
d7aebf277eefcbd296809da983cff4f3cda92715 | 1,301 | use std::io::stdout;
use async_trait::async_trait;
use clap::{ArgMatches, Command};
use libcli_rs::output::{OutputFactory, OutputTrait};
use simpledi_rs::di::{DIContainer, DIContainerTrait};
use huber_common::model::config::Config;
use huber_common::result::Result;
use crate::cmd::{CommandAsyncTrait, CommandTrait};
use crate::service::repo::RepoService;
use crate::service::ItemOperationTrait;
pub(crate) const CMD_NAME: &str = "list";
/// `huber repo list` subcommand (alias `ls`): prints the configured repositories.
#[derive(Debug)]
pub(crate) struct RepoListCmd;
// SAFETY: RepoListCmd is a fieldless unit struct, so there is no state to race on.
// NOTE(review): the compiler auto-implements Send/Sync for fieldless structs —
// confirm why these manual unsafe impls are needed at all.
unsafe impl Send for RepoListCmd {}
unsafe impl Sync for RepoListCmd {}
impl RepoListCmd {
    pub(crate) fn new() -> Self {
        Self {}
    }
}
impl<'help> CommandTrait<'help> for RepoListCmd {
    /// clap definition for the subcommand.
    fn app(&self) -> Command<'help> {
        Command::new(CMD_NAME)
            .visible_alias("ls")
            .about("List repositories")
    }
}
#[async_trait]
impl CommandAsyncTrait for RepoListCmd {
    /// Fetches all repositories via the DI-provided RepoService and renders
    /// them to stdout in the configured output format.
    async fn run(
        &self,
        config: &Config,
        container: &DIContainer,
        _matches: &ArgMatches,
    ) -> Result<()> {
        let repo_service = container.get::<RepoService>().unwrap();
        let repos = repo_service.list()?;
        output!(config.output_format, .display(
            stdout(),
            &repos,
            None,
            None,
        ))
    }
}
| 22.824561 | 67 | 0.624904 |
c15d0465bb97c338f7efd796734cec3598be21d8 | 74 | pub mod prompt_for_entry;
pub mod prompt_for_backup;
pub mod prompt_error; | 24.666667 | 26 | 0.851351 |
8a4160af55e3261984af3f631b52cb178e5dfc1f | 6,744 | #![allow(dead_code)]
use stdoor::trace;
/// Demo entry point: prints the integer MIN/MAX tables, then demonstrates
/// the `abs` family of methods on `i8`.
fn main() {
    smallest_largest_value();
    all_abs();
}
/// Prints MIN/MAX for every signed and unsigned integer type, once via the
/// legacy module constants (`std::i8::MIN`, …) and once via the associated
/// constants on the primitive types (`i8::MIN`, …) — the headers printed below
/// (in Chinese) note that the module form has been superseded.
fn smallest_largest_value() {
    // Header: "Signed integer MIN/MAX defined in the per-type modules (superseded)"
    println!("\n定义在各模块的有符号整型最小最大值(已被取代)");
    trace("i8 smallest value", 32, format!("std::i8::MIN = {}", std::i8::MIN).as_str(), None);
    trace("i8 largest value", 32, format!("std::i8::MAX = {}", std::i8::MAX).as_str(), None);
    trace("i16 smallest value", 32, format!("std::i16::MIN = {}", std::i16::MIN).as_str(), None);
    trace("i16 largest value", 32, format!("std::i16::MAX = {}", std::i16::MAX).as_str(), None);
    trace("i32 smallest value", 32, format!("std::i32::MIN = {}", std::i32::MIN).as_str(), None);
    trace("i32 largest value", 32, format!("std::i32::MAX = {}", std::i32::MAX).as_str(), None);
    trace("i64 smallest value", 32, format!("std::i64::MIN = {}", std::i64::MIN).as_str(), None);
    trace("i64 largest value", 32, format!("std::i64::MAX = {}", std::i64::MAX).as_str(), None);
    trace("i128 smallest value", 32, format!("std::i128::MIN = {}", std::i128::MIN).as_str(), None);
    trace("i128 largest value", 32, format!("std::i128::MAX = {}", std::i128::MAX).as_str(), None);
    trace("isize smallest value", 32, format!("std::isize::MIN = {}", std::isize::MIN).as_str(), None);
    trace("isize largest value", 32, format!("std::isize::MAX = {}", std::isize::MAX).as_str(), None);
    // Header: "Signed integer MIN/MAX as associated constants on the primitive types"
    println!("\n定义在各原始类型的有符号整型最小最大值");
    trace("i8 smallest value", 32, format!("i8::MIN = {}", i8::MIN).as_str(), None);
    trace("i8 largest value", 32, format!("i8::MAX = {}", i8::MAX).as_str(), None);
    trace("i16 smallest value", 32, format!("i16::MIN = {}", i16::MIN).as_str(), None);
    trace("i16 largest value", 32, format!("i16::MAX = {}", i16::MAX).as_str(), None);
    trace("i32 smallest value", 32, format!("i32::MIN = {}", i32::MIN).as_str(), None);
    trace("i32 largest value", 32, format!("i32::MAX = {}", i32::MAX).as_str(), None);
    trace("i64 smallest value", 32, format!("i64::MIN = {}", i64::MIN).as_str(), None);
    trace("i64 largest value", 32, format!("i64::MAX = {}", i64::MAX).as_str(), None);
    trace("i128 smallest value", 32, format!("i128::MIN = {}", i128::MIN).as_str(), None);
    trace("i128 largest value", 32, format!("i128::MAX = {}", i128::MAX).as_str(), None);
    trace("isize smallest value", 32, format!("isize::MIN = {}", isize::MIN).as_str(), None);
    trace("isize largest value", 32, format!("isize::MAX = {}", isize::MAX).as_str(), None);
    // Header: "Unsigned integer MIN/MAX defined in the per-type modules (superseded)"
    println!("\n定义在各模块的无符号整型最小最大值(已被取代)");
    trace("u8 smallest value", 32, format!("std::u8::MIN = {}", std::u8::MIN).as_str(), None);
    trace("u8 largest value", 32, format!("std::u8::MAX = {}", std::u8::MAX).as_str(), None);
    trace("u16 smallest value", 32, format!("std::u16::MIN = {}", std::u16::MIN).as_str(), None);
    trace("u16 largest value", 32, format!("std::u16::MAX = {}", std::u16::MAX).as_str(), None);
    trace("u32 smallest value", 32, format!("std::u32::MIN = {}", std::u32::MIN).as_str(), None);
    trace("u32 largest value", 32, format!("std::u32::MAX = {}", std::u32::MAX).as_str(), None);
    trace("u64 smallest value", 32, format!("std::u64::MIN = {}", std::u64::MIN).as_str(), None);
    trace("u64 largest value", 32, format!("std::u64::MAX = {}", std::u64::MAX).as_str(), None);
    trace("u128 smallest value", 32, format!("std::u128::MIN = {}", std::u128::MIN).as_str(), None);
    trace("u128 largest value", 32, format!("std::u128::MAX = {}", std::u128::MAX).as_str(), None);
    trace("usize smallest value", 32, format!("std::usize::MIN = {}", std::usize::MIN).as_str(), None);
    trace("usize largest value", 32, format!("std::usize::MAX = {}", std::usize::MAX).as_str(), None);
    // Header: "Unsigned integer MIN/MAX as associated constants on the primitive types"
    println!("\n定义在各原始类型的无符号整型最小最大值");
    trace("u8 smallest value", 32, format!("u8::MIN = {}", u8::MIN).as_str(), None);
    trace("u8 largest value", 32, format!("u8::MAX = {}", u8::MAX).as_str(), None);
    trace("u16 smallest value", 32, format!("u16::MIN = {}", u16::MIN).as_str(), None);
    trace("u16 largest value", 32, format!("u16::MAX = {}", u16::MAX).as_str(), None);
    trace("u32 smallest value", 32, format!("u32::MIN = {}", u32::MIN).as_str(), None);
    trace("u32 largest value", 32, format!("u32::MAX = {}", u32::MAX).as_str(), None);
    trace("u64 smallest value", 32, format!("u64::MIN = {}", u64::MIN).as_str(), None);
    trace("u64 largest value", 32, format!("u64::MAX = {}", u64::MAX).as_str(), None);
    trace("u128 smallest value", 32, format!("u128::MIN = {}", u128::MIN).as_str(), None);
    trace("u128 largest value", 32, format!("u128::MAX = {}", u128::MAX).as_str(), None);
    trace("usize smallest value", 32, format!("usize::MIN = {}", usize::MIN).as_str(), None);
    trace("usize largest value", 32, format!("usize::MAX = {}", usize::MAX).as_str(), None);
}
/// Demonstrates the absolute-value family on `i8`, including overflow behaviour
/// on `i8::MIN` (whose absolute value does not fit in `i8`).
fn all_abs() {
    {
        // abs: plain absolute value (not overflow-safe)
        let x = -1i8;
        trace(format!("abs({}) = ", x).as_str(), 32, format!("{:<}", x.abs()).as_str(), Some(""));
        if cfg!(debug_assertions) {
            trace(format!("abs({}) = ", i8::MIN).as_str(), 32, format!("a panic in debug mode").as_str(), Some(""));
        } else {
            // Panics in debug mode; in release mode it does not panic and returns i8::MIN
            trace(format!("abs({}) = ", i8::MIN).as_str(), 32, format!("{:<}", i8::MIN.abs()).as_str(), Some(""));
        }
    }
    {
        // checked_abs: absolute value returning Option (None on overflow)
        let x = -1i8;
        trace(format!("checked_abs({}) = ", x).as_str(), 32, format!("{:<}", x.checked_abs().unwrap()).as_str(), Some(""));
        if let None = i8::MIN.checked_abs() {
            trace(format!("checked_abs({}) = ", i8::MIN).as_str(), 32, format!("None").as_str(), Some(""));
        }
    }
    {
        // overflowing_abs: returns a tuple (i8, bool) — t.0 = the correct absolute
        // value, or i8::MIN on overflow; t.1 = whether overflow happened
        let x = -1i8;
        trace(format!("overflowing_abs({}) = ", x).as_str(), 32, format!("{:<?}", x.overflowing_abs()).as_str(), Some(""));
        trace(format!("overflowing_abs({}) = ", i8::MIN).as_str(), 32, format!("{:<?}", i8::MIN.overflowing_abs()).as_str(), Some(""));
    }
    {
        // saturating_abs: absolute value; on overflow returns i8::MAX
        let x = -1i8;
        trace(format!("saturating_abs({}) = ", x).as_str(), 32, format!("{:<}", x.saturating_abs()).as_str(), Some(""));
        trace(format!("saturating_abs({}) = ", i8::MIN).as_str(), 32, format!("{:<}", i8::MIN.saturating_abs()).as_str(), Some(""));
    }
    {
        // wrapping_abs: absolute value; on overflow wraps around to i8::MIN
        let x = -1i8;
        trace(format!("wrapping_abs({}) = ", x).as_str(), 32, format!("{:<}", x.wrapping_abs()).as_str(), Some(""));
        trace(format!("wrapping_abs({}) = ", i8::MIN).as_str(), 32, format!("{:<}", i8::MIN.wrapping_abs()).as_str(), Some(""));
    }
}
// Stub — presumably intended to demonstrate the add family
// (checked_add, overflowing_add, …); not yet implemented.
fn all_add() {
}
| 55.735537 | 135 | 0.558571 |
235b17a82eb8033629b6b31f835857de4be18312 | 5,043 | use {neon::prelude::*, solana_program::pubkey::Pubkey, std::str::FromStr};
/// Coerce a JS value into a vector of strings:
/// - a JS array → each element stringified via its JS `toString` conversion;
/// - a single JS string → a one-element vector;
/// - anything else → an empty vector.
fn to_vec(cx: &mut FunctionContext, list: &Handle<JsValue>) -> NeonResult<Vec<String>> {
    if list.is_a::<JsArray, _>(cx) {
        // downcast cannot fail here: is_a just confirmed the type.
        let list = list.downcast::<JsArray, _>(cx).unwrap();
        let len = list.len(cx) as usize;
        let mut result: Vec<String> = Vec::with_capacity(len);
        for index in 0..len {
            let item = list.get(cx, index as u32)?;
            let item = item.to_string(cx)?;
            // push instead of insert-at-end: same effect, O(1) and clearer intent.
            result.push(item.value(cx));
        }
        Ok(result)
    } else if list.is_a::<JsString, _>(cx) {
        let item = list.downcast::<JsString, _>(cx).unwrap().value(cx);
        Ok(vec![item])
    } else {
        Ok(vec![])
    }
}
/// Strategy for deriving a program address from four string inputs; each
/// implementation interprets the inputs as a different seed layout.
/// Returns None when any input fails to parse or no address can be found.
trait AddressJob {
    fn try_manipulate_with_address(
        &self,
        program_id: &Pubkey,
        a1: &String,
        a2: &String,
        a3: &String,
        a4: &String,
    ) -> Option<Pubkey>;
}
/// Seeds: a1 as raw bytes, a2/a3/a4 parsed as Pubkeys; uses
/// try_find_program_address (bump discovered automatically).
struct FinderPubKey;
impl AddressJob for FinderPubKey {
    fn try_manipulate_with_address(
        &self,
        program_id: &Pubkey,
        a1: &String,
        a2: &String,
        a3: &String,
        a4: &String,
    ) -> Option<Pubkey> {
        let s2 = Pubkey::from_str(&a2[..]).ok()?;
        let s3 = Pubkey::from_str(&a3[..]).ok()?;
        let s4 = Pubkey::from_str(&a4[..]).ok()?;
        let seeds = [a1.as_bytes(), s2.as_ref(), s3.as_ref(), s4.as_ref()];
        let (key, _) = Pubkey::try_find_program_address(&seeds, &program_id)?;
        Some(key)
    }
}
/// Like FinderPubKey, but the last seed (a4) is used as raw bytes
/// instead of being parsed as a Pubkey.
struct FinderLastBuf;
impl AddressJob for FinderLastBuf {
    fn try_manipulate_with_address(
        &self,
        program_id: &Pubkey,
        a1: &String,
        a2: &String,
        a3: &String,
        a4: &String,
    ) -> Option<Pubkey> {
        let s2 = Pubkey::from_str(&a2[..]).ok()?;
        let s3 = Pubkey::from_str(&a3[..]).ok()?;
        let seeds = [a1.as_bytes(), s2.as_ref(), s3.as_ref(), a4.as_bytes()];
        let (key, _) = Pubkey::try_find_program_address(&seeds, &program_id)?;
        Some(key)
    }
}
/// a4 is parsed as a u8 bump seed and create_program_address is used
/// directly (no automatic bump search).
struct CreateAddress3Pubkey;
impl AddressJob for CreateAddress3Pubkey {
    fn try_manipulate_with_address(
        &self,
        program_id: &Pubkey,
        a1: &String,
        a2: &String,
        a3: &String,
        a4: &String,
    ) -> Option<Pubkey> {
        let s2 = Pubkey::from_str(&a2[..]).ok()?;
        let s3 = Pubkey::from_str(&a3[..]).ok()?;
        let s4 = a4.parse::<u8>().ok()?;
        let s4 = [s4];
        let seeds = [a1.as_bytes(), s2.as_ref(), s3.as_ref(), &s4];
        let key = Pubkey::create_program_address(&seeds, &program_id).ok()?;
        Some(key)
    }
}
fn get_finder(mode: &str) -> Option<Box<dyn AddressJob>> {
match mode {
"FinderPubKey" => Some(Box::new(FinderPubKey {})),
"FinderLastBuf" => Some(Box::new(FinderLastBuf {})),
"CreateAddress3Pubkey" => Some(Box::new(CreateAddress3Pubkey {})),
_ => None,
}
}
/// JS-exposed entry point: for every (block3, block4) pair from the two input
/// lists, derives a program address with the selected strategy and invokes the
/// JS callback with each hit; a final callback invocation with a single null
/// argument signals completion.
///
/// Args (from JS): mode, programKey, block1, block2, blocks3, blocks4, callback.
/// The work runs on a background thread and re-enters JS via a neon Channel.
fn program_address(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let mode = cx.argument::<JsString>(0)?.value(&mut cx);
    let program_key = cx.argument::<JsString>(1)?.value(&mut cx);
    let block1 = cx.argument::<JsString>(2)?.value(&mut cx);
    let block2 = cx.argument::<JsString>(3)?.value(&mut cx);
    let blocks3 = cx.argument::<JsValue>(4)?;
    let blocks4 = cx.argument::<JsValue>(5)?;
    // NOTE(review): unwrap panics on an invalid base58 program key — confirm
    // whether a JS exception would be preferable here.
    let program_id = Pubkey::from_str(&program_key[..]).unwrap();
    // Root the callback so it survives leaving this JS call frame.
    let cb_root = cx.argument::<JsFunction>(6)?.root(&mut cx);
    let channel = cx.channel();
    let blocks3 = to_vec(&mut cx, &blocks3)?;
    let blocks4 = to_vec(&mut cx, &blocks4)?;
    std::thread::spawn(move || {
        // All JS interaction must happen on the JS thread; the closure sent
        // through the channel runs there.
        channel.send(move |mut cx| {
            // NOTE(review): unwrap panics on an unknown mode string — confirm.
            let finder = get_finder(&mode[..]).unwrap();
            let callback = cb_root.into_inner(&mut cx);
            let null = cx.null();
            let block1_n = cx.string(&block1);
            let block2_n = cx.string(&block2);
            // Cartesian product of the two seed lists.
            for block3 in &blocks3 {
                for block4 in &blocks4 {
                    let address = finder.try_manipulate_with_address(
                        &program_id,
                        &block1,
                        &block2,
                        block3,
                        block4,
                    );
                    if let Some(key) = address {
                        let key = key.to_string();
                        let key = cx.string(key);
                        let val3 = cx.string(block3);
                        let val4 = cx.string(block4);
                        callback.call(&mut cx, null, vec![key, block1_n, block2_n, val3, val4])?;
                    }
                }
            }
            // Completion signal: single null argument.
            callback.call(&mut cx, null, vec![null])?;
            Ok({})
        });
    });
    Ok(cx.undefined())
}
// Neon module initializer: registers the native functions visible from JS.
#[neon::main]
fn main(mut cx: ModuleContext) -> NeonResult<()> {
    cx.export_function("programAddressList", program_address)?;
    Ok(())
}
| 32.326923 | 97 | 0.525679 |
2fd2d6dc37670ae16807a5029893d55b72a37c7b | 45,777 | use std::convert::TryFrom;
use crate::ast::ModuleStmt::*;
use crate::ast::*;
use crate::builders::{
alt,
delimited,
many0,
many1,
map,
op_expr_builder,
opt,
pair,
preceded,
separated,
terminated,
verify,
};
use crate::errors::ParseError;
use crate::node::{
Node,
Span,
};
use crate::tokenizer::types::{
Token,
TokenType,
};
use crate::{
Cursor,
ParseResult,
};
/// Parse the next token in the input, returning (remaining input, token).
/// Fails with an EOF error when no tokens remain.
pub fn next(input: Cursor) -> ParseResult<&Token> {
    match input.first() {
        Some(tok) => Ok((&input[1..], tok)),
        None => Err(ParseError::eof(input)),
    }
}
/// Build a parser that accepts exactly one token of the given type.
pub fn token<'a>(typ: TokenType) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {
    verify(
        next,
        move |t| t.typ == typ,
        move |inp, _| ParseError::str(inp, &format!("expected {:?} token", typ)),
    )
}
/// Parse a name (identifier/keyword) token.
pub fn name_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::NAME)(input)
}
/// Parse a name token whose text equals the given string exactly.
#[allow(clippy::needless_lifetimes)]
pub fn name<'a>(string: &'a str) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {
    verify(
        name_token,
        move |t| t.string == string,
        move |inp, _| ParseError::str(inp, &format!("expected \"{}\" name token", string)),
    )
}
/// Parse an op (operator/punctuation) token.
pub fn op_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::OP)(input)
}
/// Parse an op token whose text equals the given string exactly.
#[allow(clippy::needless_lifetimes)]
pub fn op<'a>(string: &'a str) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {
    verify(
        op_token,
        move |t| t.string == string,
        move |inp, _| ParseError::str(inp, &format!("expected \"{}\" op token", string)),
    )
}
/// Parse a number token.
pub fn number_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::NUMBER)(input)
}
/// Parse a string token.
pub fn string_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::STRING)(input)
}
/// Parse an indent token.
pub fn indent_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::INDENT)(input)
}
/// Parse a dedent token.
pub fn dedent_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::DEDENT)(input)
}
/// Parse a grammatically significant newline token.
pub fn newline_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::NEWLINE)(input)
}
/// Parse an endmarker (end-of-input) token.
pub fn endmarker_token(input: Cursor) -> ParseResult<&Token> {
    token(TokenType::ENDMARKER)(input)
}
/// Parse a module definition (an entire source file's token stream).
pub fn file_input(input: Cursor) -> ParseResult<Node<Module>> {
    alt((empty_file_input, non_empty_file_input))(input)
}
/// Parse an empty module definition: just NEWLINE then ENDMARKER.
pub fn empty_file_input(input: Cursor) -> ParseResult<Node<Module>> {
    let (input, newline_tok) = newline_token(input)?;
    let (input, _) = endmarker_token(input)?;
    // The empty module's span is the newline token's span.
    Ok((input, Node::new(Module { body: vec![] }, newline_tok.span)))
}
/// Parse a non-empty module definition: one or more module statements.
pub fn non_empty_file_input(input: Cursor) -> ParseResult<Node<Module>> {
    // module_stmt+
    let (input, body) = many1(module_stmt)(input)?;
    // ENDMARKER
    let (input, _) = endmarker_token(input)?;
    // Unwraps are safe: many1 guarantees at least one statement.
    let span = {
        let first = body.first().unwrap();
        let last = body.last().unwrap();
        Span::from_pair(first, last)
    };
    Ok((input, Node::new(Module { body }, span)))
}
/// Parse a module statement, such as a contract definition.
pub fn module_stmt(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    alt((import_stmt, type_def, contract_def, struct_def))(input)
}
/// Parse an import statement (either form), terminated by a newline.
pub fn import_stmt(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    terminated(alt((simple_import, from_import)), newline_token)(input)
}
/// Parse an import statement beginning with the "import" keyword:
/// `import a.b [as x], c.d [as y], ...`
pub fn simple_import(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    let (input, import_kw) = name("import")(input)?;
    let (input, first_name) = simple_import_name(input)?;
    let (input, mut other_names) = many0(preceded(op(","), simple_import_name))(input)?;
    let mut result = vec![first_name];
    result.append(&mut other_names);
    // Span runs from the `import` keyword to the last imported name.
    let span = {
        let last = result.last().unwrap();
        Span::from_pair(import_kw, last)
    };
    Ok((input, Node::new(SimpleImport { names: result }, span)))
}
/// Parse one `a.b.c [as alias]` element of a simple import.
pub fn simple_import_name(input: Cursor) -> ParseResult<Node<SimpleImportName>> {
    let (input, path) = dotted_name(input)?;
    let (input, alias) = opt(preceded(name("as"), name_token))(input)?;
    // Unwraps are safe: dotted_name yields at least one segment on success.
    let first = path.first().unwrap();
    let last = path.last().unwrap();
    let path_span = Span::from_pair(first, last);
    // Extend the span over the alias when one is present.
    let span = {
        match alias {
            Some(alias_tok) => Span::from_pair(path_span, alias_tok),
            None => path_span,
        }
    };
    Ok((
        input,
        Node::new(
            SimpleImportName {
                path,
                alias: alias.map(|t| t.into()),
            },
            span,
        ),
    ))
}
/// Parse an import statement beginning with the "from" keyword.
pub fn from_import(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    alt((from_import_parent_alt, from_import_sub_alt))(input)
}
/// Parse a "from" import with a path that contains only parent module
/// components, e.g. `from .. import x`.
pub fn from_import_parent_alt(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    let (input, from_kw) = name("from")(input)?;
    let (input, parent_level) = dots_to_int(input)?;
    let (input, _) = name("import")(input)?;
    let (input, names) = from_import_names(input)?;
    // Dots-only path: relative with an empty trailing path.
    let path = Node::new(
        FromImportPath::Relative {
            parent_level: parent_level.kind,
            path: vec![],
        },
        parent_level.span,
    );
    let span = Span::from_pair(from_kw, names.span);
    Ok((input, Node::new(FromImport { path, names }, span)))
}
/// Parse a "from" import with a path that contains sub module components,
/// e.g. `from a.b import x` or `from ..a import x`.
pub fn from_import_sub_alt(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    let (input, from_kw) = name("from")(input)?;
    let (input, path) = from_import_sub_path(input)?;
    let (input, _) = name("import")(input)?;
    let (input, names) = from_import_names(input)?;
    let span = Span::from_pair(from_kw, names.span);
    Ok((input, Node::new(FromImport { path, names }, span)))
}
/// Parse a path containing sub module components in a "from" import statement.
/// Optional leading dots make the path relative; otherwise it is absolute.
pub fn from_import_sub_path(input: Cursor) -> ParseResult<Node<FromImportPath>> {
    let (input, opt_parent_level) = opt(dots_to_int)(input)?;
    let (input, path) = dotted_name(input)?;
    // Unwraps are safe: dotted_name yields at least one segment on success.
    let first = path.first().unwrap();
    let last = path.last().unwrap();
    let span = Span::from_pair(first, last);
    let result = match opt_parent_level {
        Some(parent_level) => {
            // Span starts at the leading dots for relative paths.
            let span = Span::from_pair(&parent_level, span);
            Node::new(
                FromImportPath::Relative {
                    parent_level: parent_level.kind,
                    path,
                },
                span,
            )
        }
        None => Node::new(FromImportPath::Absolute { path }, span),
    };
    Ok((input, result))
}
/// Parse the names to be imported by a "from" import statement:
/// `*`, a parenthesized list, or a bare list.
pub fn from_import_names(input: Cursor) -> ParseResult<Node<FromImportNames>> {
    alt((
        from_import_names_star,
        from_import_names_parens,
        from_import_names_list,
    ))(input)
}
/// Parse a wildcard token ("*") in a "from" import statement.
pub fn from_import_names_star(input: Cursor) -> ParseResult<Node<FromImportNames>> {
    let (input, star) = op("*")(input)?;
    Ok((input, Node::new(FromImportNames::Star, star.span)))
}
/// Parse a parenthesized list of names to be imported by a "from" import
/// statement; the span covers the parentheses.
pub fn from_import_names_parens(input: Cursor) -> ParseResult<Node<FromImportNames>> {
    let (input, l_paren) = op("(")(input)?;
    let (input, names) = from_import_names_list(input)?;
    let (input, r_paren) = op(")")(input)?;
    Ok((
        input,
        Node::new(names.kind, Span::from_pair(l_paren, r_paren)),
    ))
}
/// Parse a comma-separated list of names (trailing comma allowed) to be
/// imported by a "from" import statement.
pub fn from_import_names_list(input: Cursor) -> ParseResult<Node<FromImportNames>> {
    let (input, first_name) = from_import_name(input)?;
    let (input, mut other_names) = many0(preceded(op(","), from_import_name))(input)?;
    let (input, comma_tok) = opt(op(","))(input)?;
    let mut names = vec![first_name];
    names.append(&mut other_names);
    // A trailing comma extends the span to include it.
    let span = {
        let first = names.first().unwrap();
        match comma_tok {
            Some(tok) => Span::from_pair(first, tok),
            None => {
                let last = names.last().unwrap();
                Span::from_pair(first, last)
            }
        }
    };
    Ok((input, Node::new(FromImportNames::List(names), span)))
}
/// Parse an import name with an optional alias in a "from" import statement.
pub fn from_import_name(input: Cursor) -> ParseResult<Node<FromImportName>> {
    let (input, name_tok) = name_token(input)?;
    let (input, alias) = opt(preceded(name("as"), name_token))(input)?;
    let span = match alias {
        Some(alias_tok) => Span::from_pair(name_tok, alias_tok),
        None => name_tok.span,
    };
    Ok((
        input,
        Node::new(
            FromImportName {
                name: name_tok.into(),
                alias: alias.map(|t| t.into()),
            },
            span,
        ),
    ))
}
/// Parse a dotted import name: one or more NAME tokens separated by ".".
pub fn dotted_name(input: Cursor) -> ParseResult<Vec<Node<String>>> {
    separated(map(name_token, |t| t.into()), op("."), false)(input)
}
/// Parse preceding dots used to indicate parent module imports in import
/// statements, returning the parent level as an integer.
///
/// Each "." op counts as 1 and each "..." (tokenized as a single ellipsis op)
/// as 3; the total minus 1 gives the level — so a single leading dot yields 0.
/// NOTE(review): presumably 0 means "current package"; confirm against the
/// consumers of `FromImportPath::Relative`.
pub fn dots_to_int(input: Cursor) -> ParseResult<Node<usize>> {
    let (input, toks) = many1(alt((op("."), op("..."))))(input)?;
    let value = toks
        .iter()
        .map(|t| if t.string == "." { 1 } else { 3 })
        .sum::<usize>()
        - 1;
    // Span covers the whole run of dot tokens (many1 guarantees non-empty).
    let span = {
        let first = toks.first().unwrap();
        let last = toks.last().unwrap();
        Span::from_pair(*first, *last)
    };
    Ok((input, Node::new(value, span)))
}
/// Parse a contract definition statement:
/// `contract <name>:` NEWLINE INDENT field* contract_stmt* DEDENT
pub fn contract_def(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
    // "contract" name ":" NEWLINE
    let (input, contract_kw) = name("contract")(input)?;
    let (input, name_tok) = name_token(input)?;
    let (input, _) = op(":")(input)?;
    let (input, _) = newline_token(input)?;
    // INDENT contract_stmt+ DEDENT
    let (input, _) = indent_token(input)?;
    let (input, fields) = many0(field)(input)?;
    let (input, body) = many0(contract_stmt)(input)?;
    let (input, _) = dedent_token(input)?;
    // Span end: last body statement, falling back to the last field.
    // NOTE(review): this unwrap panics if both are empty — presumably
    // unreachable because the tokenizer only emits INDENT for non-empty
    // blocks; TODO confirm.
    let last_stmt = body
        .last()
        .map(|node| node.span)
        .or_else(|| fields.last().map(|node| node.span))
        .unwrap();
    let span = Span::from_pair(contract_kw, last_stmt);
    Ok((
        input,
        Node::new(
            ContractDef {
                name: name_tok.into(),
                fields,
                body,
            },
            span,
        ),
    ))
}
/// Parse a contract statement (an event or function definition).
pub fn contract_stmt(input: Cursor) -> ParseResult<Node<ContractStmt>> {
    alt((event_def, func_def))(input)
}
/// Parse a struct or contract field definition.
/// Parse a struct or contract field definition.
///
/// Grammar: `["pub"] ["const"] NAME ":" type_desc NEWLINE`.
/// The span starts at the leftmost present qualifier (or the field name when
/// there is none) and ends at the type description.
pub fn field(input: Cursor) -> ParseResult<Node<Field>> {
    // Both qualifiers are optional and independent.
    let (input, pub_qual) = opt_qualifier(input, "pub", PubQualifier {})?;
    let (input, const_qual) = opt_qualifier(input, "const", ConstQualifier {})?;
    let (input, name) = name_token(input)?;
    let (input, _) = op(":")(input)?;
    let (input, typ) = type_desc(input)?;
    let (input, _) = newline_token(input)?;
    // Pick the span's left edge: "pub" wins over "const" wins over the name.
    let left_span = if let Some(node) = &pub_qual {
        node.span
    } else if let Some(node) = &const_qual {
        node.span
    } else {
        name.span
    };
    let span = Span::from_pair(left_span, &typ);
    Ok((
        input,
        Node::new(
            Field {
                pub_qual,
                const_qual,
                name: name.into(),
                typ,
            },
            span,
        ),
    ))
}
/// Parse a struct definition statement.
pub fn struct_def(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
// "struct" name ":" NEWLINE
let (input, contract_kw) = name("struct")(input)?;
let (input, name_tok) = name_token(input)?;
let (input, _) = op(":")(input)?;
let (input, _) = newline_token(input)?;
// INDENT struct_field+ DEDENT
let (input, _) = indent_token(input)?;
let (input, fields) = many1(field)(input)?;
let (input, _) = dedent_token(input)?;
let last_stmt = fields.last().unwrap();
let span = Span::from_pair(contract_kw, last_stmt);
Ok((
input,
Node::new(
StructDef {
name: name_tok.into(),
fields,
},
span,
),
))
}
/// Parse an event definition statement.
pub fn event_def(input: Cursor) -> ParseResult<Node<ContractStmt>> {
// "event" name ":" NEWLINE
let (input, event_kw) = name("event")(input)?;
let (input, name_tok) = name_token(input)?;
let (input, _) = op(":")(input)?;
let (input, _) = newline_token(input)?;
// INDENT event_field+ DEDENT
let (input, _) = indent_token(input)?;
let (input, fields) = many1(event_field)(input)?;
let (input, _) = dedent_token(input)?;
let last_field = fields.last().unwrap();
let span = Span::from_pair(event_kw, last_field);
Ok((
input,
Node::new(
ContractStmt::EventDef {
name: name_tok.into(),
fields,
},
span,
),
))
}
/// Parse an event field definition.
pub fn event_field(input: Cursor) -> ParseResult<Node<EventField>> {
let (input, idx_qual) = opt_qualifier(input, "idx", IdxQualifier {})?;
let (input, name) = name_token(input)?;
let (input, _) = op(":")(input)?;
let (input, typ) = type_desc(input)?;
let (input, _) = newline_token(input)?;
let span = match &idx_qual {
Some(node) => Span::from_pair(node, &typ),
None => Span::from_pair(name, &typ),
};
Ok((
input,
Node::new(
EventField {
idx_qual,
name: name.into(),
typ,
},
span,
),
))
}
/// Parse a function definition.
///
/// Grammar: `["pub"] "def" NAME "(" arg_list ")" ["->" type] ":" block`.
/// The span runs from the `pub` qualifier (or `def` keyword when absent) to
/// the last statement of the body.
pub fn func_def(input: Cursor) -> ParseResult<Node<ContractStmt>> {
    let (input, pub_qual) = opt_qualifier(input, "pub", PubQualifier {})?;
    let (input, def_kw) = name("def")(input)?;
    let (input, name_tok) = name_token(input)?;
    let (input, _) = op("(")(input)?;
    let (input, args) = arg_list(input)?;
    let (input, _) = op(")")(input)?;
    // Optional return type annotation; only base and tuple types are allowed.
    let (input, return_type) = opt(preceded(op("->"), base_or_tuple_type))(input)?;
    let (input, _) = op(":")(input)?;
    let (input, body) = block(input)?;
    // `block` always yields at least one statement, so `unwrap` is safe here.
    let last = body.last().unwrap();
    let span = match &pub_qual {
        Some(pub_qual) => Span::from_pair(pub_qual, last),
        None => Span::from_pair(def_kw, last),
    };
    Ok((
        input,
        Node::new(
            ContractStmt::FuncDef {
                pub_qual,
                name: name_tok.into(),
                args,
                return_type,
                body,
            },
            span,
        ),
    ))
}
pub fn arg_list(input: Cursor) -> ParseResult<Vec<Node<FuncDefArg>>> {
match input[0] {
Token { string: ")", .. } => Ok((input, vec![])),
_ => separated(arg_def, op(","), true)(input),
}
}
pub fn arg_def(input: Cursor) -> ParseResult<Node<FuncDefArg>> {
let (input, name_tok) = name_token(input)?;
let (input, _) = op(":")(input)?;
let (input, typ) = type_desc(input)?;
let span = Span::from_pair(name_tok, &typ);
Ok((
input,
Node::new(
FuncDefArg {
name: name_tok.into(),
typ,
},
span,
),
))
}
/// Parse a type definition (type alias).
pub fn type_def(input: Cursor) -> ParseResult<Node<ModuleStmt>> {
let (input, type_kw) = name("type")(input)?;
let (input, name) = name_token(input)?;
let (input, _) = op("=")(input)?;
let (input, type_desc) = type_desc(input)?;
let (input, _) = newline_token(input)?;
let span = Span::from_pair(type_kw, &type_desc);
Ok((
input,
Node::new(
ModuleStmt::TypeDef {
name: name.into(),
typ: type_desc,
},
span,
),
))
}
/// Parse a type description e.g. "u256" or "map<address, bool>".
pub fn type_desc(input: Cursor) -> ParseResult<Node<TypeDesc>> {
alt((map_type, base_type, tuple_type))(input)
}
/// Parse all base and tuple types but not map types
pub fn base_or_tuple_type(input: Cursor) -> ParseResult<Node<TypeDesc>> {
alt((base_type, tuple_type))(input)
}
/// Parse a map type e.g. "map<address, bool".
pub fn map_type(input: Cursor) -> ParseResult<Node<TypeDesc>> {
alt((map_type_double, map_type_single))(input)
}
/// Parse a map type ending with a right-shift token.
///
/// Example:
/// map<address, map<u256, bool>>
pub fn map_type_double(input: Cursor) -> ParseResult<Node<TypeDesc>> {
let (input, map_kw_1) = name("map")(input)?;
let (input, _) = op("<")(input)?;
let (input, from_1) = base_type(input)?;
let (input, _) = op(",")(input)?;
let (input, map_kw_2) = name("map")(input)?;
let (input, _) = op("<")(input)?;
let (input, from_2) = base_type(input)?;
let (input, _) = op(",")(input)?;
let (input, to) = type_desc(input)?;
let (input, r_bracket) = op(">>")(input)?;
let inner_map = Node::new(
TypeDesc::Map {
from: Box::new(from_2),
to: Box::new(to),
},
Span::new(map_kw_2.span.start, r_bracket.span.end - 1),
);
Ok((
input,
Node::new(
TypeDesc::Map {
from: Box::new(from_1),
to: Box::new(inner_map),
},
Span::from_pair(map_kw_1, r_bracket),
),
))
}
/// Parse a map type ending with a greater-than token.
///
/// Example:
/// map< address, map<u256, map<bool, int128>> >
pub fn map_type_single(input: Cursor) -> ParseResult<Node<TypeDesc>> {
let (input, map_kw) = name("map")(input)?;
let (input, _) = op("<")(input)?;
let (input, from) = base_type(input)?;
let (input, _) = op(",")(input)?;
let (input, to) = type_desc(input)?;
let (input, r_bracket) = op(">")(input)?;
Ok((
input,
Node::new(
TypeDesc::Map {
from: Box::new(from),
to: Box::new(to),
},
Span::from_pair(map_kw, r_bracket),
),
))
}
/// Parse a base type along with an optional array dimension list.
///
/// Example:
/// int128[2][3]
pub fn base_type(input: Cursor) -> ParseResult<Node<TypeDesc>> {
let (input, base) = name_token(input)?;
let (input, dims) = arr_list(input)?;
let mut result = Node::new(
TypeDesc::Base {
base: base.string.to_string(),
},
base.into(),
);
for dim in dims {
let span = Span::from_pair(&result, &dim);
result = Node::new(
TypeDesc::Array {
typ: Box::new(result),
dimension: dim.kind,
},
span,
);
}
Ok((input, result))
}
/// Parse a tuple type.
///
/// Example:
/// (u64, bool)
pub fn tuple_type(input: Cursor) -> ParseResult<Node<TypeDesc>> {
let (input, opening) = op("(")(input)?;
let (input, types) = match input[0] {
Token { string: ")", .. } => Ok((input, vec![])),
_ => separated(base_type, op(","), true)(input),
}?;
let (input, closing) = op(")")(input)?;
let result = Node::new(
TypeDesc::Tuple { items: types },
Span::from_pair(opening, closing),
);
Ok((input, result))
}
/// Parse an array dimension list e.g. "[2][3]"
pub fn arr_list(input: Cursor) -> ParseResult<Vec<Node<usize>>> {
many0(arr_dim)(input)
}
/// Parse an array dimension e.g. "[2]"
/// Parse an array dimension e.g. "[2]"
///
/// The dimension must be a number token that parses as `usize`; the node's
/// span covers the brackets.
pub fn arr_dim(input: Cursor) -> ParseResult<Node<usize>> {
    // Keep the cursor positioned just after '[' (`num_input`) so a bad
    // integer literal below is reported at the number token itself.
    let (num_input, l_bracket) = op("[")(input)?;
    let (input, num_tok) = number_token(num_input)?;
    let (input, r_bracket) = op("]")(input)?;
    let n: usize = match num_tok.string.parse() {
        Ok(n) => n,
        Err(_) => {
            return Err(ParseError::str(
                num_input,
                &format!("invalid integer literal \"{}\"", num_tok.string),
            ))
        }
    };
    Ok((input, Node::new(n, Span::from_pair(l_bracket, r_bracket))))
}
}
pub fn try_from_tok<'a, P, O>(parser: P) -> impl Fn(Cursor<'a>) -> ParseResult<O>
where
O: TryFrom<&'a Token<'a>>,
<O as TryFrom<&'a Token<'a>>>::Error: std::fmt::Debug,
P: Fn(Cursor<'a>) -> ParseResult<&Token>,
{
map(parser, |tok| TryFrom::try_from(tok).unwrap())
}
/// Parse an optional keyword qualifier (e.g. "pub", "const", "idx").
///
/// When the next token is the name `qual`, consumes it and returns
/// `Some(node)` wrapping `ast_struct` with the keyword's span; otherwise
/// returns `None` without consuming any input.
pub fn opt_qualifier<'a, T>(
    input: Cursor<'a>,
    qual: &'static str,
    ast_struct: T,
) -> ParseResult<'a, Option<Node<T>>> {
    let (input, maybe_tok) = opt(name(qual))(input)?;
    if let Some(tok) = maybe_tok {
        Ok((input, Some(Node::new(ast_struct, tok.span))))
    } else {
        Ok((input, None))
    }
}
/// Parse 'pub' qualifier
pub fn pub_qualifier(input: Cursor) -> ParseResult<Node<PubQualifier>> {
    // Bug fix: this previously matched the "idx" keyword — an apparent
    // copy-paste from `idx_qualifier` — so it could never parse `pub`.
    let (input, tok) = name("pub")(input)?;
    Ok((input, Node::new(PubQualifier {}, tok.span)))
}
/// Parse 'const' qualifier
pub fn const_qualifier(input: Cursor) -> ParseResult<Node<ConstQualifier>> {
    // Bug fix: this previously matched the "idx" keyword — an apparent
    // copy-paste from `idx_qualifier` — so it could never parse `const`.
    let (input, tok) = name("const")(input)?;
    Ok((input, Node::new(ConstQualifier {}, tok.span)))
}
/// Parse 'idx' qualifier
pub fn idx_qualifier(input: Cursor) -> ParseResult<Node<IdxQualifier>> {
let (input, tok) = name("idx")(input)?;
Ok((input, Node::new(IdxQualifier {}, tok.span)))
}
pub fn func_stmt(input: Cursor) -> ParseResult<Vec<Node<FuncStmt>>> {
alt((map(compound_stmt, |stmt| vec![stmt]), simple_stmt))(input)
}
pub fn simple_stmt(input: Cursor) -> ParseResult<Vec<Node<FuncStmt>>> {
terminated(separated(small_stmt, op(";"), true), newline_token)(input)
}
pub fn small_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
alt((
return_stmt,
assert_stmt,
emit_stmt,
pass_stmt,
break_stmt,
continue_stmt,
revert_stmt,
vardecl_stmt,
assign_stmt,
augassign_stmt,
map(exprs, |node| {
Node::new(
FuncStmt::Expr {
value: Node::new(node.kind, node.span),
},
node.span,
)
}),
))(input)
}
pub fn return_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, return_kw) = name("return")(input)?;
let (input, value) = opt(exprs)(input)?;
let span = match &value {
Some(exp) => Span::from_pair(return_kw, exp),
None => return_kw.span,
};
Ok((input, Node::new(FuncStmt::Return { value }, span)))
}
pub fn assert_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, assert_kw) = name("assert")(input)?;
let (input, test) = expr(input)?;
let (input, msg) = opt(preceded(op(","), expr))(input)?;
let span = match &msg {
Some(msg_expr) => Span::from_pair(assert_kw, msg_expr),
None => Span::from_pair(assert_kw, &test),
};
Ok((input, Node::new(FuncStmt::Assert { test, msg }, span)))
}
pub fn emit_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, emit_kw) = name("emit")(input)?;
let (input, value) = expr(input)?;
let span = Span::from_pair(emit_kw, &value);
Ok((input, Node::new(FuncStmt::Emit { value }, span)))
}
/// Build a parser for a bare keyword statement (e.g. "pass", "break").
///
/// `get_stmt` lazily constructs the AST variant to emit; the resulting
/// node's span is that of the matched keyword token.
pub fn keyword_statement<'a, G>(
    string: &'a str,
    get_stmt: G,
) -> impl Fn(Cursor<'a>) -> ParseResult<Node<FuncStmt>>
where
    G: Fn() -> FuncStmt,
{
    move |input| map(name(string), |t| Node::new(get_stmt(), t.span))(input)
}
pub fn pass_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
keyword_statement("pass", || FuncStmt::Pass)(input)
}
pub fn break_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
keyword_statement("break", || FuncStmt::Break)(input)
}
pub fn continue_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
keyword_statement("continue", || FuncStmt::Continue)(input)
}
pub fn revert_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
keyword_statement("revert", || FuncStmt::Revert)(input)
}
pub fn vardecl_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, target_expr) = target(input)?;
let (input, _) = op(":")(input)?;
let (input, typ) = type_desc(input)?;
let (input, value) = opt(preceded(op("="), expr))(input)?;
let span = match &value {
Some(exp) => Span::from_pair(&target_expr, exp),
None => Span::from_pair(&target_expr, &typ),
};
Ok((
input,
Node::new(
FuncStmt::VarDecl {
target: target_expr,
typ,
value,
},
span,
),
))
}
pub fn assign_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, targets_vec) = many1(terminated(targets, op("=")))(input)?;
let (input, value) = exprs(input)?;
let first = targets_vec.first().unwrap();
let span = Span::from_pair(first, &value);
Ok((
input,
Node::new(
FuncStmt::Assign {
targets: targets_vec,
value,
},
span,
),
))
}
pub fn augassign_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, target_expr) = target(input)?;
let (input, aug_tok) = alt((
op("+="),
op("-="),
op("*="),
op("/="),
op("%="),
op("&="),
op("|="),
op("^="),
op("<<="),
op(">>="),
op("**="),
op("//="),
))(input)?;
let (input, value) = expr(input)?;
let span = Span::from_pair(&target_expr, &value);
Ok((
input,
Node::new(
FuncStmt::AugAssign {
target: target_expr,
op: TryFrom::try_from(aug_tok).unwrap(),
value,
},
span,
),
))
}
pub fn compound_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
alt((if_stmt, while_stmt, for_stmt))(input)
}
#[allow(clippy::needless_lifetimes)]
pub fn if_stmt_builder<'a>(string: &'a str) -> impl Fn(Cursor<'a>) -> ParseResult<Node<FuncStmt>> {
move |input| {
alt((
|input| {
let (input, keyword) = name(string)(input)?;
let (input, test) = expr(input)?;
let (input, _) = op(":")(input)?;
let (input, body) = block(input)?;
let (input, or_else) = elif_stmt(input)?;
let span = Span::from_pair(keyword, &or_else);
let or_else = vec![or_else];
Ok((
input,
Node::new(
FuncStmt::If {
test,
body,
or_else,
},
span,
),
))
},
|input| {
let (input, keyword) = name(string)(input)?;
let (input, test) = expr(input)?;
let (input, _) = op(":")(input)?;
let (input, body) = block(input)?;
let (input, or_else) = opt(else_block)(input)?;
let last_stmt = match &or_else {
Some(vec) => vec.last().unwrap(),
None => body.last().unwrap(),
};
let span = Span::from_pair(keyword, last_stmt);
let or_else = or_else.unwrap_or_else(Vec::new);
Ok((
input,
Node::new(
FuncStmt::If {
test,
body,
or_else,
},
span,
),
))
},
))(input)
}
}
pub fn if_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
if_stmt_builder("if")(input)
}
pub fn elif_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
if_stmt_builder("elif")(input)
}
/// Parse the `else:` clause of a compound statement, yielding its body.
pub fn else_block(input: Cursor) -> ParseResult<Vec<Node<FuncStmt>>> {
    let (input, _else_kw) = name("else")(input)?;
    let (input, _colon) = op(":")(input)?;
    block(input)
}
pub fn while_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, while_kw) = name("while")(input)?;
let (input, test) = expr(input)?;
let (input, _) = op(":")(input)?;
let (input, body) = block(input)?;
let (input, or_else) = opt(else_block)(input)?;
let last_stmt = match &or_else {
Some(or_else_body) => or_else_body.last().unwrap(),
None => body.last().unwrap(),
};
let span = Span::from_pair(while_kw, last_stmt);
let or_else = or_else.unwrap_or_else(Vec::new);
Ok((
input,
Node::new(
FuncStmt::While {
test,
body,
or_else,
},
span,
),
))
}
pub fn for_stmt(input: Cursor) -> ParseResult<Node<FuncStmt>> {
let (input, for_kw) = name("for")(input)?;
let (input, target_expr) = targets(input)?;
let (input, _) = name("in")(input)?;
let (input, iter) = exprs(input)?;
let (input, _) = op(":")(input)?;
let (input, body) = block(input)?;
let (input, or_else) = opt(else_block)(input)?;
let last_stmt = match &or_else {
Some(or_else_body) => or_else_body.last().unwrap(),
None => body.last().unwrap(),
};
let span = Span::from_pair(for_kw, last_stmt);
let or_else = or_else.unwrap_or_else(Vec::new);
Ok((
input,
Node::new(
FuncStmt::For {
target: target_expr,
iter,
body,
or_else,
},
span,
),
))
}
pub fn block(input: Cursor) -> ParseResult<Vec<Node<FuncStmt>>> {
alt((simple_stmt, |input| {
let (input, _) = newline_token(input)?;
let (input, _) = indent_token(input)?;
let (input, stmts) = many1(func_stmt)(input)?;
let (input, _) = dedent_token(input)?;
let result: Vec<_> = stmts.into_iter().flatten().collect();
Ok((input, result))
}))(input)
}
/// Parse a comma-separated list of expressions.
///
/// Two or more expressions — or a single expression followed by a trailing
/// comma — become an `Expr::Tuple`; a lone expression without a trailing
/// comma is returned as-is.
pub fn exprs(input: Cursor) -> ParseResult<Node<Expr>> {
    // `separated(.., false)` guarantees at least one element, so the
    // `first`/`last`/`pop` unwraps below are safe.
    let (input, mut elts) = separated(expr, op(","), false)(input)?;
    let (input, comma) = opt(op(","))(input)?;
    let first = elts.first().unwrap();
    let result = match comma {
        Some(comma_tok) => {
            // Trailing comma: always a tuple, span extends to the comma.
            let span = Span::from_pair(first, comma_tok);
            Node::new(Expr::Tuple { elts }, span)
        }
        None => {
            if elts.len() > 1 {
                let last = elts.last().unwrap();
                let span = Span::from_pair(first, last);
                Node::new(Expr::Tuple { elts }, span)
            } else {
                // Single expression, no comma: unwrap rather than tuple-ize.
                elts.pop().unwrap()
            }
        }
    };
    Ok((input, result))
}
pub fn expr(input: Cursor) -> ParseResult<Node<Expr>> {
let (input, if_expr) = disjunct(input)?;
let (input, ternary) = opt(|input| {
let (input, _) = name("if")(input)?;
let (input, test) = disjunct(input)?;
let (input, _) = name("else")(input)?;
let (input, else_expr) = expr(input)?;
Ok((input, (test, else_expr)))
})(input)?;
let result = match ternary {
Some((test, else_expr)) => {
let span = Span::from_pair(&if_expr, &else_expr);
Node::new(
Expr::Ternary {
if_expr: Box::new(if_expr),
test: Box::new(test),
else_expr: Box::new(else_expr),
},
span,
)
}
None => if_expr,
};
Ok((input, result))
}
#[inline]
pub fn bool_op_builder(left: Node<Expr>, op: &Token, right: Node<Expr>) -> Expr {
Expr::BoolOperation {
left: Box::new(left),
op: TryFrom::try_from(op).unwrap(),
right: Box::new(right),
}
}
#[inline]
pub fn bin_op_builder(left: Node<Expr>, op: &Token, right: Node<Expr>) -> Expr {
Expr::BinOperation {
left: Box::new(left),
op: TryFrom::try_from(op).unwrap(),
right: Box::new(right),
}
}
#[inline]
pub fn unary_op_builder(op: &Token, operand: Node<Expr>) -> Expr {
Expr::UnaryOperation {
op: TryFrom::try_from(op).unwrap(),
operand: Box::new(operand),
}
}
#[inline]
pub fn comp_op_builder(left: Node<Expr>, op: Node<CompOperator>, right: Node<Expr>) -> Expr {
Expr::CompOperation {
left: Box::new(left),
op,
right: Box::new(right),
}
}
pub fn disjunct(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(conjunct, name("or"), bool_op_builder)(input)
}
pub fn conjunct(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(comparison, name("and"), bool_op_builder)(input)
}
pub fn comparison(input: Cursor) -> ParseResult<Node<Expr>> {
let (input, nots) = many0(name("not"))(input)?;
let (input, op_expr) = op_expr_builder(bitwise_or, comp_op, comp_op_builder)(input)?;
let mut result = op_expr;
for not_tok in nots.into_iter().rev() {
let span = Span::from_pair(not_tok, &result);
result = Node::new(unary_op_builder(not_tok, result), span);
}
Ok((input, result))
}
pub fn comp_op(input: Cursor) -> ParseResult<Node<CompOperator>> {
alt((
map(
alt((pair(name("not"), name("in")), pair(name("is"), name("not")))),
|toks| {
let (fst, snd) = toks;
TryFrom::try_from(&[fst, snd][..]).unwrap()
},
),
map(
alt((
op("<"),
op("<="),
op("=="),
op(">="),
op(">"),
op("!="),
name("in"),
name("is"),
)),
|tok| TryFrom::try_from(&[tok][..]).unwrap(),
),
))(input)
}
pub fn bitwise_or(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(bitwise_xor, op("|"), bin_op_builder)(input)
}
pub fn bitwise_xor(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(bitwise_and, op("^"), bin_op_builder)(input)
}
pub fn bitwise_and(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(shift_expr, op("&"), bin_op_builder)(input)
}
pub fn shift_expr(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(sum, alt((op("<<"), op(">>"))), bin_op_builder)(input)
}
pub fn sum(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(term, alt((op("+"), op("-"))), bin_op_builder)(input)
}
pub fn term(input: Cursor) -> ParseResult<Node<Expr>> {
op_expr_builder(
factor,
alt((op("*"), op("/"), op("//"), op("%"))),
bin_op_builder,
)(input)
}
pub fn factor(input: Cursor) -> ParseResult<Node<Expr>> {
let unary_op = |input| {
let (input, op_tok) = alt((op("+"), op("-"), op("~")))(input)?;
let (input, factor_expr) = factor(input)?;
let span = Span::from_pair(op_tok, &factor_expr);
Ok((
input,
Node::new(unary_op_builder(op_tok, factor_expr), span),
))
};
alt((unary_op, power))(input)
}
pub fn power(input: Cursor) -> ParseResult<Node<Expr>> {
let power_op = |input| {
let (input, primary_expr) = primary(input)?;
let (input, op_tok) = op("**")(input)?;
let (input, factor_expr) = factor(input)?;
let span = Span::from_pair(&primary_expr, &factor_expr);
Ok((
input,
Node::new(bin_op_builder(primary_expr, op_tok, factor_expr), span),
))
};
alt((power_op, primary))(input)
}
pub fn build_tail_expr(exp: Node<Expr>, tails: Vec<Tail<'_>>) -> Node<Expr> {
let mut result = exp;
for tail in tails {
match tail {
Tail::Attr(name_tok) => {
let span = Span::from_pair(&result, name_tok);
result = Node::new(
Expr::Attribute {
value: Box::new(result),
attr: name_tok.into(),
},
span,
);
}
Tail::Index(slices) => {
let span = Span::from_pair(&result, &slices);
result = Node::new(
Expr::Subscript {
value: Box::new(result),
slices,
},
span,
);
}
Tail::Call(args) => {
let span = Span::from_pair(&result, &args);
result = Node::new(
Expr::Call {
func: Box::new(result),
args,
},
span,
);
}
}
}
result
}
pub fn primary(input: Cursor) -> ParseResult<Node<Expr>> {
let (input, atom_expr) = atom(input)?;
let (input, tails) = many0(alt((attr_tail, index_tail, call_tail)))(input)?;
Ok((input, build_tail_expr(atom_expr, tails)))
}
pub fn slices(input: Cursor) -> ParseResult<Vec<Node<Slice>>> {
separated(slice, op(","), true)(input)
}
pub fn slice(input: Cursor) -> ParseResult<Node<Slice>> {
alt((
|input| {
let boxed_expr = map(expr, Box::new);
let (input, lower) = opt(&boxed_expr)(input)?;
let (input, colon) = op(":")(input)?;
let (input, upper) = opt(&boxed_expr)(input)?;
let (input, step) = opt(preceded(op(":"), &boxed_expr))(input)?;
let first = match &lower {
Some(bx) => bx.span,
None => colon.span,
};
let last = match (&upper, &step) {
(_, Some(bx)) => bx.span,
(Some(bx), _) => bx.span,
_ => colon.span,
};
let span = Span::from_pair(first, last);
Ok((input, Node::new(Slice::Slice { lower, upper, step }, span)))
},
map(expr, |e| {
Node::new(Slice::Index(Box::new(Node::new(e.kind, e.span))), e.span)
}),
))(input)
}
pub fn atom(input: Cursor) -> ParseResult<Node<Expr>> {
alt((
map(name("true"), |tok| Node::new(Expr::Bool(true), tok.span)),
map(name("false"), |tok| Node::new(Expr::Bool(false), tok.span)),
list,
map(group, |exp| Node::new(exp.kind.kind, exp.span)),
tuple,
map(name_token, |tok| {
Node::new(Expr::Name(tok.string.to_string()), tok.span)
}),
map(number_token, |tok| {
Node::new(Expr::Num(tok.string.to_string()), tok.span)
}),
map(many1(string_token), |toks| {
let tok_strings: Vec<_> = toks
.iter()
.map(|t| {
// We don't want to carry quotes around strings past the parsing stage
&t.string[1..t.string.len() - 1]
})
.collect();
let fst = toks.first().unwrap();
let snd = toks.last().unwrap();
Node::new(
Expr::Str(tok_strings.iter().map(|tok| tok.to_string()).collect()),
Span::from_pair(*fst, *snd),
)
}),
map(op("..."), |tok| Node::new(Expr::Ellipsis, tok.span)),
))(input)
}
pub fn list(input: Cursor) -> ParseResult<Node<Expr>> {
map(delimited(op("["), opt(exprs), op("]")), |node| {
use Expr::{
List,
Tuple,
};
let kind = match node.kind {
Some(Node {
kind: Tuple { elts },
..
}) => List { elts },
Some(exp) => List { elts: vec![exp] },
None => List { elts: vec![] },
};
let span = node.span;
Node::new(kind, span)
})(input)
}
pub fn tuple(input: Cursor) -> ParseResult<Node<Expr>> {
map(delimited(op("("), opt(exprs), op(")")), |node| {
use Expr::Tuple;
let kind = match node.kind {
Some(Node {
kind: Tuple { elts },
..
}) => Tuple { elts },
Some(exp) => exp.kind,
None => Tuple { elts: vec![] },
};
let span = node.span;
Node::new(kind, span)
})(input)
}
pub fn group(input: Cursor) -> ParseResult<Node<Node<Expr>>> {
delimited(op("("), expr, op(")"))(input)
}
pub fn args(input: Cursor) -> ParseResult<Vec<Node<CallArg>>> {
let kw_result = kwargs(input);
if kw_result.is_ok() {
return kw_result;
}
let (input, first) = expr(input)?;
let (input, rest) = opt(preceded(op(","), args))(input)?;
let mut results = vec![Node::new(
CallArg::Arg(Node::new(first.kind, first.span)),
first.span,
)];
if let Some(mut rest) = rest {
results.append(&mut rest);
}
Ok((input, results))
}
pub fn kwargs(input: Cursor) -> ParseResult<Vec<Node<CallArg>>> {
separated(kwarg, op(","), false)(input)
}
pub fn kwarg(input: Cursor) -> ParseResult<Node<CallArg>> {
let (input, name_tok) = name_token(input)?;
let (input, _) = op("=")(input)?;
let (input, value_expr) = expr(input)?;
let span = Span::from_pair(name_tok, &value_expr);
Ok((
input,
Node::new(
CallArg::Kwarg(Kwarg {
name: name_tok.into(),
value: Box::new(value_expr),
}),
span,
),
))
}
pub enum Tail<'a> {
Attr(&'a Token<'a>),
Index(Node<Vec<Node<Slice>>>),
Call(Node<Vec<Node<CallArg>>>),
}
pub fn targets(input: Cursor) -> ParseResult<Node<Expr>> {
let (input, mut elts) = separated(target, op(","), false)(input)?;
let (input, comma) = opt(op(","))(input)?;
let first = elts.first().unwrap();
let result = match comma {
Some(comma_tok) => {
let span = Span::from_pair(first, comma_tok);
Node::new(Expr::Tuple { elts }, span)
}
None => {
if elts.len() > 1 {
let last = elts.last().unwrap();
let span = Span::from_pair(first, last);
Node::new(Expr::Tuple { elts }, span)
} else {
elts.pop().unwrap()
}
}
};
Ok((input, result))
}
pub fn target(input: Cursor) -> ParseResult<Node<Expr>> {
alt((
|input| {
let (input, atom_expr) = atom(input)?;
let (input, tails) = many1(t_tail)(input)?;
let tails: Vec<_> = tails.into_iter().flatten().collect();
Ok((input, build_tail_expr(atom_expr, tails)))
},
|input| {
let (input, atom_expr) = t_atom(input)?;
let (input, tails) = many0(t_tail)(input)?;
let tails: Vec<_> = tails.into_iter().flatten().collect();
Ok((input, build_tail_expr(atom_expr, tails)))
},
))(input)
}
pub fn t_atom(input: Cursor) -> ParseResult<Node<Expr>> {
alt((
map(name_token, |tok| {
Node::new(Expr::Name(tok.string.to_string()), tok.span)
}),
map(delimited(op("("), targets, op(")")), |node| {
use Expr::Tuple;
let kind = match node.kind {
Node {
kind: Tuple { elts },
..
} => Tuple { elts },
exp => Tuple { elts: vec![exp] },
};
let span = node.span;
Node::new(kind, span)
}),
map(delimited(op("["), targets, op("]")), |node| {
use Expr::{
List,
Tuple,
};
let kind = match node.kind {
Node {
kind: Tuple { elts },
..
} => List { elts },
exp => List { elts: vec![exp] },
};
let span = node.span;
Node::new(kind, span)
}),
))(input)
}
pub fn t_tail(input: Cursor) -> ParseResult<Vec<Tail>> {
let (input, mut tails) = many0(call_tail)(input)?;
let (input, last) = alt((attr_tail, index_tail))(input)?;
tails.push(last);
Ok((input, tails))
}
pub fn attr_tail(input: Cursor) -> ParseResult<Tail> {
map(preceded(op("."), name_token), Tail::Attr)(input)
}
pub fn index_tail(input: Cursor) -> ParseResult<Tail> {
map(delimited(op("["), slices, op("]")), Tail::Index)(input)
}
pub fn call_tail(input: Cursor) -> ParseResult<Tail> {
map(
map(delimited(op("("), opt(args), op(")")), |node| {
Node::new(node.kind.unwrap_or_else(Vec::new), node.span)
}),
Tail::Call,
)(input)
}
| 28.205176 | 99 | 0.531206 |
e46df6ce943b8439b1ca75cc875f84c79aa4e237 | 2,627 | use std::time::Duration;
pub use self::cfg::{Cfg, CfgExpr};
pub use self::config::{homedir, Config, ConfigValue};
pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
pub use self::diagnostic_server::RustfixDiagnosticServer;
pub use self::errors::{internal, process_error};
pub use self::errors::{CargoResult, CargoResultExt, CliResult, Test};
pub use self::errors::{CargoTestError, CliError, ProcessError};
pub use self::flock::{FileLock, Filesystem};
pub use self::graph::Graph;
pub use self::hex::{hash_u64, short_hash, to_hex};
pub use self::lev_distance::lev_distance;
pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted};
pub use self::paths::{bytes2path, dylib_path, join_paths, path2bytes};
pub use self::paths::{dylib_path_envvar, normalize_path};
pub use self::process_builder::{process, ProcessBuilder};
pub use self::progress::{Progress, ProgressStyle};
pub use self::read2::read2;
pub use self::rustc::Rustc;
pub use self::sha256::Sha256;
pub use self::to_semver::ToSemver;
pub use self::to_url::ToUrl;
pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo};
pub use self::workspace::{
print_available_benches, print_available_binaries, print_available_examples,
print_available_tests,
};
mod cfg;
pub mod command_prelude;
pub mod config;
mod dependency_queue;
pub mod diagnostic_server;
pub mod errors;
mod flock;
pub mod graph;
pub mod hex;
pub mod important_paths;
pub mod job;
pub mod lev_distance;
mod lockserver;
pub mod machine_message;
pub mod network;
pub mod paths;
pub mod process_builder;
pub mod profile;
mod progress;
mod read2;
pub mod rustc;
mod sha256;
pub mod to_semver;
pub mod to_url;
pub mod toml;
mod vcs;
mod workspace;
/// Render a `Duration` as a short human-readable string.
///
/// Durations of a minute or more become `"<m>m <ss>s"` (seconds
/// zero-padded); shorter durations become seconds with two decimal places,
/// e.g. `"1.51s"`.
pub fn elapsed(duration: Duration) -> String {
    let total_secs = duration.as_secs();
    if total_secs < 60 {
        // Hundredths of a second, derived from the sub-second nanoseconds.
        let centis = duration.subsec_nanos() / 10_000_000;
        format!("{}.{:02}s", total_secs, centis)
    } else {
        let (mins, secs) = (total_secs / 60, total_secs % 60);
        format!("{}m {:02}s", mins, secs)
    }
}
/// Check the base requirements for a package name.
///
/// This can be used for other things than package names, to enforce some
/// level of sanity. Note that package names have other restrictions
/// elsewhere. `cargo new` has a few restrictions, such as checking for
/// reserved names. crates.io has even more restrictions.
pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> {
    // A valid character is alphanumeric, `_`, or `-`; report the first
    // offender, if any.
    let invalid = |c: &char| !(c.is_alphanumeric() || *c == '_' || *c == '-');
    match name.chars().find(invalid) {
        Some(ch) => failure::bail!("Invalid character `{}` in {}: `{}`{}", ch, what, name, help),
        None => Ok(()),
    }
}
| 31.650602 | 85 | 0.712981 |
e45dec30636ecb334dc9a297c30c8055c5f6f42b | 5,496 | #![no_std]
#![no_main]
extern crate alloc;
use alloc::{string::String, vec, vec::Vec};
use core::convert::TryInto;
use dimension_contract::{
contract_api::{self, runtime, storage},
ext_ffi,
unwrap_or_revert::UnwrapOrRevert,
};
use dimension_types::{
api_error, bytesrepr, contracts::NamedKeys, runtime_args, ApiError, CLType, CLValue,
ContractPackageHash, EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, Key, Parameter,
RuntimeArgs, URef,
};
const HASH_KEY_NAME: &str = "counter_package_hash";
const ACCESS_KEY_NAME: &str = "counter_package_access";
const CONTRACT_VERSION_KEY: &str = "contract_version";
const ENTRYPOINT_SESSION: &str = "session";
const ENTRYPOINT_COUNTER: &str = "counter";
const ARG_COUNTER_METHOD: &str = "method";
const ARG_CONTRACT_HASH_NAME: &str = "counter_contract_hash";
const COUNTER_VALUE_UREF: &str = "counter";
const METHOD_GET: &str = "get";
const METHOD_INC: &str = "inc";
/// Stored-contract entry point. Looks up the counter's URef under the
/// "counter" named key and dispatches on the "method" runtime argument:
/// "inc" adds 1 to the stored value, "get" reads the i32 value and returns
/// it to the caller, anything else reverts with `InvalidArgument`.
#[no_mangle]
pub extern "C" fn counter() {
    // Resolve the counter value's URef from this contract's named keys;
    // revert if the key is missing or is not a URef.
    let uref = runtime::get_key(COUNTER_VALUE_UREF)
        .unwrap_or_revert()
        .try_into()
        .unwrap_or_revert();
    let method_name: String = runtime::get_named_arg(ARG_COUNTER_METHOD);
    match method_name.as_str() {
        METHOD_INC => storage::add(uref, 1),
        METHOD_GET => {
            let result: i32 = storage::read_or_revert(uref);
            let return_value = CLValue::from_t(result).unwrap_or_revert();
            runtime::ret(return_value);
        }
        _ => runtime::revert(ApiError::InvalidArgument),
    }
}
/// Stored-session entry point. Resolves the counter contract's hash via
/// `get_counter_key` and calls its "counter" entry point with
/// method = "inc", incrementing the stored counter by one.
#[no_mangle]
pub extern "C" fn session() {
    let counter_key = get_counter_key();
    // The key must be the hash variant; anything else is an error.
    let contract_hash = counter_key
        .into_hash()
        .unwrap_or_revert_with(ApiError::UnexpectedKeyVariant)
        .into();
    let entry_point_name = ENTRYPOINT_COUNTER;
    let runtime_args = runtime_args! { ARG_COUNTER_METHOD => METHOD_INC };
    runtime::call_contract(contract_hash, entry_point_name, runtime_args)
}
/// Deployment entry point. Creates a new contract package, installs the
/// contract version with its entry points and a fresh counter URef
/// (initialized to 0) among its named keys, and records the package hash,
/// access URef, contract version, and contract hash under the deployer's
/// named keys.
#[no_mangle]
pub extern "C" fn call() {
    let (contract_package_hash, access_uref): (ContractPackageHash, URef) =
        storage::create_contract_package_at_hash();
    // Expose the package hash and its access URef to the deploying account.
    runtime::put_key(HASH_KEY_NAME, contract_package_hash.into());
    runtime::put_key(ACCESS_KEY_NAME, access_uref.into());
    let entry_points = get_entry_points();
    let count_value_uref = storage::new_uref(0); //initialize counter
    // The contract's named keys carry the counter URef so the `counter`
    // entry point can find it at run time.
    let named_keys = {
        let mut ret = NamedKeys::new();
        ret.insert(String::from(COUNTER_VALUE_UREF), count_value_uref.into());
        ret
    };
    let (contract_hash, contract_version) =
        storage::add_contract_version(contract_package_hash, entry_points, named_keys);
    let version_uref = storage::new_uref(contract_version);
    runtime::put_key(CONTRACT_VERSION_KEY, version_uref.into());
    runtime::put_key(ARG_CONTRACT_HASH_NAME, contract_hash.into());
}
/// Build the entry points installed by `call`.
fn get_entry_points() -> EntryPoints {
    // `counter`: the stored-contract entry point. Takes ARG_COUNTER_METHOD
    // (METHOD_GET or METHOD_INC) and declares an i32 return (the counter value).
    let counter_entry = EntryPoint::new(
        ENTRYPOINT_COUNTER,
        vec![Parameter::new(ARG_COUNTER_METHOD, CLType::String)],
        CLType::I32,
        EntryPointAccess::Public,
        EntryPointType::Contract,
    );

    // `session`: stored session code that calls a version of the stored
    // contract identified by ARG_CONTRACT_HASH_NAME (a 32-byte hash).
    let session_entry = EntryPoint::new(
        ENTRYPOINT_SESSION,
        vec![Parameter::new(
            ARG_CONTRACT_HASH_NAME,
            CLType::ByteArray(32),
        )],
        CLType::Unit,
        EntryPointAccess::Public,
        EntryPointType::Session,
    );

    let mut entry_points = EntryPoints::new();
    entry_points.add_entry_point(counter_entry);
    entry_points.add_entry_point(session_entry);
    entry_points
}
/// Resolve the `Key` of the counter contract: prefer an explicit
/// `counter_contract_hash` runtime argument, falling back to the caller's
/// named key of the same name when the argument is absent or empty.
fn get_counter_key() -> Key {
    let name = ARG_CONTRACT_HASH_NAME;
    // Probe for the optional runtime argument via the raw FFI: first ask the
    // host for its serialized size so a correctly sized buffer can be made.
    let arg = {
        let mut arg_size: usize = 0;
        let ret = unsafe {
            ext_ffi::dimension_get_named_arg_size(
                name.as_bytes().as_ptr(),
                name.len(),
                &mut arg_size as *mut usize,
            )
        };
        match api_error::result_from(ret) {
            Ok(_) => {
                // A present-but-empty argument is treated the same as missing.
                if arg_size == 0 {
                    None
                } else {
                    Some(arg_size)
                }
            }
            // A missing argument is not an error here — fall back to named keys.
            Err(ApiError::MissingArgument) => None,
            Err(e) => runtime::revert(e),
        }
    };
    match arg {
        Some(arg_size) => {
            // The argument exists: copy its bytes out of the host and
            // deserialize them into a `Key`.
            let arg_bytes = {
                let res = {
                    let data_non_null_ptr = contract_api::alloc_bytes(arg_size);
                    let ret = unsafe {
                        ext_ffi::dimension_get_named_arg(
                            name.as_bytes().as_ptr(),
                            name.len(),
                            data_non_null_ptr.as_ptr(),
                            arg_size,
                        )
                    };
                    // Reconstructing the Vec takes ownership of the allocation
                    // made by `alloc_bytes` above (len == capacity == arg_size).
                    let data = unsafe {
                        Vec::from_raw_parts(data_non_null_ptr.as_ptr(), arg_size, arg_size)
                    };
                    api_error::result_from(ret).map(|_| data)
                };
                res.unwrap_or_revert()
            };
            bytesrepr::deserialize(arg_bytes).unwrap_or_revert_with(ApiError::InvalidArgument)
        }
        None => runtime::get_key(ARG_CONTRACT_HASH_NAME).unwrap_or_revert_with(ApiError::GetKey),
    }
}
| 32.91018 | 99 | 0.610808 |
ed85d0315bd25d7d232de53e96c9ef97bf53ca0a | 2,303 | //! lint on multiple versions of a crate being used
use crate::utils::{run_lints, span_lint};
use rustc_hir::{Crate, CRATE_HIR_ID};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::DUMMY_SP;
use itertools::Itertools;
declare_clippy_lint! {
    /// **What it does:** Checks to see if multiple versions of a crate are being
    /// used.
    ///
    /// **Why is this bad?** This bloats the size of targets, and can lead to
    /// confusing error messages when structs or traits are used interchangeably
    /// between different versions of a crate.
    ///
    /// **Known problems:** Because this can be caused purely by the dependencies
    /// themselves, it's not always possible to fix this issue.
    ///
    /// **Example:**
    /// ```toml
    /// # This will pull in both winapi v0.3.x and v0.2.x, triggering a warning.
    /// [dependencies]
    /// ctrlc = "=3.1.0"
    /// ansi_term = "=0.11.0"
    /// ```
    pub MULTIPLE_CRATE_VERSIONS,
    cargo,
    "multiple versions of the same crate being used"
}
// Boilerplate: registers `MultipleCrateVersions` as the pass owning the lint above.
declare_lint_pass!(MultipleCrateVersions => [MULTIPLE_CRATE_VERSIONS]);
impl LateLintPass<'_, '_> for MultipleCrateVersions {
    fn check_crate(&mut self, cx: &LateContext<'_, '_>, _: &Crate<'_>) {
        // Honor allow/deny attributes on the crate root.
        if !run_lints(cx, &[MULTIPLE_CRATE_VERSIONS], CRATE_HIR_ID) {
            return;
        }

        // The dependency graph comes from `cargo metadata`; without it we can
        // only report that the metadata was unavailable.
        let metadata = match cargo_metadata::MetadataCommand::new().exec() {
            Ok(metadata) => metadata,
            Err(_) => {
                span_lint(cx, MULTIPLE_CRATE_VERSIONS, DUMMY_SP, "could not read cargo metadata");
                return;
            }
        };

        // Sort so `group_by` sees equal names adjacently.
        let mut packages = metadata.packages;
        packages.sort_by(|lhs, rhs| lhs.name.cmp(&rhs.name));

        // A name appearing more than once means multiple versions of that crate.
        for (name, group) in &packages.into_iter().group_by(|pkg| pkg.name.clone()) {
            let same_name: Vec<cargo_metadata::Package> = group.collect();
            if same_name.len() > 1 {
                let versions = same_name.into_iter().map(|pkg| pkg.version).join(", ");
                span_lint(
                    cx,
                    MULTIPLE_CRATE_VERSIONS,
                    DUMMY_SP,
                    &format!("multiple versions for dependency `{}`: {}", name, versions),
                );
            }
        }
    }
}
| 33.376812 | 94 | 0.594876 |
21ac5e871f4f6b6d8ada238223b0a15ab18c8414 | 5,234 | /* NOT generated by build script but by bindgen command line */
/* Therefore manual modifications can be made */

// Raw FFI declarations for the Faasm host interface: state access, push/pull,
// locking, input/output, call chaining, and Python helpers.
// NOTE(review): the summaries below are inferred from names and signatures
// only — confirm against the Faasm host API documentation.

extern "C" {
    /// Log a NUL-terminated string via the host (wasm builds only).
    #[cfg(target_arch = "wasm32")]
    pub fn __println(arg1: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Read the state value `key` into `buffer` (`bufferLen` bytes).
    pub fn __faasm_read_state(
        key: *const ::std::os::raw::c_char,
        buffer: *mut ::std::os::raw::c_uchar,
        bufferLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Return a pointer to the state value `key` (`totalLen` bytes).
    pub fn __faasm_read_state_ptr(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
    ) -> *mut ::std::os::raw::c_uchar;
}
extern "C" {
    /// Write `dataLen` bytes from `data` as the state value `key`.
    pub fn __faasm_write_state(
        key: *const ::std::os::raw::c_char,
        data: *const ::std::os::raw::c_uchar,
        dataLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Append `dataLen` bytes from `data` to the state value `key`.
    pub fn __faasm_append_state(
        key: *const ::std::os::raw::c_char,
        data: *const ::std::os::raw::c_uchar,
        dataLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Read `nElems` appended elements of `key` into `data` (`dataLen` bytes).
    pub fn __faasm_read_appended_state(
        key: *const ::std::os::raw::c_char,
        data: *mut ::std::os::raw::c_uchar,
        dataLen: ::std::os::raw::c_long,
        nElems: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Clear all data appended to the state value `key`.
    pub fn __faasm_clear_appended_state(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Write `dataLen` bytes at `offset` within `key` (total size `totalLen`).
    pub fn __faasm_write_state_offset(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
        offset: ::std::os::raw::c_long,
        data: *const ::std::os::raw::c_uchar,
        dataLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Set the state value `key` from the contents of `filePath`; returns the
    /// host's result code.
    pub fn __faasm_write_state_from_file(
        key: *const ::std::os::raw::c_char,
        filePath: *const ::std::os::raw::c_char,
    ) -> ::std::os::raw::c_uint;
}
extern "C" {
    /// Read `bufferLen` bytes at `offset` within `key` (total size `totalLen`).
    pub fn __faasm_read_state_offset(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
        offset: ::std::os::raw::c_long,
        buffer: *mut ::std::os::raw::c_uchar,
        bufferLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Mark the whole state value `key` (size `totalLen`) as dirty.
    pub fn __faasm_flag_state_dirty(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Mark `dataLen` bytes at `offset` within `key` as dirty.
    pub fn __faasm_flag_state_offset_dirty(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
        offset: ::std::os::raw::c_long,
        dataLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Return a pointer to `len` bytes at `offset` within `key`.
    pub fn __faasm_read_state_offset_ptr(
        key: *const ::std::os::raw::c_char,
        totalLen: ::std::os::raw::c_long,
        offset: ::std::os::raw::c_long,
        len: ::std::os::raw::c_long,
    ) -> *mut ::std::os::raw::c_uchar;
}
extern "C" {
    /// Push the local copy of `key` to the global state store.
    pub fn __faasm_push_state(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Push only the dirty regions of `key` to the global state store.
    pub fn __faasm_push_state_partial(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Push regions of `key` selected by the mask state value `maskKey`.
    pub fn __faasm_push_state_partial_mask(
        key: *const ::std::os::raw::c_char,
        maskKey: *const ::std::os::raw::c_char,
    );
}
extern "C" {
    /// Pull `stateLen` bytes of `key` from the global store into the local copy.
    pub fn __faasm_pull_state(key: *const ::std::os::raw::c_char, stateLen: ::std::os::raw::c_long);
}
extern "C" {
    /// Acquire the global lock for `key`.
    pub fn __faasm_lock_state_global(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Release the global lock for `key`.
    pub fn __faasm_unlock_state_global(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Acquire the read lock for `key`.
    pub fn __faasm_lock_state_read(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Release the read lock for `key`.
    pub fn __faasm_unlock_state_read(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Acquire the write lock for `key`.
    pub fn __faasm_lock_state_write(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Release the write lock for `key`.
    pub fn __faasm_unlock_state_write(key: *const ::std::os::raw::c_char);
}
extern "C" {
    /// Read the call's input data into `buffer`; returns a length as reported
    /// by the host.
    pub fn __faasm_read_input(
        buffer: *mut ::std::os::raw::c_uchar,
        bufferLen: ::std::os::raw::c_long,
    ) -> ::std::os::raw::c_long;
}
extern "C" {
    /// Set the call's output data.
    pub fn __faasm_write_output(
        output: *const ::std::os::raw::c_uchar,
        outputLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Chain a call to the function `name` with the given input; the returned
    /// id is presumably usable with `__faasm_await_call` — confirm.
    pub fn __faasm_chain_function(
        name: *const ::std::os::raw::c_char,
        inputData: *const ::std::os::raw::c_uchar,
        inputDataSize: ::std::os::raw::c_long,
    ) -> ::std::os::raw::c_uint;
}
extern "C" {
    /// Chain a call to this function's entry point at index `idx`.
    pub fn __faasm_chain_this(
        idx: ::std::os::raw::c_int,
        inputData: *const ::std::os::raw::c_uchar,
        inputDataSize: ::std::os::raw::c_long,
    ) -> ::std::os::raw::c_uint;
}
extern "C" {
    /// Wait for the chained call `messageId`; returns the host's result code.
    pub fn __faasm_await_call(messageId: ::std::os::raw::c_uint) -> ::std::os::raw::c_int;
}
extern "C" {
    /// Return this function's entry-point index.
    pub fn __faasm_get_idx() -> ::std::os::raw::c_int;
}
extern "C" {
    /// Copy the current Python user name into `buffer`.
    pub fn __faasm_get_py_user(
        buffer: *mut ::std::os::raw::c_uchar,
        bufferLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Copy the current Python function name into `buffer`.
    pub fn __faasm_get_py_func(
        buffer: *mut ::std::os::raw::c_uchar,
        bufferLen: ::std::os::raw::c_long,
    );
}
extern "C" {
    /// Return the current Python function index.
    pub fn __faasm_get_py_idx() -> ::std::os::raw::c_int;
}
extern "C" {
    /// Chain a Python function call at index `idx`.
    pub fn __faasm_chain_py(
        idx: ::std::os::raw::c_int,
        inputData: *const ::std::os::raw::c_uchar,
        inputDataSize: ::std::os::raw::c_long,
    ) -> ::std::os::raw::c_uint;
}
extern "C" {
    /// Read the host configuration flag `key`.
    pub fn __faasm_conf_flag(key: *const ::std::os::raw::c_char) -> ::std::os::raw::c_uint;
}
| 28.601093 | 100 | 0.57394 |
6a822cb215d8e819a3bd64acd84fd24f661a1674 | 1,539 | extern crate fosslim;
use fosslim::index;
use fosslim::naive_tf;
// for executing this tests with output
// cargo test test_naive_tf_cross_check -- --nocapture
#[test]
fn test_naive_tf_cross_check() {
    // Build an index over the license corpus and a naive TF model on top of it.
    let data_path = "data/licenses";
    print!("Building index...");
    let idx = index::build_from_path(data_path).expect("Failed to build test index");
    println!("Done");

    print!("Building the test model...");
    let mdl = naive_tf::from_index(&idx);
    println!("Done");

    let (mut true_pos, mut false_neg, mut fails, mut n_docs) = (0, 0, 0, 0);

    println!("Index. N_Terms: {}, N_Docs: #{}", idx.n_terms, idx.n_docs);
    println!("Correct?|Expected|Result|Score");

    // Match every indexed document back against the model; ideally each
    // document is classified as its own license label.
    for doc in idx.get_documents().iter() {
        n_docs += 1;
        match mdl.match_document(doc) {
            Some(score) => {
                let res_label = score.label.unwrap_or("".to_string());
                if doc.label == res_label {
                    println!("+| {} | {} | {}", doc.label, res_label, score.score);
                    true_pos += 1;
                } else {
                    println!("-| {} | {} | {}", doc.label, res_label, score.score);
                    false_neg += 1;
                }
            }
            None => {
                println!("{} => NONE", doc.label);
                fails += 1;
            }
        }
    }

    let accuracy = (true_pos as f32) / (n_docs as f32);
    println!("#-- Summary\n\t Matched #{}\n\tCorrect: {}", n_docs, true_pos);
    println!("\tFalse negatives: {}\n\tFails: {}\n\tAccuracy: {}", false_neg, fails, accuracy);
}
| 29.596154 | 95 | 0.557505 |
4a7da47c3f5ad0672cce09eb10e6bbd6f6a5b522 | 9,242 | use super::*;
use datamodel_connector::ConnectorCapability;
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, str::FromStr};
/// A serializable bundle of a rendered datamodel plus the query parameters
/// used to address its `Parent` and `Child` models in generated tests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatamodelWithParams {
    // Rendered Prisma datamodel (schema) source.
    datamodel: String,
    // Query parameters for the `Parent` model.
    parent: QueryParams,
    // Query parameters for the `Child` model.
    child: QueryParams,
}
impl DatamodelWithParams {
    /// Borrow the rendered datamodel (schema) source.
    pub fn datamodel(&self) -> &str {
        self.datamodel.as_str()
    }

    /// Borrow the query parameters addressing the `Parent` model.
    pub fn parent(&self) -> &QueryParams {
        &self.parent
    }

    /// Borrow the query parameters addressing the `Child` model.
    pub fn child(&self) -> &QueryParams {
        &self.child
    }
}
// Parse from the JSON representation (inverse of the `String` conversion).
impl FromStr for DatamodelWithParams {
    type Err = serde_json::Error;

    fn from_str(from: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(from)
    }
}
// Serialize to the JSON representation (inverse of `FromStr`).
impl TryFrom<DatamodelWithParams> for String {
    type Error = serde_json::Error;

    fn try_from(from: DatamodelWithParams) -> Result<Self, Self::Error> {
        serde_json::to_string(&from)
    }
}

// Generated datamodels paired with the connector capabilities each requires.
pub type DatamodelsAndCapabilities = (Vec<DatamodelWithParams>, Vec<Vec<ConnectorCapability>>);
/// Generate every datamodel permutation for the relation described by
/// `on_parent`/`on_child`, varying id shape (simple/compound/none) and the
/// side holding the relation reference. Returns each datamodel together with
/// the connector capabilities it requires. `id_only` restricts the query
/// parameters to the `id` field.
pub fn schema_with_relation(
    on_parent: &RelationField,
    on_child: &RelationField,
    id_only: bool,
) -> DatamodelsAndCapabilities {
    // A required 1:1 relation cannot be expressed — one side must be optional.
    let is_required_1to1 = on_parent.is_required() && on_child.is_required();
    if is_required_1to1 {
        panic!("required 1:1 relations must be rejected by the parser already");
    }
    // Query Params
    let id_param = QueryParams::new(
        "id",
        QueryParamsWhere::identifier("id"),
        QueryParamsWhereMany::many_ids("id"),
    );
    let compound_id_param = {
        let fields = vec!["id_1", "id_2"];
        let arg_name = "id_1_id_2";
        QueryParams::new(
            "id_1, id_2",
            QueryParamsWhere::compound_identifier(fields.clone(), arg_name),
            QueryParamsWhereMany::many_compounds(fields, arg_name),
        )
    };
    // Unique selectors available on each model besides the id.
    let parent_unique_params = vec![
        QueryParams::new(
            "p",
            QueryParamsWhere::identifier("p"),
            QueryParamsWhereMany::many_ids("p"),
        ),
        {
            let fields = vec!["p_1", "p_2"];
            let arg_name = "p_1_p_2";
            QueryParams::new(
                "p_1, p_2",
                QueryParamsWhere::compound_identifier(fields.clone(), arg_name),
                QueryParamsWhereMany::many_compounds(fields, arg_name),
            )
        },
    ];
    let child_unique_params = vec![
        QueryParams::new(
            "c",
            QueryParamsWhere::identifier("c"),
            QueryParamsWhereMany::many_ids("c"),
        ),
        {
            let fields = vec!["c_1", "c_2"];
            let arg_name = "c_1_c_2";
            QueryParams::new(
                "c_1, c_2",
                QueryParamsWhere::compound_identifier(fields.clone(), arg_name),
                QueryParamsWhereMany::many_compounds(fields, arg_name),
            )
        },
    ];
    // we only support singular id fields with implicit many to many relations. https://github.com/prisma/prisma/issues/2262
    let id_options = if on_parent.is_list() && on_child.is_list() {
        SIMPLE_ID_OPTIONS.to_vec()
    } else {
        FULL_ID_OPTIONS.to_vec()
    };
    // TODO: Remove if we're sure we don't ever wanna keep the simple mode
    let simple = false;
    let mut datamodels: Vec<DatamodelWithParams> = vec![];
    let mut required_capabilities: Vec<Vec<ConnectorCapability>> = vec![];
    // Cartesian product: parent id shape × child id shape × reference placement.
    for parent_id in id_options.iter() {
        for child_id in id_options.iter() {
            // Based on Id and relation fields
            for child_ref_to_parent in child_references(simple, parent_id, on_parent, on_child) {
                for parent_ref_to_child in
                    parent_references(simple, child_id, &child_ref_to_parent, on_parent, on_child)
                {
                    // TODO: The RelationReference.render() equality is a hack. Implement PartialEq instead
                    let is_virtual_req_rel_field =
                        on_parent.is_required() && parent_ref_to_child.render() == RelationReference::NoRef.render();
                    // skip required virtual relation fields as those are disallowed in a Prisma Schema
                    if is_virtual_req_rel_field {
                        continue;
                    }
                    // Only based on id
                    let parent_params = if id_only {
                        vec![id_param.clone()]
                    } else {
                        match *parent_id {
                            Identifier::Simple => parent_unique_params.clone_push(&id_param),
                            Identifier::Compound => parent_unique_params.clone_push(&compound_id_param),
                            Identifier::None => parent_unique_params.clone(),
                        }
                    };
                    let child_params = if id_only {
                        vec![id_param.clone()]
                    } else {
                        match *child_id {
                            Identifier::Simple => child_unique_params.clone_push(&id_param),
                            Identifier::Compound => child_unique_params.clone_push(&compound_id_param),
                            Identifier::None => child_unique_params.clone(),
                        }
                    };
                    // Emit one datamodel per (parent selector, child selector) pair.
                    for parent_param in parent_params.iter() {
                        for child_param in child_params.iter() {
                            let (parent_field, child_field) =
                                render_relation_fields(on_parent, &parent_ref_to_child, on_child, &child_ref_to_parent);
                            let datamodel = indoc::formatdoc! {"
                                model Parent {{
                                    p             String    @unique
                                    p_1           String
                                    p_2           String
                                    {parent_field}
                                    non_unique    String?
                                    {parent_id}

                                    @@unique([p_1, p_2])
                                }}

                                model Child {{
                                    c             String    @unique
                                    c_1           String
                                    c_2           String
                                    {child_field}
                                    non_unique    String?
                                    {child_id}

                                    @@unique([c_1, c_2])
                                }}
                            ",
                                parent_field = parent_field,
                                parent_id = parent_id,
                                child_field = child_field,
                                child_id = child_id
                            };
                            // Compound ids and id-less models need dedicated
                            // connector capabilities.
                            let mut required_capabilities_for_dm = vec![];
                            match (parent_id, child_id) {
                                (Identifier::Compound, _) | (_, Identifier::Compound) => {
                                    required_capabilities_for_dm.push(ConnectorCapability::CompoundIds)
                                }
                                (Identifier::None, _) | (_, Identifier::None) => {
                                    required_capabilities_for_dm.push(ConnectorCapability::AnyId)
                                }
                                _ => (),
                            }
                            required_capabilities.push(required_capabilities_for_dm);
                            datamodels.push(DatamodelWithParams {
                                datamodel,
                                parent: parent_param.clone(),
                                child: child_param.clone(),
                            });
                        }
                    }
                }
            }
        }
    }
    (datamodels, required_capabilities)
}
/// Render the Prisma field declarations for both sides of the relation.
fn render_relation_fields(
    parent: &RelationField,
    parent_ref_to_child: &RelationReference,
    child: &RelationField,
    child_ref_to_parent: &RelationReference,
) -> (String, String) {
    // Implicit many-to-many relations are rendered with the `#m2m` template;
    // every other arity renders a plain field followed by its reference.
    if parent.is_list() && child.is_list() {
        (
            format!("#m2m({}, {}, String)", parent.field_name(), parent.type_name()),
            format!("#m2m({}, {}, String)", child.field_name(), child.type_name()),
        )
    } else {
        let render = |field: &RelationField, reference: &RelationReference| {
            format!("{} {} {}", field.field_name(), field.type_name(), reference.render())
        };
        (
            render(parent, parent_ref_to_child),
            render(child, child_ref_to_parent),
        )
    }
}
| 36.674603 | 124 | 0.490695 |
d55b5b064dbe1b85dae1a4052136d6f681eba9d0 | 10,243 | //! This file implements what I refer to as HSL but which would precisely be called sHSL: a simple
//! transformation of sRGB that creates a cylindrical space. HSL has the same problems with
//! perceptual uniformity and general unsuitability for exact psychophysically-accurate
//! representation as color as sRGB does, but it does have the advantage of being easy to display on
//! a monitor and having some conception of common color attributes. HSL and HSV are very similar
//! but have an important difference: *value* in HSV runs from black to fully saturated colors,
//! whereas *lightness* or *luminosity* in HSL runs from black to fully saturated in the middle to
//! white at the end. This makes the saturation component of HSL extremely inaccurate, because light
//! colors can have a very high saturation even if they are extremely close to white. This space is
//! mathematically cylindrical, but when you account for the actual differentiation of colors
//! (saturation's actual importance varies with lightness) it forms a "bi-hexcone" model, where the
//! hue component is actually a hexagon but simply stretched into a circle, and the area of a
//! horizontal cross-section varies with lightness. A special note: some implementations of HSV and
//! HSL are circular in nature, using polar coordinates explicitly. This implementation is instead
//! hexagonal: first values are put on a hexagon, and then that hexagon is "squeezed" into a
//! circle. This can cause small variations between Scarlet and other applications.
//! Another small implementation note is that converting gray into HSL or HSV will give a hue of 0
//! degrees, although any hue could be used in its place.
use std::f64;
use std::f64::EPSILON;
use std::str::FromStr;
use bound::Bound;
use color::{Color, RGBColor, XYZColor};
use coord::Coord;
use csscolor::{parse_hsl_hsv_tuple, CSSParseError};
use illuminants::Illuminant;
/// A color in the HSL color space, a direct transformation of the sRGB space. sHSL is used to
/// distinguish this space from a similar transformation of a different RGB space, which can cause
/// some confusion as other implementations of HSL (such as on the web) omit this distinction.
/// # Example
/// Shifting from red to yellow creates two colors of clearly different brightnesses. This is because
/// HSL doesn't account for the perceptual difference in brightness of light and dark colors.
///
/// ```
/// # use scarlet::prelude::*;
/// # use scarlet::colors::HSLColor;
/// let red = HSLColor{h: 20., s: 0.5, l: 0.5};
/// let yellow = HSLColor{h: 60., s: 0.5, l: 0.5};
/// println!("{} {}", red.convert::<RGBColor>().to_string(), yellow.convert::<RGBColor>().to_string());
/// // prints #BF6A40 #BFBF40
/// // note how the second one is strictly more light
/// ```
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct HSLColor {
    /// The hue component. Ranges from 0 to 360, as the angle in a cylindrical space. Exactly the same
    /// as the hue component of HSV. Grays (chroma 0) are assigned a hue of 0 by convention.
    pub h: f64,
    /// The saturation component. Ranges between 0 and 1. Note that this is much less accurate to
    /// human perception than the chroma or saturation found in other, higher-fidelity color spaces.
    pub s: f64,
    /// The lightness component. Ranges from 0 to 1. Defined in HSL as the average of the largest and
    /// smallest color components in RGB, which sacrifices accuracy for convenience.
    pub l: f64,
}
impl Color for HSLColor {
    /// Converts from XYZ to HSL through RGB: thus, there is a limited precision because RGB colors
    /// are limited to integer values of R, G, and B.
    fn from_xyz(xyz: XYZColor) -> HSLColor {
        // first get RGB color
        let rgb = RGBColor::from_xyz(xyz);
        // this is sorta interesting: a hexagonal projection instead of the circular projection used
        // in CIEHCL. It turns out that, if you tilt the RGB cube and project it into a hexagon, the
        // equivalent of radius is simply the largest component minus the smallest component: adding
        // a constant to every component simply travels up and down vertically and doesn't change the
        // projection.
        // I call this chroma, but it's a very very rough estimate of the actual color attribute.
        // More info: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
        let components = [rgb.r, rgb.g, rgb.b];
        let max_c = components.iter().cloned().fold(-1.0, f64::max);
        let min_c = components.iter().cloned().fold(2.0, f64::min);
        let chroma = max_c - min_c;
        // hue is crazy in a hexagon! no more trig functions for us!
        // it's technically the proportion of the length of the hexagon through the point, but it's
        // treated as degrees
        let mut hue = if chroma == 0.0 {
            // could be anything, undefined according to Wikipedia, in Scarlet just 0 for gray
            0.0
        } else if (max_c - rgb.r).abs() < EPSILON {
            // in red sector: find which part by comparing green and blue and scaling
            // adding green moves up on the hexagon, adding blue moves down: hence, linearity
            // the modulo makes sure it's in the range 0-360
            (((rgb.g - rgb.b) / chroma) % 6.0) * 60.0
        } else if (max_c - rgb.g).abs() < EPSILON {
            // similar to above, but you add an offset
            (((rgb.b - rgb.r) / chroma) % 6.0) * 60.0 + 120.0
        } else {
            // same as above, different offset
            (((rgb.r - rgb.g) / chroma) % 6.0) * 60.0 + 240.0
        };
        // if hue still not in 0-360, add until it does: this can sometimes happen
        // (Rust's `%` keeps the sign of the dividend, so the sector math above can go negative)
        while hue < 0. {
            hue += 360.;
        }
        while hue >= 360. {
            hue -= 360.;
        }
        // saturation, scientifically speaking, is chroma adjusted for lightness. For HSL, it's
        // defined relative to the maximum chroma, which varies depending on the place on the
        // cone. Thus, I'll compute lightness first.
        // now we choose lightness as the average of the largest and smallest components. This
        // essentially translates to a double hex cone, quite the interesting structure!
        let lightness = (max_c + min_c) / 2.0;
        // now back to saturation
        let saturation = if (lightness - 1.0).abs() < EPSILON || lightness == 0.0 {
            // this would be a divide by 0 otherwise, just set it to 0 because it doesn't matter
            0.0
        } else {
            chroma / (1.0 - (2.0 * lightness - 1.0).abs())
        };
        HSLColor {
            h: hue,
            s: saturation,
            l: lightness,
        }
    }

    /// Converts back to XYZ through RGB.
    fn to_xyz(&self, illuminant: Illuminant) -> XYZColor {
        // first get back chroma
        let chroma = (1.0 - (2.0 * self.l - 1.0).abs()) * self.s;
        // find the point with 0 lightness that matches ours in the other two components
        // intermediate value is the second-largest RGB value, where C is the largest because the
        // smallest is 0: call this x
        let x = chroma * (1.0 - ((self.h / 60.0) % 2.0 - 1.0).abs());
        // now split based on which line of the hexagon we're on, i.e., which are the two largest
        // components
        let (r1, g1, b1) = if self.h <= 60.0 {
            (chroma, x, 0.0)
        } else if self.h <= 120.0 {
            (x, chroma, 0.0)
        } else if self.h <= 180.0 {
            (0.0, chroma, x)
        } else if self.h <= 240.0 {
            (0.0, x, chroma)
        } else if self.h <= 300.0 {
            (x, 0.0, chroma)
        } else {
            (chroma, 0.0, x)
        };
        // now we add the right value to each component to get the correct lightness and scale back
        // to 0-255
        let offset = self.l - chroma / 2.0;
        let r = r1 + offset;
        let g = g1 + offset;
        let b = b1 + offset;
        RGBColor { r, g, b }.to_xyz(illuminant)
    }
}
impl From<Coord> for HSLColor {
fn from(c: Coord) -> HSLColor {
HSLColor {
h: c.x,
s: c.y,
l: c.z,
}
}
}
/// Convert to a coordinate as (hue, saturation, lightness).
///
/// Implemented as `From` rather than a hand-written `Into` (the idiomatic
/// direction); the standard blanket impl still provides
/// `HSLColor: Into<Coord>`, so existing `.into()` call sites keep working.
impl From<HSLColor> for Coord {
    fn from(color: HSLColor) -> Coord {
        Coord {
            x: color.h,
            y: color.s,
            z: color.l,
        }
    }
}
impl Bound for HSLColor {
    // Component ranges: hue in degrees [0, 360]; saturation and lightness in [0, 1].
    fn bounds() -> [(f64, f64); 3] {
        [(0., 360.), (0., 1.), (0., 1.)]
    }
}
impl FromStr for HSLColor {
    type Err = CSSParseError;

    // Parse CSS-style `hsl(H, S%, L%)` notation; the tuple parser receives
    // everything from the opening parenthesis onwards.
    fn from_str(s: &str) -> Result<HSLColor, CSSParseError> {
        match s.strip_prefix("hsl") {
            Some(tup) if tup.starts_with('(') => {
                parse_hsl_hsv_tuple(tup).map(|(h, s, l)| HSLColor { h, s, l })
            }
            _ => Err(CSSParseError::InvalidColorSyntax),
        }
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    use consts::TEST_PRECISION;

    #[test]
    fn test_hsl_rgb_conversion() {
        // Pure sRGB red must map to hue 0, full saturation, half lightness.
        let red_rgb = RGBColor {
            r: 1.,
            g: 0.,
            b: 0.,
        };
        let red_hsl: HSLColor = red_rgb.convert();
        // FIX: these comparisons previously lacked `.abs()`, so e.g.
        // `(s - 1.0) <= 0.0001` was vacuously true for ANY s <= 1.0001 and the
        // assertions could never catch an undershoot (s = 0 would have passed).
        assert!(red_hsl.h.abs() <= 0.0001);
        assert!((red_hsl.s - 1.0).abs() <= 0.0001);
        assert!((red_hsl.l - 0.5).abs() <= 0.0001);
        assert!(red_hsl.distance(&red_rgb) < TEST_PRECISION);
        let lavender_hsl = HSLColor {
            h: 245.0,
            s: 0.5,
            l: 0.6,
        };
        let lavender_rgb: RGBColor = lavender_hsl.convert();
        assert_eq!(lavender_rgb.to_string(), "#6E66CC");
    }
    #[test]
    fn test_hsl_string_parsing() {
        // Out-of-range components are accepted and clamped/normalized.
        let red_hsl: HSLColor = "hsl(0, 120%, 50%)".parse().unwrap();
        // FIX: same missing-`.abs()` defect as above.
        assert!(red_hsl.h.abs() <= 0.0001);
        assert!((red_hsl.s - 1.0).abs() <= 0.0001);
        assert!((red_hsl.l - 0.5).abs() <= 0.0001);
        let lavender_hsl: HSLColor = "hsl(-475, 50%, 60%)".parse().unwrap();
        let lavender_rgb: RGBColor = lavender_hsl.convert();
        assert_eq!(lavender_rgb.to_string(), "#6E66CC");
        // test error
        assert!("hsl(254%, 0, 0)".parse::<HSLColor>().is_err());
    }
}
| 41.808163 | 103 | 0.604706 |
cc806e47c30f76d645d5260c2c88c8e630eacd5a | 2,117 | use core::fmt;
use std::ops::{Add, Div, Mul, Sub};
use crate::Zero;
use super::with_var::WithVar;
use super::one_unknown::Equa1;
// A linear equation in two unknowns, read as `.0 + .1 + .2 = 0`: a constant
// term plus two variable terms (each `WithVar` pairs a coefficient with a
// variable name).
#[derive(Clone, Copy)]
pub(crate) struct Equa2<T>(pub T, pub WithVar<T>, pub WithVar<T>);
impl<T> fmt::Debug for Equa2<T>
where T: fmt::Debug + Copy {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?} + {:?} + {:?}", self.0, self.1, self.2)
}
}
impl<T> Equa2<T>{
pub fn new(non_var: T, a: WithVar<T>, b: WithVar<T>) -> Self {
Equa2(non_var, a, b)
}
    /// Solve this equation together with `other` (both read as `expr = 0`),
    /// printing the working; returns each variable name paired with its value.
    // NOTE(review): the elimination below assumes both equations carry the
    // same variables in the same positions (see the original inline comment).
    pub fn set_eq(&self, other: Self) -> ((char, T), (char, T))
    where T: Add<T, Output = T> + Sub<T, Output = T> + Mul<T, Output = T> + Div<T, Output = T> + fmt::Debug + Zero + Copy { // vars need to line up not great
        println!("{:?} = 0.0\n{:?} = 0.0", self, other);
        // Eliminate the second variable by combining the two equations, then
        // solve the resulting one-unknown equation for the first variable.
        let var_1: (char, T) = (self.1.var, self.repl_var( Equa1::new(other.0 - self.0, other.1 - self.1) / (self.2 - other.2).val, self.2.var).solve(T::ZERO));
        // Back-substitute the first result to recover the second variable.
        let var_2: (char, T) = (self.2.var, self.plug_in(var_1.0, var_1.1).solve(T::ZERO));
        println!("{:?} = {:?}\n{:?} = {:?}", var_1.0, var_1.1, var_2.0, var_2.1);
        (var_1, var_2)
    }
    /// Substitute the one-unknown equation `equa_1` for the term carrying
    /// `var` (scaling it by that term's coefficient) and fold the result into
    /// this equation, leaving an equation in the remaining variable.
    pub fn repl_var(&self, equa_1: Equa1<T>, var: char) -> Equa1<T>
    where T: Mul<T, Output = T> + Add<T, Output = T> + Copy {
        let (val_mul_of_var, other_mul_1) = self.get_var_mul_and_other(var); // 0 has no var not included
        let equa_1 = equa_1 * val_mul_of_var.val;
        Equa1::new(self.0 + equa_1.0, other_mul_1 + equa_1.1)
    }
pub fn get_var_mul_and_other(&self, var: char) -> (WithVar<T>, WithVar<T>)
where T: Copy{
if self.1.var == var {
(self.1, self.2)
}else {
(self.2, self.1)
}
}
pub fn plug_in(&self, var: char, val: T) -> Equa1<T>
where T: Mul<T, Output = T> + Add<T, Output = T> + Copy {
if self.1.var == var {
Equa1::new(self.0 + self.1.val * val, self.2)
}else {
Equa1::new(self.0 + self.2.val * val, self.1)
}
}
} | 36.5 | 161 | 0.522437 |
fe32510d2aa15da5cbf20c8d818f40a0d827db90 | 1,644 | //! Answer conversions
use crate::answer::db_types::{Answer, NewAnswer};
use wire;
/// Convert a database answer into its wire representation.
///
/// Implemented as `From` rather than a hand-written `Into` (the idiomatic
/// direction); the standard blanket impl still provides
/// `Answer: Into<wire::answer::Answer>`, so existing `.into()` calls work.
impl From<Answer> for wire::answer::Answer {
    fn from(answer: Answer) -> wire::answer::Answer {
        wire::answer::Answer {
            uuid: answer.uuid,
            user_uuid: answer.user_uuid,
            question_uuid: answer.question_uuid,
            publicly_visible: answer.publicly_visible,
            answer_text: answer.answer_text,
            updated_at: answer.updated_at,
            created_at: answer.created_at,
        }
    }
}
// Convert a wire answer into its database representation (field-for-field).
impl From<wire::answer::Answer> for Answer {
    fn from(answer: wire::answer::Answer) -> Self {
        let wire::answer::Answer {
            uuid,
            user_uuid,
            question_uuid,
            publicly_visible,
            answer_text,
            updated_at,
            created_at,
        } = answer;
        Answer {
            uuid,
            user_uuid,
            question_uuid,
            publicly_visible,
            answer_text,
            updated_at,
            created_at,
        }
    }
}
/// Convert a new database answer into its wire representation.
///
/// Implemented as `From` rather than a hand-written `Into` (the idiomatic
/// direction); the standard blanket impl still provides
/// `NewAnswer: Into<wire::answer::NewAnswer>`, so existing `.into()` calls work.
impl From<NewAnswer> for wire::answer::NewAnswer {
    fn from(new_answer: NewAnswer) -> wire::answer::NewAnswer {
        wire::answer::NewAnswer {
            user_uuid: new_answer.user_uuid,
            question_uuid: new_answer.question_uuid,
            publicly_visible: new_answer.publicly_visible,
            answer_text: new_answer.answer_text,
        }
    }
}
// Convert a new wire answer into its database representation (field-for-field).
impl From<wire::answer::NewAnswer> for NewAnswer {
    fn from(new_answer: wire::answer::NewAnswer) -> Self {
        let wire::answer::NewAnswer {
            user_uuid,
            question_uuid,
            publicly_visible,
            answer_text,
        } = new_answer;
        NewAnswer {
            user_uuid,
            question_uuid,
            publicly_visible,
            answer_text,
        }
    }
}
| 29.890909 | 58 | 0.605231 |
e52fe5dfbfb8af3b54813723193430d2a4ed4931 | 12,949 | // Copyright 2018-2019 Cryptape Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Define public methods about convert.
use crate::fixed_hash::HashConstructor;
use crate::utils;
use quote::quote;
impl HashConstructor {
    /// Emit all public conversion methods and their error plumbing, in the
    /// order the generated code should appear.
    pub fn defun_pub_conv(&self) {
        self.defun_pub_conv_from_slice();
        self.defun_pub_conv_into_slice();
        self.attach_error_for_conv_from_str();
        self.defun_pub_conv_from_hex_str_dict();
        self.defun_pub_conv_from_hex_str();
    }
    /// Generate the `{conv_type}SliceError` inner error type, its conversion
    /// into the main error enum, and the matching main-enum variant.
    /// `conv_type` is "From" or "Into"; `type_explain` is the word used in the
    /// generated error message.
    fn attach_error_for_conv_slice(&self, conv_type: &str, type_explain: &str) {
        let error_name = &self.ts.error_name;
        let error_item = utils::ident_to_ts(format!("{}Slice", conv_type).as_ref());
        let inner_error_name = utils::ident_to_ts(format!("{}SliceError", conv_type).as_ref());
        let error_explain = format!("failed to convert {} slice since {{}}", type_explain);
        // Inner error enum plus `From<inner> for main` conversion.
        let part = quote!(
            /// Error for parse from slice.
            #[derive(Debug, Fail)]
            pub enum #inner_error_name {
                #[fail(display = "invalid length: {}", _0)]
                InvalidLength(usize),
            }
            impl From<#inner_error_name> for #error_name {
                fn from(err: #inner_error_name) -> #error_name {
                    #error_name::#error_item(err)
                }
            }
        );
        self.attach_common(part);
        // Variant added to the main error enum, wrapping the inner error.
        let part = quote!(
            #[fail(display = #error_explain, _0)]
            #error_item(#[fail(cause)] #inner_error_name),
        );
        self.error(part);
    }
    /// Generate the public `from_slice` constructor: copies `input` into a
    /// zeroed value, rejecting slices whose length differs from the hash size.
    fn defun_pub_conv_from_slice(&self) {
        self.attach_error_for_conv_slice("From", "from");
        let error_name = &self.ts.error_name;
        let bytes_size = &self.ts.unit_amount;
        let part = quote!(
            /// Convert from slice.
            #[inline]
            pub fn from_slice(input: &[u8]) -> Result<Self, #error_name> {
                if input.len() != #bytes_size {
                    Err(FromSliceError::InvalidLength(input.len()))?
                } else {
                    let mut ret = Self::zero();
                    ret.mut_inner()[..].copy_from_slice(input);
                    Ok(ret)
                }
            }
        );
        self.defun(part);
    }
    /// Generate the public `into_slice` method: copies the inner bytes into
    /// `output`, rejecting buffers whose length differs from the hash size.
    fn defun_pub_conv_into_slice(&self) {
        self.attach_error_for_conv_slice("Into", "into");
        let error_name = &self.ts.error_name;
        let bytes_size = &self.ts.unit_amount;
        let part = quote!(
            /// Convert into slice.
            #[inline]
            pub fn into_slice(&self, output: &mut [u8]) -> Result<(), #error_name> {
                if output.len() != #bytes_size {
                    Err(IntoSliceError::InvalidLength(output.len()))?
                } else {
                    let inner = self.inner();
                    output.copy_from_slice(&inner[..]);
                    Ok(())
                }
            }
        );
        self.defun(part);
    }
    /// Generate the `FromStrError` type (invalid character / invalid length),
    /// its conversion into the main error enum, and the matching variant.
    fn attach_error_for_conv_from_str(&self) {
        let error_name = &self.ts.error_name;
        let part = quote!(
            /// Error for parse from string.
            #[derive(Debug, Fail)]
            pub enum FromStrError {
                #[fail(display = "invalid character code `{}` at {}", chr, idx)]
                InvalidCharacter { chr: u8, idx: usize },
                #[fail(display = "invalid length: {}", _0)]
                InvalidLength(usize),
            }
            impl From<FromStrError> for #error_name {
                fn from(err: FromStrError) -> #error_name {
                    #error_name::FromStr(err)
                }
            }
        );
        self.attach_common(part);
        // Variant added to the main error enum, wrapping the inner error.
        let part = quote!(#[fail(display = "failed to parse from string {}", _0)]
        FromStr(
            #[fail(cause)]
            FromStrError
        ),);
        self.error(part);
    }
    /// Emit the 256-entry hex-digit lookup tables used by the string parsers.
    ///
    /// `DICT_HEX_LO` maps an ASCII byte to the value of a low nibble and
    /// `DICT_HEX_HI` to the value of a high nibble (pre-shifted by 4);
    /// `U8MX` (0xff) marks bytes that are not hex digits. Table lookup
    /// avoids per-character branching in the generated parsing code.
    fn defun_pub_conv_from_hex_str_dict(&self) {
        let part = quote!(
            pub(crate) const U8MX: u8 = u8::max_value();
            pub(crate) static DICT_HEX_LO: [u8; 256] = [
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
                0x0f, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0x0a,
                0x0b, 0x0c, 0x0d, 0x0e, 0x0f, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX,
            ];
            pub(crate) static DICT_HEX_HI: [u8; 256] = [
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0x00, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70,
                0x80, 0x90, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0,
                0xf0, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, 0xa0,
                0xb0, 0xc0, 0xd0, 0xe0, 0xf0, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX, U8MX,
                U8MX, U8MX, U8MX, U8MX,
            ];
        );
        self.util(part);
    }
fn defun_pub_conv_from_hex_str(&self) {
let error_name = &self.ts.error_name;
let utils_name = &self.ts.utils_name;
let char_amount_max = utils::pure_uint_to_ts(self.info.unit_amount * 2);
let part_core = if self.info.expand {
let loop_unit_amount = &utils::pure_uint_list_to_ts(0..self.info.unit_amount);
let loop_utils_name_copy1 = &vec![utils_name; self.info.unit_amount as usize];
let loop_utils_name_copy2 = &vec![utils_name; self.info.unit_amount as usize];
let loop_utils_name_copy3 = &vec![utils_name; self.info.unit_amount as usize];
let loop_utils_name_copy4 = &vec![utils_name; self.info.unit_amount as usize];
quote!(
let mut input_bytes = input.bytes();
#({
let idx = #loop_unit_amount;
let hi = {
let chr = input_bytes.next().unwrap_or_else(|| unreachable!());
let hi = #loop_utils_name_copy1::DICT_HEX_HI[usize::from(chr)];
if hi == #loop_utils_name_copy2::U8MX {
Err(FromStrError::InvalidCharacter { chr, idx: idx*2 })?;
};
hi
};
let lo = {
let chr = input_bytes.next().unwrap_or_else(|| unreachable!());
let lo = #loop_utils_name_copy3::DICT_HEX_LO[usize::from(chr)];
if lo == #loop_utils_name_copy4::U8MX {
Err(FromStrError::InvalidCharacter { chr, idx: idx*2+1 })?;
};
lo
};
inner[idx] = hi | lo;
})*
)
} else {
quote!(for (idx, chr) in input.bytes().enumerate() {
let val = if idx % 2 == 0 {
#utils_name::DICT_HEX_HI[usize::from(chr)]
} else {
#utils_name::DICT_HEX_LO[usize::from(chr)]
};
if val == #utils_name::U8MX {
Err(FromStrError::InvalidCharacter { chr, idx })?;
}
inner[idx / 2] |= val;
})
};
let part = quote!(
/// Convert from a fixed length hexadecimal string.
#[inline]
pub fn from_hex_str(input: &str) -> Result<Self, #error_name> {
let len = input.len();
if len != #char_amount_max {
Err(FromStrError::InvalidLength(len))?;
}
let mut ret = Self::zero();
{
let inner = ret.mut_inner();
#part_core
}
Ok(ret)
}
/// Convert from an arbitrary length zero-trimmed hexadecimal string.
/// Fisrt char should not be zero if the input has more than one char.
#[inline]
pub fn from_trimmed_hex_str(input: &str) -> Result<Self, #error_name> {
let len = input.len();
if len == 0 || len > #char_amount_max {
Err(FromStrError::InvalidLength(len))?;
} else if input.as_bytes()[0] == b'0' {
if len == 1 {
return Ok(Self::zero());
} else {
Err(FromStrError::InvalidCharacter { chr: b'0', idx: 0 })?;
}
}
let mut ret = Self::zero();
let mut input_bytes = input.bytes();
let mut idx = 0;
let mut unit_idx = (#char_amount_max - len) / 2;
let mut high = len % 2 == 0;
{
let inner = ret.mut_inner();
for chr in input_bytes {
let val = if high {
#utils_name::DICT_HEX_HI[usize::from(chr)]
} else {
#utils_name::DICT_HEX_LO[usize::from(chr)]
};
if val == #utils_name::U8MX {
Err(FromStrError::InvalidCharacter { chr, idx })?;
}
idx += 1;
inner[unit_idx] |= val;
if high {
high = false;
} else {
high = true;
unit_idx += 1;
}
}
}
Ok(ret)
}
);
self.defun(part);
}
}
| 47.259124 | 99 | 0.497027 |
e547633cbfef9e2d038ddb9e5793e563955adb60 | 1,391 | use crate::{components::AddToCartButton, router::AppRoutes, types::Product};
use yew::prelude::*;
use yew_router::components::RouterAnchor;
/// Card widget rendering a single product (image, name, price) with an
/// add-to-cart button; the card body links to the product detail route.
pub struct ProductCard {
    // Properties passed in by the parent component.
    props: Props,
}
/// Properties accepted by the product card component.
#[derive(Properties, Clone)]
pub struct Props {
    /// The product to display.
    pub product: Product,
    /// Callback invoked with the product when "add to cart" is clicked.
    #[prop_or_default]
    pub on_add_to_cart: Callback<Product>,
}
impl Component for ProductCard {
    type Message = ();
    type Properties = Props;

    fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {
        Self { props }
    }

    // No messages are produced by this component; always re-render.
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        true
    }

    /// Accept new props from the parent and re-render.
    ///
    /// Bug fix: the previous implementation discarded the incoming props
    /// (`_props`) while still returning `true`, so the card re-rendered with
    /// stale data whenever the parent changed the product or callback.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }

    fn view(&self) -> Html {
        type Anchor = RouterAnchor<AppRoutes>;
        html! {
            <div class="product_card_container">
                <Anchor route=AppRoutes::ProductDetail(self.props.product.id) classes="product_card_anchor">
                    <img class="product_card_image" src={&self.props.product.image}/>
                    <div class="product_card_name">{&self.props.product.name}</div>
                    <div class="product_card_price">{"$"}{&self.props.product.price}</div>
                </Anchor>
                <AddToCartButton product=self.props.product.clone() on_add_to_cart=self.props.on_add_to_cart.clone() />
            </div>
        }
    }
}
| 29.595745 | 119 | 0.61179 |
e2c352fc7fa9e4a9a71cd0132f3dd3e02fa36c0c | 6,042 | //!
//! The malloc bdev as the name implies, creates an in memory disk. Note
//! that the backing memory is allocated from huge pages and not from the
//! heap. IOW, you must ensure you do not run out of huge pages while using
//! this.
use crate::{
bdev::{dev::reject_unknown_parameters, util::uri},
nexus_uri::{
NexusBdevError,
{self},
},
};
use async_trait::async_trait;
use std::{collections::HashMap, convert::TryFrom};
use url::Url;
use uuid::Uuid;
/// Parameters for an in-memory (malloc) bdev, parsed from a `malloc://` URI.
#[derive(Debug)]
pub struct Malloc {
    /// the name of the bdev we created, this is equal to the URI path minus
    /// the leading '/'
    name: String,
    /// alias which can be used to open the bdev
    alias: String,
    /// the number of blocks the device should have
    num_blocks: u64,
    /// the size of a single block if no blk_size is given we default to 512
    blk_size: u32,
    /// uuid of the spdk bdev; generated at parse time when the URI does not
    /// supply one
    uuid: Option<uuid::Uuid>,
}
use crate::{
bdev::{CreateDestroy, GetName},
core::Bdev,
ffihelper::{cb_arg, done_errno_cb, ErrnoResult, IntoCString},
};
use futures::channel::oneshot;
use nix::errno::Errno;
use snafu::ResultExt;
use spdk_sys::delete_malloc_disk;
impl TryFrom<&Url> for Malloc {
    type Error = NexusBdevError;

    /// Parse a `malloc://` URI of the form
    /// `malloc:///name?size_mb=N&blk_size=512|4096&num_blocks=N&uuid=...`.
    ///
    /// `size_mb` and `num_blocks` are mutually exclusive; `blk_size` defaults
    /// to 512 and must be 512 or 4096. Unknown query parameters are rejected.
    ///
    /// Fixes vs. the previous version:
    /// - the `num_blocks` parse error was mislabeled as `blk_size`;
    /// - `size_mb << 20` was computed in `u32`, overflowing for sizes of
    ///   4096 MiB or more — the block count is now computed in `u64`.
    fn try_from(uri: &Url) -> Result<Self, Self::Error> {
        let segments = uri::segments(uri);
        if segments.is_empty() {
            return Err(NexusBdevError::UriInvalid {
                uri: uri.to_string(),
                message: "no path segments".to_string(),
            });
        }

        let mut parameters: HashMap<String, String> =
            uri.query_pairs().into_owned().collect();

        let blk_size: u32 = if let Some(value) = parameters.remove("blk_size") {
            value.parse().context(nexus_uri::IntParamParseError {
                uri: uri.to_string(),
                parameter: String::from("blk_size"),
            })?
        } else {
            512
        };

        if blk_size != 512 && blk_size != 4096 {
            return Err(NexusBdevError::UriInvalid {
                uri: uri.to_string(),
                message:
                    "invalid blk_size specified must be one of 512 or 4096"
                        .to_string(),
            });
        }

        let size: u32 = if let Some(value) = parameters.remove("size_mb") {
            value.parse().context(nexus_uri::IntParamParseError {
                uri: uri.to_string(),
                parameter: String::from("size_mb"),
            })?
        } else {
            0
        };

        let num_blocks: u32 =
            if let Some(value) = parameters.remove("num_blocks") {
                value.parse().context(nexus_uri::IntParamParseError {
                    uri: uri.to_string(),
                    parameter: String::from("num_blocks"),
                })?
            } else {
                0
            };

        if size != 0 && num_blocks != 0 {
            return Err(NexusBdevError::UriInvalid {
                uri: uri.to_string(),
                message: "conflicting parameters num_blocks and size_mb are mutually exclusive"
                    .to_string(),
            });
        }

        let uuid = uri::uuid(parameters.remove("uuid")).context(
            nexus_uri::UuidParamParseError {
                uri: uri.to_string(),
            },
        )?;

        reject_unknown_parameters(uri, parameters)?;

        // Prefer an explicit block count; otherwise derive it from size_mb.
        // 64-bit math avoids overflow for size_mb >= 4096 (4 GiB).
        let num_blocks: u64 = if num_blocks != 0 {
            u64::from(num_blocks)
        } else {
            (u64::from(size) << 20) / u64::from(blk_size)
        };

        Ok(Self {
            name: uri.path()[1 ..].into(),
            alias: uri.to_string(),
            num_blocks,
            blk_size,
            uuid: uuid.or_else(|| Some(Uuid::new_v4())),
        })
    }
}
impl GetName for Malloc {
    /// Return the bdev name (the URI path without the leading '/').
    fn get_name(&self) -> String {
        self.name.clone()
    }
}
#[async_trait(?Send)]
impl CreateDestroy for Malloc {
    type Error = NexusBdevError;

    /// Create the malloc bdev via the SPDK FFI and return its name.
    ///
    /// Fails with `BdevExists` if a bdev with the same name is already
    /// registered. On success, the UUID chosen at parse time is applied to
    /// the new bdev and the URI alias is attached (alias failure is only
    /// logged, not fatal).
    async fn create(&self) -> Result<String, Self::Error> {
        if Bdev::lookup_by_name(&self.name).is_some() {
            return Err(NexusBdevError::BdevExists {
                name: self.name.clone(),
            });
        }

        let cname = self.name.clone().into_cstring();

        let ret = unsafe {
            // out-parameter filled in by SPDK; we only need the status code,
            // the bdev is looked up again by name below.
            let mut bdev: *mut spdk_sys::spdk_bdev = std::ptr::null_mut();
            spdk_sys::create_malloc_disk(
                &mut bdev,
                cname.as_ptr(),
                std::ptr::null_mut(),
                self.num_blocks,
                self.blk_size,
            )
        };

        if ret != 0 {
            Err(NexusBdevError::CreateBdev {
                source: Errno::from_i32(ret),
                name: self.name.clone(),
            })
        } else {
            // Best-effort post-creation setup; `map` is used purely for its
            // side effects and the Option results are discarded.
            self.uuid.map(|u| {
                Bdev::lookup_by_name(&self.name).map(|mut b| {
                    b.set_uuid(Some(u.to_string()));
                    if !b.add_alias(&self.alias) {
                        error!(
                            "Failed to add alias {} to device {}",
                            self.alias,
                            self.get_name()
                        );
                    }
                })
            });
            Ok(self.name.clone())
        }
    }

    /// Destroy the malloc bdev, completing asynchronously via a oneshot
    /// channel signalled from the SPDK completion callback.
    async fn destroy(self: Box<Self>) -> Result<(), Self::Error> {
        if let Some(bdev) = Bdev::lookup_by_name(&self.name) {
            let (s, r) = oneshot::channel::<ErrnoResult<()>>();
            unsafe {
                delete_malloc_disk(
                    bdev.as_ptr(),
                    Some(done_errno_cb),
                    cb_arg(s),
                )
            };
            // Dropped sender -> CancelBdev; errno from callback -> DestroyBdev.
            r.await
                .context(nexus_uri::CancelBdev {
                    name: self.name.clone(),
                })?
                .context(nexus_uri::DestroyBdev {
                    name: self.name,
                })
        } else {
            Err(NexusBdevError::BdevNotFound {
                name: self.name,
            })
        }
    }
}
| 30.361809 | 95 | 0.490566 |
01ea63e862a493a97560c4f01b6da54c7103b21c | 28,346 | //! Hjson Serialization
//!
//! This module provides for Hjson serialization with the type `Serializer`.
use std::fmt::{Display, LowerExp};
use std::io;
use std::num::FpCategory;
use super::error::{Error, ErrorCode, Result};
use serde::ser;
use super::util::ParseNumber;
use regex::Regex;
use lazy_static::lazy_static;
/// A structure for serializing Rust values into Hjson.
/// A structure for serializing Rust values into Hjson.
pub struct Serializer<W, F> {
    // Sink that receives the serialized bytes.
    writer: W,
    // Strategy object deciding separators, indentation and newlines.
    formatter: F,
}
impl<'a, W> Serializer<W, HjsonFormatter<'a>>
where
    W: io::Write,
{
    /// Creates a new Hjson serializer using the default `HjsonFormatter`
    /// (two-space indentation).
    #[inline]
    pub fn new(writer: W) -> Self {
        Serializer::with_formatter(writer, HjsonFormatter::new())
    }
}
impl<W, F> Serializer<W, F>
where
    W: io::Write,
    F: Formatter,
{
    /// Creates a new Hjson visitor whose output will be written to the writer
    /// specified, using a caller-supplied formatter.
    #[inline]
    pub fn with_formatter(writer: W, formatter: F) -> Self {
        Serializer { writer, formatter }
    }

    /// Unwrap the `Writer` from the `Serializer`.
    #[inline]
    pub fn into_inner(self) -> W {
        self.writer
    }
}
/// Progress of a compound (seq/map/struct) being serialized; controls
/// whether a separator is needed and whether a closing bracket is written.
#[doc(hidden)]
#[derive(Eq, PartialEq)]
pub enum State {
    /// Compound was empty; `[]`/`{}` was written eagerly, nothing to close.
    Empty,
    /// Next element is the first one (no separator before it).
    First,
    /// At least one element written; later elements need a separator.
    Rest,
}
/// Helper driving serialization of one compound value (seq, tuple, map,
/// struct, or variant); holds the parent serializer plus element progress.
#[doc(hidden)]
pub struct Compound<'a, W, F> {
    ser: &'a mut Serializer<W, F>,
    state: State,
}
// Core serde entry point: every primitive notifies the formatter via
// `start_value` before writing (so a pending `:` gets its space), compounds
// delegate bracket/comma placement to the formatter, and enum variants are
// wrapped in a single-key object `{ variant: ... }`.
impl<'a, W, F> ser::Serializer for &'a mut Serializer<W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    type SerializeSeq = Compound<'a, W, F>;
    type SerializeTuple = Compound<'a, W, F>;
    type SerializeTupleStruct = Compound<'a, W, F>;
    type SerializeTupleVariant = Compound<'a, W, F>;
    type SerializeMap = Compound<'a, W, F>;
    type SerializeStruct = Compound<'a, W, F>;
    type SerializeStructVariant = Compound<'a, W, F>;

    #[inline]
    fn serialize_bool(self, value: bool) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        if value {
            self.writer.write_all(b"true").map_err(From::from)
        } else {
            self.writer.write_all(b"false").map_err(From::from)
        }
    }

    #[inline]
    fn serialize_i8(self, value: i8) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_i16(self, value: i16) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_i32(self, value: i32) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_i64(self, value: i64) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_u8(self, value: u8) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_u16(self, value: u16) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_u32(self, value: u32) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    #[inline]
    fn serialize_u64(self, value: u64) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        write!(&mut self.writer, "{}", value).map_err(From::from)
    }

    // Negative zero is normalized to positive zero for floats.
    #[inline]
    fn serialize_f32(self, value: f32) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        fmt_f32_or_null(&mut self.writer, if value == -0f32 { 0f32 } else { value })
            .map_err(From::from)
    }

    #[inline]
    fn serialize_f64(self, value: f64) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        fmt_f64_or_null(&mut self.writer, if value == -0f64 { 0f64 } else { value })
            .map_err(From::from)
    }

    #[inline]
    fn serialize_char(self, value: char) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        escape_char(&mut self.writer, value).map_err(From::from)
    }

    // Strings go through quote_str, which picks quoteless / quoted /
    // multiline representation; it calls start_value itself.
    #[inline]
    fn serialize_str(self, value: &str) -> Result<()> {
        quote_str(&mut self.writer, &mut self.formatter, value).map_err(From::from)
    }

    // Byte slices are serialized as a sequence of numbers.
    #[inline]
    fn serialize_bytes(self, value: &[u8]) -> Result<()> {
        let mut seq = self.serialize_seq(Some(value.len()))?;
        for byte in value {
            ser::SerializeSeq::serialize_element(&mut seq, byte)?
        }
        ser::SerializeSeq::end(seq)
    }

    #[inline]
    fn serialize_unit(self) -> Result<()> {
        self.formatter.start_value(&mut self.writer)?;
        self.writer.write_all(b"null").map_err(From::from)
    }

    #[inline]
    fn serialize_unit_struct(self, _name: &'static str) -> Result<()> {
        self.serialize_unit()
    }

    #[inline]
    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
    ) -> Result<()> {
        self.serialize_str(variant)
    }

    /// Serialize newtypes without an object wrapper.
    #[inline]
    fn serialize_newtype_struct<T>(self, _name: &'static str, value: &T) -> Result<()>
    where
        T: ?Sized + ser::Serialize,
    {
        value.serialize(self)
    }

    // Newtype variants become `{ variant: value }`.
    #[inline]
    fn serialize_newtype_variant<T>(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result<()>
    where
        T: ?Sized + ser::Serialize,
    {
        self.formatter.open(&mut self.writer, b'{')?;
        self.formatter.comma(&mut self.writer, true)?;
        escape_key(&mut self.writer, variant)?;
        self.formatter.colon(&mut self.writer)?;
        value.serialize(&mut *self)?;
        self.formatter.close(&mut self.writer, b'}')
    }

    #[inline]
    fn serialize_none(self) -> Result<()> {
        self.serialize_unit()
    }

    #[inline]
    fn serialize_some<V>(self, value: &V) -> Result<()>
    where
        V: ?Sized + ser::Serialize,
    {
        value.serialize(self)
    }

    // Known-empty sequences are written eagerly as `[]` (State::Empty) so
    // `end` knows there is no bracket left to close.
    #[inline]
    fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq> {
        let state = if len == Some(0) {
            self.formatter.start_value(&mut self.writer)?;
            self.writer.write_all(b"[]")?;
            State::Empty
        } else {
            self.formatter.open(&mut self.writer, b'[')?;
            State::First
        };
        Ok(Compound { ser: self, state })
    }

    #[inline]
    fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple> {
        self.serialize_seq(Some(len))
    }

    #[inline]
    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleStruct> {
        self.serialize_seq(Some(len))
    }

    // Tuple variants become `{ variant: [ ... ] }`; the outer `}` is closed
    // by SerializeTupleVariant::end.
    #[inline]
    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleVariant> {
        self.formatter.open(&mut self.writer, b'{')?;
        self.formatter.comma(&mut self.writer, true)?;
        escape_key(&mut self.writer, variant)?;
        self.formatter.colon(&mut self.writer)?;
        self.serialize_seq(Some(len))
    }

    #[inline]
    fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap> {
        let state = if len == Some(0) {
            self.formatter.start_value(&mut self.writer)?;
            self.writer.write_all(b"{}")?;
            State::Empty
        } else {
            self.formatter.open(&mut self.writer, b'{')?;
            State::First
        };
        Ok(Compound { ser: self, state })
    }

    #[inline]
    fn serialize_struct(self, _name: &'static str, len: usize) -> Result<Self::SerializeStruct> {
        self.serialize_map(Some(len))
    }

    // Struct variants become `{ variant: { ... } }`; the outer `}` is closed
    // by SerializeStructVariant::end.
    #[inline]
    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStructVariant> {
        self.formatter.open(&mut self.writer, b'{')?;
        self.formatter.comma(&mut self.writer, true)?;
        escape_key(&mut self.writer, variant)?;
        self.formatter.colon(&mut self.writer)?;
        self.serialize_map(Some(len))
    }
}
impl<'a, W, F> ser::SerializeSeq for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    // Writes a separator (suppressed for the first element), then the value.
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        self.ser
            .formatter
            .comma(&mut self.ser.writer, self.state == State::First)?;
        self.state = State::Rest;
        value.serialize(&mut *self.ser)
    }

    // Empty sequences were already written as `[]`; otherwise close `]`.
    fn end(self) -> Result<Self::Ok> {
        match self.state {
            State::Empty => Ok(()),
            _ => self.ser.formatter.close(&mut self.ser.writer, b']'),
        }
    }
}
// Tuples serialize exactly like sequences.
impl<'a, W, F> ser::SerializeTuple for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }

    fn end(self) -> Result<Self::Ok> {
        ser::SerializeSeq::end(self)
    }
}
// Tuple structs serialize exactly like sequences.
impl<'a, W, F> ser::SerializeTupleStruct for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }

    fn end(self) -> Result<Self::Ok> {
        ser::SerializeSeq::end(self)
    }
}
impl<'a, W, F> ser::SerializeTupleVariant for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }

    // Close the inner `]` (unless the seq was empty) and then the `}` of the
    // `{ variant: [...] }` wrapper opened by serialize_tuple_variant.
    fn end(self) -> Result<Self::Ok> {
        match self.state {
            State::Empty => {}
            _ => self.ser.formatter.close(&mut self.ser.writer, b']')?,
        }

        self.ser.formatter.close(&mut self.ser.writer, b'}')
    }
}
impl<'a, W, F> ser::SerializeMap for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    // Separator, then the key (restricted to strings via MapKeySerializer),
    // then the colon.
    fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        self.ser
            .formatter
            .comma(&mut self.ser.writer, self.state == State::First)?;
        self.state = State::Rest;

        key.serialize(MapKeySerializer { ser: self.ser })?;

        self.ser.formatter.colon(&mut self.ser.writer)
    }

    fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        value.serialize(&mut *self.ser)
    }

    // Empty maps were already written as `{}`; otherwise close `}`.
    fn end(self) -> Result<Self::Ok> {
        match self.state {
            State::Empty => Ok(()),
            _ => self.ser.formatter.close(&mut self.ser.writer, b'}'),
        }
    }
}
// Structs serialize exactly like maps with static-string keys.
impl<'a, W, F> ser::SerializeStruct for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        ser::SerializeMap::serialize_entry(self, key, value)
    }

    fn end(self) -> Result<Self::Ok> {
        ser::SerializeMap::end(self)
    }
}
impl<'a, W, F> ser::SerializeStructVariant for Compound<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<()>
    where
        T: serde::Serialize,
    {
        ser::SerializeStruct::serialize_field(self, key, value)
    }

    // Close the inner `}` (unless the map was empty) and then the `}` of the
    // `{ variant: {...} }` wrapper opened by serialize_struct_variant.
    fn end(self) -> Result<Self::Ok> {
        match self.state {
            State::Empty => {}
            _ => self.ser.formatter.close(&mut self.ser.writer, b'}')?,
        }

        self.ser.formatter.close(&mut self.ser.writer, b'}')
    }
}
/// Serializer used for map keys only: accepts strings (written via
/// `escape_key`) and rejects every other type with `KeyMustBeAString`.
struct MapKeySerializer<'a, W: 'a, F: 'a> {
    ser: &'a mut Serializer<W, F>,
}
// Only `serialize_str` succeeds; every other entry point returns
// `KeyMustBeAString`, enforcing the JSON/Hjson rule that keys are strings.
// Fix vs. the previous version: `serialize_seq` declared its return type as
// `Result<Self::SerializeStruct>` instead of `Result<Self::SerializeSeq>`.
// It only compiled because both aliases resolve to `ser::Impossible<(),
// Error>`; the correct associated type is used now.
impl<'a, W, F> ser::Serializer for MapKeySerializer<'a, W, F>
where
    W: io::Write,
    F: Formatter,
{
    type Ok = ();
    type Error = Error;

    #[inline]
    fn serialize_str(self, value: &str) -> Result<()> {
        escape_key(&mut self.ser.writer, value).map_err(From::from)
    }

    // `Impossible` makes every compound path a compile-time dead end.
    type SerializeSeq = ser::Impossible<(), Error>;
    type SerializeTuple = ser::Impossible<(), Error>;
    type SerializeTupleStruct = ser::Impossible<(), Error>;
    type SerializeTupleVariant = ser::Impossible<(), Error>;
    type SerializeMap = ser::Impossible<(), Error>;
    type SerializeStruct = ser::Impossible<(), Error>;
    type SerializeStructVariant = ser::Impossible<(), Error>;

    fn serialize_bool(self, _value: bool) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_i8(self, _value: i8) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_i16(self, _value: i16) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_i32(self, _value: i32) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_i64(self, _value: i64) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_u8(self, _value: u8) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_u16(self, _value: u16) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_u32(self, _value: u32) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_u64(self, _value: u64) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_f32(self, _value: f32) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_f64(self, _value: f64) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_char(self, _value: char) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_bytes(self, _value: &[u8]) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_unit(self) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_unit_struct(self, _name: &'static str) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
    ) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_newtype_struct<T>(self, _name: &'static str, _value: &T) -> Result<()>
    where
        T: ?Sized + ser::Serialize,
    {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_newtype_variant<T>(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _value: &T,
    ) -> Result<()>
    where
        T: ?Sized + ser::Serialize,
    {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_none(self) -> Result<()> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_some<T>(self, _value: &T) -> Result<()>
    where
        T: ?Sized + ser::Serialize,
    {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleStruct> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleVariant> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }

    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStructVariant> {
        Err(Error::Syntax(ErrorCode::KeyMustBeAString, 0, 0))
    }
}
/// This trait abstracts away serializing the JSON control characters
/// (brackets, commas, colons, newlines and indentation).
pub trait Formatter {
    /// Called when serializing a '{' or '['.
    fn open<W>(&mut self, writer: &mut W, ch: u8) -> Result<()>
    where
        W: io::Write;

    /// Called when serializing a ','. `first` is true before the first
    /// element of a compound, where no separator is wanted.
    fn comma<W>(&mut self, writer: &mut W, first: bool) -> Result<()>
    where
        W: io::Write;

    /// Called when serializing a ':'.
    fn colon<W>(&mut self, writer: &mut W) -> Result<()>
    where
        W: io::Write;

    /// Called when serializing a '}' or ']'.
    fn close<W>(&mut self, writer: &mut W, ch: u8) -> Result<()>
    where
        W: io::Write;

    /// Newline with indent adjusted by `add_indent` levels.
    fn newline<W>(&mut self, writer: &mut W, add_indent: i32) -> Result<()>
    where
        W: io::Write;

    /// Start a value (flushes any whitespace owed after a ':').
    fn start_value<W>(&mut self, writer: &mut W) -> Result<()>
    where
        W: io::Write;
}
/// Default formatter producing human-friendly Hjson layout.
struct HjsonFormatter<'a> {
    // Current nesting depth (number of open compounds).
    current_indent: usize,
    // Whether the innermost open compound is an array.
    current_is_array: bool,
    // Saved `current_is_array` flags of enclosing compounds.
    stack: Vec<bool>,
    // True right after a ':' was written without its trailing space.
    at_colon: bool,
    // Byte sequence written once per indent level.
    indent: &'a [u8],
    // If true, '{'/'[' stay on the key's line and ':' gets a space.
    braces_same_line: bool,
}
impl<'a> HjsonFormatter<'a> {
    /// Construct a formatter that defaults to using two spaces for indentation.
    pub fn new() -> Self {
        HjsonFormatter::with_indent(b"  ")
    }

    /// Construct a formatter that uses the `indent` string for indentation.
    pub fn with_indent(indent: &'a [u8]) -> Self {
        HjsonFormatter {
            current_indent: 0,
            current_is_array: false,
            stack: Vec::new(),
            at_colon: false,
            indent,
            braces_same_line: false,
        }
    }
}
impl<'a> Formatter for HjsonFormatter<'a> {
    // Open a compound: objects nested inside objects start on their own
    // line (unless braces_same_line); track array-ness for the new level.
    fn open<W>(&mut self, writer: &mut W, ch: u8) -> Result<()>
    where
        W: io::Write,
    {
        if self.current_indent > 0 && !self.current_is_array && !self.braces_same_line {
            self.newline(writer, 0)?;
        } else {
            self.start_value(writer)?;
        }
        self.current_indent += 1;
        self.stack.push(self.current_is_array);
        self.current_is_array = ch == b'[';
        writer.write_all(&[ch]).map_err(From::from)
    }

    // Hjson uses newlines, not commas, to separate elements.
    fn comma<W>(&mut self, writer: &mut W, _: bool) -> Result<()>
    where
        W: io::Write,
    {
        writer.write_all(b"\n")?;
        indent(writer, self.current_indent, self.indent)
    }

    // The space after ':' is deferred via at_colon so quoteless/multiline
    // strings can decide their own leading whitespace.
    fn colon<W>(&mut self, writer: &mut W) -> Result<()>
    where
        W: io::Write,
    {
        self.at_colon = !self.braces_same_line;
        writer
            .write_all(if self.braces_same_line { b": " } else { b":" })
            .map_err(From::from)
    }

    // Close a compound: pop the array flag and put the bracket on its own
    // indented line.
    fn close<W>(&mut self, writer: &mut W, ch: u8) -> Result<()>
    where
        W: io::Write,
    {
        self.current_indent -= 1;
        self.current_is_array = self.stack.pop().expect("Internal error: json parsing");
        writer.write_all(b"\n")?;
        indent(writer, self.current_indent, self.indent)?;
        writer.write_all(&[ch]).map_err(From::from)
    }

    // Newline with a relative indent adjustment, clamped at zero.
    fn newline<W>(&mut self, writer: &mut W, add_indent: i32) -> Result<()>
    where
        W: io::Write,
    {
        self.at_colon = false;
        writer.write_all(b"\n")?;
        let ii = self.current_indent as i32 + add_indent;
        indent(writer, if ii < 0 { 0 } else { ii as usize }, self.indent)
    }

    // Emit the space owed after a ':' before the value begins.
    fn start_value<W>(&mut self, writer: &mut W) -> Result<()>
    where
        W: io::Write,
    {
        if self.at_colon {
            self.at_colon = false;
            writer.write_all(b" ")?
        }
        Ok(())
    }
}
/// Serializes and escapes a `&[u8]` into a Hjson string.
///
/// Writes an opening quote, copies unescaped runs verbatim, replaces the
/// JSON control characters with their two-byte escape sequences, and writes
/// a closing quote.
#[inline]
pub fn escape_bytes<W>(wr: &mut W, bytes: &[u8]) -> Result<()>
where
    W: io::Write,
{
    // Returns the escape sequence for `byte`, or None if it can be copied
    // through unchanged.
    fn escape_for(byte: u8) -> Option<&'static [u8]> {
        match byte {
            b'"' => Some(b"\\\""),
            b'\\' => Some(b"\\\\"),
            b'\x08' => Some(b"\\b"),
            b'\x0c' => Some(b"\\f"),
            b'\n' => Some(b"\\n"),
            b'\r' => Some(b"\\r"),
            b'\t' => Some(b"\\t"),
            _ => None,
        }
    }

    wr.write_all(b"\"")?;

    // Index of the first byte not yet flushed to the writer.
    let mut unwritten = 0;
    for (idx, &byte) in bytes.iter().enumerate() {
        if let Some(seq) = escape_for(byte) {
            if unwritten < idx {
                wr.write_all(&bytes[unwritten..idx])?;
            }
            wr.write_all(seq)?;
            unwritten = idx + 1;
        }
    }

    if unwritten != bytes.len() {
        wr.write_all(&bytes[unwritten..])?;
    }

    wr.write_all(b"\"")?;
    Ok(())
}
/// Serializes and escapes a `&str` into a Hjson string.
///
/// Chooses among the three Hjson string forms: quoteless (when the string
/// cannot be mistaken for a number, keyword or punctuation), quoted with
/// escapes, or multiline `'''` (when escapes would otherwise be needed).
#[inline]
pub fn quote_str<W, F>(wr: &mut W, formatter: &mut F, value: &str) -> Result<()>
where
    W: io::Write,
    F: Formatter,
{
    lazy_static! {
        // NEEDS_ESCAPE tests if the string can be written without escapes
        static ref NEEDS_ESCAPE: Regex = Regex::new("[\\\\\"\x00-\x1f\x7f-\u{9f}\u{00ad}\u{0600}-\u{0604}\u{070f}\u{17b4}\u{17b5}\u{200c}-\u{200f}\u{2028}-\u{202f}\u{2060}-\u{206f}\u{feff}\u{fff0}-\u{ffff}]").expect("Internal error: json parsing");
        // NEEDS_QUOTES tests if the string can be written as a quoteless string (includes needsEscape but without \\ and \")
        static ref NEEDS_QUOTES: Regex = Regex::new("^\\s|^\"|^'''|^#|^/\\*|^//|^\\{|^\\}|^\\[|^\\]|^:|^,|\\s$|[\x00-\x1f\x7f-\u{9f}\u{00ad}\u{0600}-\u{0604}\u{070f}\u{17b4}\u{17b5}\u{200c}-\u{200f}\u{2028}-\u{202f}\u{2060}-\u{206f}\u{feff}\u{fff0}-\u{ffff}]").expect("Internal error: json parsing");
        // NEEDS_ESCAPEML tests if the string can be written as a multiline string (includes needsEscape but without \n, \r, \\ and \")
        static ref NEEDS_ESCAPEML: Regex = Regex::new("'''|[\x00-\x09\x0b\x0c\x0e-\x1f\x7f-\u{9f}\u{00ad}\u{0600}-\u{0604}\u{070f}\u{17b4}\u{17b5}\u{200c}-\u{200f}\u{2028}-\u{202f}\u{2060}-\u{206f}\u{feff}\u{fff0}-\u{ffff}]").expect("Internal error: json parsing");
        // starts with a keyword and optionally is followed by a comment
        static ref STARTS_WITH_KEYWORD: Regex = Regex::new(r#"^(true|false|null)\s*((,|\]|\}|#|//|/\*).*)?$"#).expect("Internal error: json parsing");
    }

    if value.is_empty() {
        // Empty strings must always be quoted.
        formatter.start_value(wr)?;
        return escape_bytes(wr, value.as_bytes());
    }

    // Check if we can insert this string without quotes
    // see hjson syntax (must not parse as true, false, null or number)
    let mut pn = ParseNumber::new(value.bytes());
    let is_number = pn.parse(true).is_ok();

    if is_number || NEEDS_QUOTES.is_match(value) || STARTS_WITH_KEYWORD.is_match(value) {
        // First check if the string can be expressed in multiline format or
        // we must replace the offending characters with safe escape sequences.
        if NEEDS_ESCAPE.is_match(value) && !NEEDS_ESCAPEML.is_match(value)
        /* && !isRootObject */
        {
            ml_str(wr, formatter, value)
        } else {
            formatter.start_value(wr)?;
            escape_bytes(wr, value.as_bytes())
        }
    } else {
        // without quotes
        formatter.start_value(wr)?;
        wr.write_all(value.as_bytes()).map_err(From::from)
    }
}
/// Serializes and escapes a `&str` into a multiline Hjson string.
pub fn ml_str<W, F>(wr: &mut W, formatter: &mut F, value: &str) -> Result<()>
where
    W: io::Write,
    F: Formatter,
{
    // Emit the string in Hjson's ''' (multiline) syntax.
    let parts: Vec<&str> = value.split('\n').collect();
    match parts.as_slice() {
        [only_line] => {
            // A single-line value still benefits from the multiline form:
            // it avoids escaping the \ character (e.g. when used in a regex).
            formatter.start_value(wr)?;
            wr.write_all(b"'''")?;
            wr.write_all(only_line.as_bytes())?;
            wr.write_all(b"'''")?
        }
        _ => {
            formatter.newline(wr, 1)?;
            wr.write_all(b"'''")?;
            for line in &parts {
                // Empty lines are emitted without indentation (-999 sentinel).
                formatter.newline(wr, if line.is_empty() { -999 } else { 1 })?;
                wr.write_all(line.as_bytes())?;
            }
            formatter.newline(wr, 1)?;
            wr.write_all(b"'''")?;
        }
    }
    Ok(())
}
/// Serializes and escapes a `&str` into a Hjson key.
#[inline]
pub fn escape_key<W>(wr: &mut W, value: &str) -> Result<()>
where
    W: io::Write,
{
    lazy_static! {
        static ref NEEDS_ESCAPE_NAME: Regex =
            Regex::new(r#"[,\{\[\}\]\s:#"]|//|/\*|'''|^$"#).expect("Internal error: json parsing");
    }
    // Keys free of reserved punctuation, whitespace, comment starters and
    // the empty string may be written without quotes.
    if !NEEDS_ESCAPE_NAME.is_match(value) {
        return wr.write_all(value.as_bytes()).map_err(From::from);
    }
    escape_bytes(wr, value.as_bytes()).map_err(From::from)
}
/// Serializes and escapes a single `char` as a Hjson string fragment.
#[inline]
fn escape_char<W>(wr: &mut W, value: char) -> Result<()>
where
    W: io::Write,
{
    // `char::encode_utf8` is stable, so we can encode into a 4-byte stack
    // buffer instead of allocating a temporary `String` (this resolves the
    // old FIXME about stable-Rust compatibility).
    let mut buf = [0u8; 4];
    escape_bytes(wr, value.encode_utf8(&mut buf).as_bytes())
}
/// Writes an `f32` value, substituting `null` for NaN and infinities,
/// which have no Hjson representation.
fn fmt_f32_or_null<W>(wr: &mut W, value: f32) -> Result<()>
where
    W: io::Write,
{
    // `is_finite()` covers Normal, Subnormal and Zero — exactly the
    // classifications the original match wrote as numbers.
    if value.is_finite() {
        wr.write_all(fmt_small(value).as_bytes())?;
    } else {
        wr.write_all(b"null")?;
    }
    Ok(())
}
/// Writes an `f64` value, substituting `null` for NaN and infinities,
/// which have no Hjson representation.
fn fmt_f64_or_null<W>(wr: &mut W, value: f64) -> Result<()>
where
    W: io::Write,
{
    // `is_finite()` covers Normal, Subnormal and Zero — exactly the
    // classifications the original match wrote as numbers.
    if value.is_finite() {
        wr.write_all(fmt_small(value).as_bytes())?;
    } else {
        wr.write_all(b"null")?;
    }
    Ok(())
}
/// Writes the indentation unit `s` to `wr` exactly `n` times.
fn indent<W>(wr: &mut W, n: usize, s: &[u8]) -> Result<()>
where
    W: io::Write,
{
    // Repeat the indent unit n times back to back.
    (0..n).try_for_each(|_| wr.write_all(s))?;
    Ok(())
}
// format similar to es6
/// Formats a number the way es6 would: picks the shorter of the plain and
/// the exponential representation, writing non-negative exponents as `e+`.
fn fmt_small<N>(value: N) -> String
where
    N: Display + LowerExp,
{
    let plain = value.to_string();
    let exponential = format!("{:e}", value);
    // Plain form wins ties and anything within one character of the
    // exponential form.
    if plain.len() <= exponential.len() + 1 {
        plain
    } else if exponential.contains("e-") {
        exponential
    } else {
        // es6 prints an explicit '+' for non-negative exponents, e.g. "1e+5".
        exponential.replace("e", "e+")
    }
}
/// Encode the specified struct into a Hjson `[u8]` writer.
#[inline]
pub fn to_writer<W, T>(writer: &mut W, value: &T) -> Result<()>
where
    W: io::Write,
    T: ser::Serialize,
{
    // Drive serialization through a fresh Serializer over the writer.
    let mut serializer = Serializer::new(writer);
    value.serialize(&mut serializer)?;
    Ok(())
}
/// Encode the specified struct into a Hjson `[u8]` buffer.
#[inline]
pub fn to_vec<T>(value: &T) -> Result<Vec<u8>>
where
    T: ser::Serialize,
{
    // Writing into a Vec cannot fail on the I/O side, so any error comes
    // from the value being serialized.
    let mut buffer = Vec::with_capacity(128);
    to_writer(&mut buffer, value)?;
    Ok(buffer)
}
/// Encode the specified struct into a Hjson `String` buffer.
#[inline]
pub fn to_string<T>(value: &T) -> Result<String>
where
    T: ser::Serialize,
{
    // Serializer output is valid UTF-8, so this conversion should not fail.
    let bytes = to_vec(value)?;
    Ok(String::from_utf8(bytes)?)
}
| 27.762977 | 300 | 0.566711 |
230141b03df69b483a7ab51f78b64e06cd928e32 | 58,951 | //! Query planning goes through the following stages:
//! 1. Logical plan produced by DataFusion. Contains table scans of [CubeTableLogical], does not
//! know which physical nodes it has to query.
//! 2. [choose_index] transformation will replace the index and particular partitions to query.
//! It will also place [ClusterSendNode] into the correct place.
//! At this point, the logical plan is finalized, it only scans [CubeTable]s and contains
//! enough information to distribute the plan into workers.
//! 3. We serialize the resulting logical plan into [SerializedPlan] and send it to workers.
//! 4. [CubeQueryPlanner] is used on both the router and the workers to produce a physical plan.
//! Note that workers and the router produce different plans:
//! - Router produces a physical plan that handles the "top" part of the logical plan, above
//! the cluster send.
//! - Workers take only the "bottom" part part of the logical plan, below the cluster send.
//! In addition, workers will replace all table scans of data they do not have with empty
//! results.
//!
//! At this point we also optimize the physical plan to ensure we do as much work as possible
//! on the workers, see [CubeQueryPlanner] for details.
use std::collections::hash_map::RandomState;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use arrow::datatypes::{Field, SchemaRef};
use async_trait::async_trait;
use datafusion::error::DataFusionError;
use datafusion::execution::context::ExecutionContextState;
use datafusion::logical_plan::{DFSchemaRef, Expr, LogicalPlan, UserDefinedLogicalNode};
use datafusion::physical_plan::empty::EmptyExec;
use datafusion::physical_plan::planner::ExtensionPlanner;
use datafusion::physical_plan::{
ExecutionPlan, OptimizerHints, Partitioning, PhysicalPlanner, SendableRecordBatchStream,
};
use flatbuffers::bitflags::_core::any::Any;
use flatbuffers::bitflags::_core::fmt::Formatter;
use itertools::Itertools;
use crate::cluster::Cluster;
use crate::metastore::multi_index::MultiPartition;
use crate::metastore::table::{Table, TablePath};
use crate::metastore::{Chunk, IdRow, Index, MetaStore, Partition, Schema};
use crate::queryplanner::optimizations::rewrite_plan::{rewrite_plan, PlanRewriter};
use crate::queryplanner::partition_filter::PartitionFilter;
use crate::queryplanner::query_executor::{ClusterSendExec, CubeTable};
use crate::queryplanner::serialized_plan::{IndexSnapshot, PartitionSnapshot, SerializedPlan};
use crate::queryplanner::topk::{materialize_topk, plan_topk, ClusterAggregateTopK};
use crate::queryplanner::CubeTableLogical;
use crate::CubeError;
use serde::{Deserialize as SerdeDeser, Deserializer, Serialize as SerdeSer, Serializer};
use serde_derive::Deserialize;
use serde_derive::Serialize;
use std::iter::FromIterator;
/// Test-only convenience wrapper around [choose_index_ext] that always
/// enables the TopK materialization rewrite.
#[cfg(test)]
pub async fn choose_index(
    p: &LogicalPlan,
    metastore: &dyn PlanIndexStore,
) -> Result<(LogicalPlan, PlanningMeta), DataFusionError> {
    choose_index_ext(p, metastore, true).await
}
/// Information required to distribute the logical plan into multiple workers.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PlanningMeta {
    /// Chosen index snapshots, one per table scan in the plan (in traversal order).
    pub indices: Vec<IndexSnapshot>,
    /// Non-empty only if indices point to multi-partitions.
    /// Custom serde handlers as flatbuffers can't handle hash maps with integer keys.
    #[serde(deserialize_with = "de_vec_as_map")]
    #[serde(serialize_with = "se_vec_as_map")]
    pub multi_part_subtree: HashMap<u64, MultiPartition>,
}
/// Serializes an integer-keyed map as a list of pairs, since flatbuffers
/// cannot encode hash maps with integer keys.
fn se_vec_as_map<S: Serializer>(m: &HashMap<u64, MultiPartition>, s: S) -> Result<S::Ok, S::Error> {
    let pairs: Vec<(&u64, &MultiPartition)> = m.iter().collect();
    pairs.serialize(s)
}
/// Deserializes the pair-list produced by [se_vec_as_map] back into a map.
fn de_vec_as_map<'de, D: Deserializer<'de>>(
    d: D,
) -> Result<HashMap<u64, MultiPartition>, D::Error> {
    let pairs = Vec::<(u64, MultiPartition)>::deserialize(d)?;
    Ok(pairs.into_iter().collect())
}
/// Chooses indexes and partitions for every table scan in `p` and rewrites
/// the plan to reference them, inserting [ClusterSendNode]s where the plan
/// must be distributed. Returns the finalized plan plus the [PlanningMeta]
/// needed to ship it to workers. `enable_topk` turns on the TopK rewrite.
pub async fn choose_index_ext(
    p: &LogicalPlan,
    metastore: &dyn PlanIndexStore,
    enable_topk: bool,
) -> Result<(LogicalPlan, PlanningMeta), DataFusionError> {
    // Prepare information to choose the index.
    let mut collector = CollectConstraints::default();
    rewrite_plan(p, &None, &mut collector)?;
    // Consult metastore to choose the index.
    // TODO should be single snapshot read to ensure read consistency here
    let tables = metastore
        .get_tables_with_indexes(
            collector
                .constraints
                .iter()
                .map(|c| {
                    // TODO: use ids.
                    let schema = c.table.schema.get_row().get_name().clone();
                    let table = c.table.table.get_row().get_table_name().clone();
                    (schema, table)
                })
                .collect_vec(),
        )
        .await?;
    // Results come back in request order, one entry per constraint.
    assert_eq!(tables.len(), collector.constraints.len());
    let mut candidates = Vec::new();
    for (c, inputs) in collector.constraints.iter().zip(tables) {
        candidates.push(pick_index(c, inputs.0, inputs.1, inputs.2).await?)
    }
    // We pick partitioned index only when all tables request the same one.
    let mut indices: Vec<_> = match all_have_same_partitioned_index(&candidates) {
        true => candidates
            .into_iter()
            .map(|c| c.partitioned_index.unwrap())
            .collect(),
        // We sometimes propagate 'index for join not found' error here.
        false => candidates
            .into_iter()
            .map(|c| c.ordinary_index)
            .collect::<Result<_, DataFusionError>>()?,
    };
    // TODO should be single snapshot read to ensure read consistency here
    let partitions = metastore
        .get_active_partitions_and_chunks_by_index_id_for_select(
            indices.iter().map(|i| i.index.get_id()).collect_vec(),
        )
        .await?;
    assert_eq!(partitions.len(), indices.len());
    // Prune partitions per index using the collected scan filters.
    for ((i, c), ps) in indices
        .iter_mut()
        .zip(collector.constraints.iter())
        .zip(partitions)
    {
        i.partitions = pick_partitions(i, c, ps)?
    }
    // We have enough information to finalize the logical plan.
    let mut r = ChooseIndex {
        chosen_indices: &indices,
        next_index: 0,
        enable_topk,
    };
    let plan = rewrite_plan(p, &(), &mut r)?;
    // Every chosen index must have been consumed by exactly one table scan.
    assert_eq!(r.next_index, indices.len());
    // Collect multi-partition ids so workers can reconstruct the subtree.
    let mut multi_parts = Vec::new();
    for i in &indices {
        for p in &i.partitions {
            if let Some(m) = p.partition.get_row().multi_partition_id() {
                multi_parts.push(m);
            }
        }
    }
    // TODO should be single snapshot read to ensure read consistency here
    let multi_part_subtree = metastore.get_multi_partition_subtree(multi_parts).await?;
    Ok((
        plan,
        PlanningMeta {
            indices,
            multi_part_subtree,
        },
    ))
}
/// Returns true when every candidate proposes a partitioned index belonging
/// to the same multi-index, which is the only case where partitioned indexes
/// can be used for the whole query. Vacuously true for an empty slice.
fn all_have_same_partitioned_index(cs: &[IndexCandidate]) -> bool {
    // Multi-index id of a candidate's partitioned index, if it has one.
    let multi_index_id = |c: &IndexCandidate| {
        c.partitioned_index
            .as_ref()
            .and_then(|i| i.index.get_row().multi_index_id())
    };
    // Idiom fix: replace the manual indexed loop and trailing `return true;`
    // with `split_first` + iterator `all`.
    match cs.split_first() {
        None => true,
        Some((first, rest)) => match multi_index_id(first) {
            // First candidate has no partitioned index at all.
            None => false,
            Some(id) => rest.iter().all(|c| multi_index_id(c) == Some(id)),
        },
    }
}
/// Read-only view of the metastore used during planning; abstracted as a
/// trait so tests can substitute an in-memory implementation.
#[async_trait]
pub trait PlanIndexStore: Send + Sync {
    /// For each `(schema, table)` name pair, returns the schema row, table
    /// row and the table's indexes (order mirrors `inputs`).
    async fn get_tables_with_indexes(
        &self,
        inputs: Vec<(String, String)>,
    ) -> Result<Vec<(IdRow<Schema>, IdRow<Table>, Vec<IdRow<Index>>)>, CubeError>;
    /// Active partitions and their chunks for each requested index id
    /// (order mirrors `index_id`).
    async fn get_active_partitions_and_chunks_by_index_id_for_select(
        &self,
        index_id: Vec<u64>,
    ) -> Result<Vec<Vec<(IdRow<Partition>, Vec<IdRow<Chunk>>)>>, CubeError>;
    /// Multi-partition rows for the given ids and their ancestors, keyed by id.
    async fn get_multi_partition_subtree(
        &self,
        multi_part_ids: Vec<u64>,
    ) -> Result<HashMap<u64, MultiPartition>, CubeError>;
}
// Plain delegation: the real metastore already provides these operations.
#[async_trait]
impl<'a> PlanIndexStore for &'a dyn MetaStore {
    async fn get_tables_with_indexes(
        &self,
        inputs: Vec<(String, String)>,
    ) -> Result<Vec<(IdRow<Schema>, IdRow<Table>, Vec<IdRow<Index>>)>, CubeError> {
        MetaStore::get_tables_with_indexes(*self, inputs).await
    }
    async fn get_active_partitions_and_chunks_by_index_id_for_select(
        &self,
        index_id: Vec<u64>,
    ) -> Result<Vec<Vec<(IdRow<Partition>, Vec<IdRow<Chunk>>)>>, CubeError> {
        MetaStore::get_active_partitions_and_chunks_by_index_id_for_select(*self, index_id).await
    }
    async fn get_multi_partition_subtree(
        &self,
        multi_part_ids: Vec<u64>,
    ) -> Result<HashMap<u64, MultiPartition>, CubeError> {
        MetaStore::get_multi_partition_subtree(*self, multi_part_ids).await
    }
}
/// Columns a parent operator wants its input sorted on.
#[derive(Clone)]
struct SortColumns {
    sort_on: Vec<String>,
    // True when the sort order is mandatory (joins), false when it is
    // merely beneficial (aggregations).
    required: bool,
}
/// Everything collected about a single table scan that influences index choice.
struct IndexConstraints {
    sort_on: Option<SortColumns>,
    table: TablePath,
    projection: Option<Vec<usize>>,
    filters: Vec<Expr>,
}
/// Plan visitor that gathers an [IndexConstraints] per table scan.
#[derive(Default)]
struct CollectConstraints {
    constraints: Vec<IndexConstraints>,
}
impl PlanRewriter for CollectConstraints {
    // The context carries the sort columns requested by the nearest
    // interested ancestor (aggregation or join).
    type Context = Option<SortColumns>;
    // Records an [IndexConstraints] for every table scan; leaves the plan
    // itself unchanged.
    fn rewrite(
        &mut self,
        n: LogicalPlan,
        c: &Self::Context,
    ) -> Result<LogicalPlan, DataFusionError> {
        match &n {
            LogicalPlan::TableScan {
                projection,
                filters,
                source,
                ..
            } => {
                let table = source.as_any().downcast_ref::<CubeTableLogical>().unwrap();
                self.constraints.push(IndexConstraints {
                    sort_on: c.clone(),
                    table: table.table.clone(),
                    projection: projection.clone(),
                    filters: filters.clone(),
                })
            }
            _ => {}
        }
        Ok(n)
    }
    // Aggregations prefer (but do not require) inputs sorted on the group-by
    // columns; anything else leaves the context untouched.
    fn enter_node(
        &mut self,
        n: &LogicalPlan,
        _: &Option<SortColumns>,
    ) -> Option<Option<SortColumns>> {
        // Extracts a plain column name, looking through aliases.
        fn column_name(expr: &Expr) -> Option<String> {
            match expr {
                Expr::Alias(e, _) => column_name(e),
                Expr::Column(col) => Some(col.name.clone()), // TODO use alias
                _ => None,
            }
        }
        match n {
            LogicalPlan::Aggregate { group_expr, .. } => {
                let sort_on = group_expr.iter().map(column_name).collect::<Vec<_>>();
                // Only usable when every group expression is a plain column.
                if !sort_on.is_empty() && sort_on.iter().all(|c| c.is_some()) {
                    Some(Some(SortColumns {
                        sort_on: sort_on.into_iter().map(|c| c.unwrap()).collect(),
                        required: false,
                    }))
                } else {
                    Some(None)
                }
            }
            _ => None,
        }
    }
    // Joins require their left input sorted on the left join keys.
    fn enter_join_left(
        &mut self,
        join: &LogicalPlan,
        _: &Option<SortColumns>,
    ) -> Option<Option<SortColumns>> {
        let join_on;
        if let LogicalPlan::Join { on, .. } = join {
            join_on = on;
        } else {
            panic!("expected join node");
        }
        Some(Some(SortColumns {
            sort_on: join_on.iter().map(|(l, _)| l.name.clone()).collect(),
            required: true,
        }))
    }
    // Joins require their right input sorted on the right join keys.
    fn enter_join_right(
        &mut self,
        join: &LogicalPlan,
        _c: &Self::Context,
    ) -> Option<Self::Context> {
        let join_on;
        if let LogicalPlan::Join { on, .. } = join {
            join_on = on;
        } else {
            panic!("expected join node");
        }
        Some(Some(SortColumns {
            sort_on: join_on.iter().map(|(_, r)| r.name.clone()).collect(),
            required: true,
        }))
    }
}
/// Plan visitor that replaces each [CubeTableLogical] scan with the index
/// chosen for it, consuming `chosen_indices` in traversal order.
struct ChooseIndex<'a> {
    // Position of the next unconsumed entry in `chosen_indices`.
    next_index: usize,
    chosen_indices: &'a [IndexSnapshot],
    enable_topk: bool,
}
impl PlanRewriter for ChooseIndex<'_> {
    type Context = ();
    fn rewrite(
        &mut self,
        n: LogicalPlan,
        _: &Self::Context,
    ) -> Result<LogicalPlan, DataFusionError> {
        // Swap the logical table for the chosen index, then keep ClusterSend
        // as high in the plan as possible so workers do the maximum work.
        let p = self.choose_table_index(n)?;
        let mut p = pull_up_cluster_send(p)?;
        if self.enable_topk {
            p = materialize_topk(p)?;
        }
        Ok(p)
    }
}
/// Returns the [ClusterSendNode] behind `p` when it is the matching
/// extension node, `None` otherwise.
fn try_extract_cluster_send(p: &LogicalPlan) -> Option<&ClusterSendNode> {
    match p {
        LogicalPlan::Extension { node } => node.as_any().downcast_ref::<ClusterSendNode>(),
        _ => None,
    }
}
impl ChooseIndex<'_> {
    /// Replaces a [CubeTableLogical] scan with the next chosen [IndexSnapshot]
    /// and wraps it into a [ClusterSendNode]; all other nodes pass through.
    fn choose_table_index(&mut self, mut p: LogicalPlan) -> Result<LogicalPlan, DataFusionError> {
        match &mut p {
            LogicalPlan::TableScan { source, .. } => {
                assert!(
                    self.next_index < self.chosen_indices.len(),
                    "inconsistent state"
                );
                let table = &source
                    .as_any()
                    .downcast_ref::<CubeTableLogical>()
                    .unwrap()
                    .table;
                // Sanity check: chosen indices must line up with the scans
                // in the same traversal order used by CollectConstraints.
                assert_eq!(
                    table.table.get_id(),
                    self.chosen_indices[self.next_index]
                        .table_path
                        .table
                        .get_id()
                );
                let snapshot = self.chosen_indices[self.next_index].clone();
                self.next_index += 1;
                let table_schema = source.schema();
                *source = Arc::new(CubeTable::try_new(
                    snapshot.clone(),
                    // Filled by workers
                    HashMap::new(),
                    Vec::new(),
                )?);
                // The physical index scan must expose the same schema as the
                // logical table it replaces.
                let index_schema = source.schema();
                assert_eq!(table_schema, index_schema);
                return Ok(ClusterSendNode {
                    input: Arc::new(p),
                    snapshots: vec![vec![snapshot]],
                }
                .into_plan());
            }
            _ => return Ok(p),
        }
    }
}
/// Result of index selection for a single table scan.
struct IndexCandidate {
    /// May contain an error for an unmatched index (e.g. no index fits a join).
    pub ordinary_index: Result<IndexSnapshot, DataFusionError>,
    /// Set when the table has a matching partitioned (multi) index.
    pub partitioned_index: Option<IndexSnapshot>,
}
/// Picks the index, but not partitions snapshots.
///
/// Scores every index of `table` against the scan's projection and required
/// sort order (from joins/aggregations) and returns both the best ordinary
/// index and, if present, a matching partitioned (multi) index.
async fn pick_index(
    c: &IndexConstraints,
    schema: IdRow<Schema>,
    table: IdRow<Table>,
    indices: Vec<IdRow<Index>>,
) -> Result<IndexCandidate, DataFusionError> {
    let sort_on = c.sort_on.as_ref().map(|sc| (&sc.sort_on, sc.required));
    let mut indices = indices.into_iter();
    // The first index is always the table's default index.
    let default_index = indices.next().expect("no default index");
    let (index, mut partitioned_index, sort_on) = if let Some(projection_column_indices) =
        &c.projection
    {
        let projection_columns = CubeTable::project_to_table(&table, &projection_column_indices);
        let mut partitioned_index = None;
        let mut ordinary_index = None;
        // Lower score = projected columns sit earlier in the index key.
        let mut ordinary_score = usize::MAX;
        for i in indices {
            if let Some((join_on_columns, _)) = sort_on.as_ref() {
                // TODO: join_on_columns may be larger than sort_key_size of the index.
                let join_columns_in_index = join_on_columns
                    .iter()
                    .map(|c| {
                        i.get_row()
                            .get_columns()
                            .iter()
                            .find(|ic| ic.get_name().as_str() == c.as_str())
                            .cloned()
                    })
                    .collect::<Option<Vec<_>>>();
                let join_columns_in_index = match join_columns_in_index {
                    None => continue,
                    Some(c) => c,
                };
                let join_columns_indices =
                    CubeTable::project_to_index_positions(&join_columns_in_index, &i);
                // The sort columns must be an exact prefix of the index key.
                let matches = join_columns_indices
                    .iter()
                    .enumerate()
                    .all(|(i, col_i)| Some(i) == *col_i);
                if !matches {
                    continue;
                }
            }
            let projected_index_positions =
                CubeTable::project_to_index_positions(&projection_columns, &i);
            // `None` when some projected column is missing from the index.
            let score = projected_index_positions
                .into_iter()
                .fold_options(0, |a, b| a + b);
            if let Some(score) = score {
                if i.get_row().multi_index_id().is_some() {
                    debug_assert!(partitioned_index.is_none());
                    partitioned_index = Some(i);
                    continue;
                }
                if score < ordinary_score {
                    ordinary_index = Some(i);
                    ordinary_score = score;
                }
            }
        }
        if let Some(index) = ordinary_index {
            (Ok(index), partitioned_index, sort_on)
        } else {
            if let Some((join_on_columns, true)) = sort_on.as_ref() {
                // A required sort order (join) with no matching index is an
                // error; suggest the index the user should create.
                let table_name = c.table.table_name();
                let err = Err(DataFusionError::Plan(format!(
                    "Can't find index to join table {} on {}. Consider creating index: CREATE INDEX {}_{} ON {} ({})",
                    table_name,
                    join_on_columns.join(", "),
                    table.get_row().get_table_name(),
                    join_on_columns.join("_"),
                    table_name,
                    join_on_columns.join(", ")
                )));
                (err, partitioned_index, sort_on)
            } else {
                (Ok(default_index), partitioned_index, None)
            }
        }
    } else {
        if let Some((join_on_columns, _)) = sort_on {
            return Err(DataFusionError::Plan(format!(
                "Can't find index to join table {} on {} and projection push down optimization has been disabled. Invalid state.",
                c.table.table_name(),
                join_on_columns.join(", ")
            )));
        }
        (Ok(default_index), None, None)
    };
    // Only use partitioned index for joins. Joins are indicated by the required flag.
    if !sort_on
        .as_ref()
        .map(|(_, required)| *required)
        .unwrap_or(false)
    {
        partitioned_index = None;
    }
    let schema = Arc::new(schema);
    let create_snapshot = |index| {
        IndexSnapshot {
            index,
            partitions: Vec::new(), // filled with results of `pick_partitions` later.
            table_path: TablePath {
                table: table.clone(),
                schema: schema.clone(),
            },
            sort_on: sort_on.as_ref().map(|(cols, _)| (*cols).clone()),
        }
    };
    Ok(IndexCandidate {
        ordinary_index: index.map(create_snapshot),
        partitioned_index: partitioned_index.map(create_snapshot),
    })
}
/// Prunes `partitions` against the scan filters of `c`, keeping only
/// partitions whose [min, max] key range can match the filter.
fn pick_partitions(
    i: &IndexSnapshot,
    c: &IndexConstraints,
    partitions: Vec<(IdRow<Partition>, Vec<IdRow<Chunk>>)>,
) -> Result<Vec<PartitionSnapshot>, DataFusionError> {
    let partition_filter = PartitionFilter::extract(&partition_filter_schema(&i.index), &c.filters);
    log::trace!("Extracted partition filter is {:?}", partition_filter);
    let candidate_partitions = partitions.len();
    let mut pruned_partitions = 0;
    let mut partition_snapshots = Vec::new();
    for (partition, chunks) in partitions.into_iter() {
        // `None` bounds mean the partition is unbounded on that side.
        let min_row = partition
            .get_row()
            .get_min_val()
            .as_ref()
            .map(|r| r.values().as_slice());
        let max_row = partition
            .get_row()
            .get_max_val()
            .as_ref()
            .map(|r| r.values().as_slice());
        if !partition_filter.can_match(min_row, max_row) {
            pruned_partitions += 1;
            continue;
        }
        partition_snapshots.push(PartitionSnapshot { chunks, partition });
    }
    log::trace!(
        "Pruned {} of {} partitions",
        pruned_partitions,
        candidate_partitions
    );
    Ok(partition_snapshots)
}
/// Builds the Arrow schema of the index's sort-key prefix — the only
/// columns that participate in partition pruning.
fn partition_filter_schema(index: &IdRow<Index>) -> arrow::datatypes::Schema {
    let key_len = index.get_row().sort_key_size() as usize;
    let fields: Vec<Field> = index
        .get_row()
        .columns()
        .iter()
        .take(key_len)
        .map(|c| c.clone().into())
        .collect();
    arrow::datatypes::Schema::new(fields)
}
/// Logical plan extension marking the boundary between the router part of
/// the plan (above) and the part distributed to workers (below).
#[derive(Debug, Clone)]
pub struct ClusterSendNode {
    pub input: Arc<LogicalPlan>,
    // One group of index snapshots per joined/unioned input subtree.
    pub snapshots: Vec<Vec<IndexSnapshot>>,
}
impl ClusterSendNode {
    /// Wraps this node into a `LogicalPlan::Extension` so it can be embedded
    /// into a plan tree.
    pub fn into_plan(self) -> LogicalPlan {
        let node = Arc::new(self);
        LogicalPlan::Extension { node }
    }
}
impl UserDefinedLogicalNode for ClusterSendNode {
fn as_any(&self) -> &dyn Any {
self
}
fn inputs(&self) -> Vec<&LogicalPlan> {
vec![self.input.as_ref()]
}
fn schema(&self) -> &DFSchemaRef {
self.input.schema()
}
fn expressions(&self) -> Vec<Expr> {
vec![]
}
fn prevent_predicate_push_down_columns(&self) -> HashSet<String, RandomState> {
HashSet::new()
}
fn fmt_for_explain(&self, f: &mut Formatter<'a>) -> std::fmt::Result {
write!(f, "ClusterSend")
}
fn from_template(
&self,
exprs: &[Expr],
inputs: &[LogicalPlan],
) -> Arc<dyn UserDefinedLogicalNode + Send + Sync> {
assert!(exprs.is_empty());
assert_eq!(inputs.len(), 1);
Arc::new(ClusterSendNode {
input: Arc::new(inputs[0].clone()),
snapshots: self.snapshots.clone(),
})
}
}
/// Moves a [ClusterSendNode] found directly below `p` above it, so that as
/// much of the plan as possible executes on the workers. Nodes that must run
/// on the router (aggregates, sorts, limits) are left unchanged.
fn pull_up_cluster_send(mut p: LogicalPlan) -> Result<LogicalPlan, DataFusionError> {
    let snapshots;
    match &mut p {
        // These nodes have no children, return unchanged.
        LogicalPlan::TableScan { .. }
        | LogicalPlan::EmptyRelation { .. }
        | LogicalPlan::CreateExternalTable { .. }
        | LogicalPlan::Explain { .. } => return Ok(p),
        // The ClusterSend itself, return unchanged.
        LogicalPlan::Extension { .. } => return Ok(p),
        // These nodes collect results from multiple partitions, return unchanged.
        LogicalPlan::Aggregate { .. }
        | LogicalPlan::Sort { .. }
        | LogicalPlan::Limit { .. }
        | LogicalPlan::Skip { .. }
        | LogicalPlan::Repartition { .. } => return Ok(p),
        // We can always pull cluster send for these nodes.
        LogicalPlan::Projection { input, .. } | LogicalPlan::Filter { input, .. } => {
            let send;
            if let Some(s) = try_extract_cluster_send(input) {
                send = s;
            } else {
                return Ok(p);
            }
            snapshots = send.snapshots.clone();
            // Code after 'match' will wrap `p` in ClusterSend.
            *input = send.input.clone();
        }
        LogicalPlan::Union { inputs, .. } => {
            // Handle UNION over constants, e.g. inline data series.
            if inputs.iter().all(|p| try_extract_cluster_send(p).is_none()) {
                return Ok(p);
            }
            // Otherwise every input must be a ClusterSend; merge their
            // snapshot groups into a single group.
            let mut union_snapshots = Vec::new();
            for i in inputs {
                let send;
                if let Some(s) = try_extract_cluster_send(i) {
                    send = s;
                } else {
                    return Err(DataFusionError::Plan(
                        "UNION argument not supported".to_string(),
                    ));
                }
                union_snapshots.extend(send.snapshots.concat());
                // Code after 'match' will wrap `p` in ClusterSend.
                *i = send.input.as_ref().clone();
            }
            snapshots = vec![union_snapshots];
        }
        LogicalPlan::Join { left, right, .. } => {
            let lsend;
            let rsend;
            if let (Some(l), Some(r)) = (
                try_extract_cluster_send(left),
                try_extract_cluster_send(right),
            ) {
                lsend = l;
                rsend = r;
            } else {
                return Err(DataFusionError::Plan(
                    "JOIN argument not supported".to_string(),
                ));
            }
            // Joined sides keep separate snapshot groups.
            snapshots = lsend
                .snapshots
                .iter()
                .chain(rsend.snapshots.iter())
                .cloned()
                .collect();
            // Code after 'match' will wrap `p` in ClusterSend.
            *left = lsend.input.clone();
            *right = rsend.input.clone();
        }
        LogicalPlan::Window { .. } | LogicalPlan::CrossJoin { .. } => {
            return Err(DataFusionError::Internal(
                "unsupported operation".to_string(),
            ))
        }
    }
    Ok(ClusterSendNode {
        input: Arc::new(p),
        snapshots,
    }
    .into_plan())
}
/// Physical planner for CubeStore's logical plan extensions, used on both
/// the router and the workers.
pub struct CubeExtensionPlanner {
    // `None` on workers: their subplan is wrapped in [WorkerExec] instead of
    // being sent over the cluster.
    pub cluster: Option<Arc<dyn Cluster>>,
    pub serialized_plan: Arc<SerializedPlan>,
}
impl ExtensionPlanner for CubeExtensionPlanner {
    /// Converts [ClusterSendNode] and [ClusterAggregateTopK] extension nodes
    /// into their physical counterparts; returns `None` for unknown nodes.
    fn plan_extension(
        &self,
        planner: &dyn PhysicalPlanner,
        node: &dyn UserDefinedLogicalNode,
        _logical_inputs: &[&LogicalPlan],
        physical_inputs: &[Arc<dyn ExecutionPlan>],
        state: &ExecutionContextState,
    ) -> Result<Option<Arc<dyn ExecutionPlan>>, DataFusionError> {
        let inputs = physical_inputs;
        if let Some(cs) = node.as_any().downcast_ref::<ClusterSendNode>() {
            assert_eq!(inputs.len(), 1);
            let input = inputs.into_iter().next().unwrap();
            Ok(Some(self.plan_cluster_send(
                input.clone(),
                &cs.snapshots,
                input.schema(),
                false,
                usize::MAX,
            )?))
        } else if let Some(topk) = node.as_any().downcast_ref::<ClusterAggregateTopK>() {
            assert_eq!(inputs.len(), 1);
            let input = inputs.into_iter().next().unwrap();
            Ok(Some(plan_topk(planner, self, topk, input.clone(), state)?))
        } else {
            Ok(None)
        }
    }
}
impl CubeExtensionPlanner {
    /// Builds the physical node for a cluster send: [ClusterSendExec] on the
    /// router (cluster present) or a [WorkerExec] marker on workers.
    pub fn plan_cluster_send(
        &self,
        input: Arc<dyn ExecutionPlan>,
        snapshots: &Vec<Vec<IndexSnapshot>>,
        schema: SchemaRef,
        use_streaming: bool,
        max_batch_rows: usize,
    ) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
        // Nothing to scan: produce an empty result with the right schema.
        if snapshots.is_empty() {
            return Ok(Arc::new(EmptyExec::new(false, schema)));
        }
        // Note that MergeExecs are added automatically when needed.
        if let Some(c) = self.cluster.as_ref() {
            Ok(Arc::new(ClusterSendExec::new(
                schema,
                c.clone(),
                self.serialized_plan.clone(),
                snapshots,
                input,
                use_streaming,
            )))
        } else {
            Ok(Arc::new(WorkerExec {
                input,
                schema,
                max_batch_rows,
            }))
        }
    }
}
/// Produced on the worker, marks the subplan that the worker must execute. Anything above is the
/// router part of the plan and must be ignored.
#[derive(Debug)]
pub struct WorkerExec {
    pub input: Arc<dyn ExecutionPlan>,
    // TODO: remove and use `self.input.schema()`
    // This is a hacky workaround for wrong schema of joins after projection pushdown.
    pub schema: SchemaRef,
    // Upper bound on the number of rows per output batch.
    pub max_batch_rows: usize,
}
// WorkerExec is a transparent marker: every operation delegates to the
// wrapped input, except `schema()` (see the field comment on the struct).
#[async_trait]
impl ExecutionPlan for WorkerExec {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn schema(&self) -> SchemaRef {
        self.schema.clone()
    }
    fn output_partitioning(&self) -> Partitioning {
        self.input.output_partitioning()
    }
    fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
        vec![self.input.clone()]
    }
    fn with_new_children(
        &self,
        children: Vec<Arc<dyn ExecutionPlan>>,
    ) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
        assert_eq!(children.len(), 1);
        Ok(Arc::new(WorkerExec {
            input: children.into_iter().next().unwrap(),
            schema: self.schema.clone(),
            max_batch_rows: self.max_batch_rows,
        }))
    }
    fn output_hints(&self) -> OptimizerHints {
        self.input.output_hints()
    }
    async fn execute(
        &self,
        partition: usize,
    ) -> Result<SendableRecordBatchStream, DataFusionError> {
        self.input.execute(partition).await
    }
}
/// Use this to pick the part of the plan that the worker must execute.
pub fn get_worker_plan(
    p: &Arc<dyn ExecutionPlan>,
) -> Option<(Arc<dyn ExecutionPlan>, /*max_batch_rows*/ usize)> {
    // Found the marker: hand back its child and batching limit.
    if let Some(worker) = p.as_any().downcast_ref::<WorkerExec>() {
        return Some((worker.input.clone(), worker.max_batch_rows));
    }
    // We currently do not split inside joins or leaf nodes.
    let children = p.children();
    if children.len() == 1 {
        get_worker_plan(&children[0])
    } else {
        None
    }
}
#[cfg(test)]
pub mod tests {
use std::sync::Arc;
use arrow::datatypes::Schema as ArrowSchema;
use async_trait::async_trait;
use datafusion::datasource::TableProvider;
use datafusion::execution::context::ExecutionContext;
use datafusion::logical_plan::LogicalPlan;
use datafusion::physical_plan::udaf::AggregateUDF;
use datafusion::physical_plan::udf::ScalarUDF;
use datafusion::sql::parser::Statement as DFStatement;
use datafusion::sql::planner::{ContextProvider, SqlToRel};
use itertools::Itertools;
use pretty_assertions::assert_eq;
use crate::config::Config;
use crate::metastore::multi_index::MultiPartition;
use crate::metastore::table::{Table, TablePath};
use crate::metastore::{Chunk, Column, ColumnType, IdRow, Index, Partition, Schema};
use crate::queryplanner::planning::{choose_index, try_extract_cluster_send, PlanIndexStore};
use crate::queryplanner::pretty_printers::PPOptions;
use crate::queryplanner::query_executor::ClusterSendExec;
use crate::queryplanner::serialized_plan::RowRange;
use crate::queryplanner::{pretty_printers, CubeTableLogical};
use crate::sql::parser::{CubeStoreParser, Statement};
use crate::table::{Row, TableValue};
use crate::CubeError;
use datafusion::catalog::TableReference;
use std::collections::HashMap;
use std::iter::FromIterator;
    // Verifies index selection: filters keep the default index, while joins
    // pick indexes sorted on the join keys (expressed as pretty-printed plans).
    #[tokio::test]
    pub async fn test_choose_index() {
        let indices = default_indices();
        let plan = initial_plan("SELECT * FROM s.Customers WHERE customer_id = 1", &indices);
        assert_eq!(
            pretty_printers::pp_plan(&plan),
            "Projection, [s.Customers.customer_id, s.Customers.customer_name, s.Customers.customer_city, s.Customers.customer_registered_date]\
            \n  Filter\
            \n    Scan s.Customers, source: CubeTableLogical, fields: *"
        );
        let plan = choose_index(&plan, &indices).await.unwrap().0;
        assert_eq!(
            pretty_printers::pp_plan(&plan),
            "ClusterSend, indices: [[0]]\
            \n  Projection, [s.Customers.customer_id, s.Customers.customer_name, s.Customers.customer_city, s.Customers.customer_registered_date]\
            \n    Filter\
            \n      Scan s.Customers, source: CubeTable(index: default:0:[]), fields: *"
        );
        // Should prefer a non-default index for joins.
        let plan = initial_plan(
            "SELECT order_id, order_amount, customer_name \
                 FROM s.Orders \
                 JOIN s.Customers ON order_customer = customer_id",
            &indices,
        );
        let plan = choose_index(&plan, &indices).await.unwrap().0;
        assert_eq!(pretty_printers::pp_plan(&plan), "ClusterSend, indices: [[3], [0]]\
                                    \n  Projection, [s.Orders.order_id, s.Orders.order_amount, s.Customers.customer_name]\
                                    \n    Join on: [#s.Orders.order_customer = #s.Customers.customer_id]\
                                    \n      Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_id, order_customer, order_amount]\
                                    \n      Scan s.Customers, source: CubeTable(index: default:0:[]:sort_on[customer_id]), fields: [customer_id, customer_name]");
        let plan = initial_plan(
            "SELECT order_id, customer_name, product_name \
                 FROM s.Orders \
                 JOIN s.Customers on order_customer = customer_id \
                 JOIN s.Products ON order_product = product_id",
            &indices,
        );
        let plan = choose_index(&plan, &indices).await.unwrap().0;
        assert_eq!(pretty_printers::pp_plan(&plan), "ClusterSend, indices: [[3], [0], [5]]\
        \n  Projection, [s.Orders.order_id, s.Customers.customer_name, s.Products.product_name]\
        \n    Join on: [#s.Orders.order_product = #s.Products.product_id]\
        \n      Join on: [#s.Orders.order_customer = #s.Customers.customer_id]\
        \n        Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_id, order_customer, order_product]\
        \n        Scan s.Customers, source: CubeTable(index: default:0:[]:sort_on[customer_id]), fields: [customer_id, customer_name]\
        \n      Scan s.Products, source: CubeTable(index: default:5:[]:sort_on[product_id]), fields: *");
        let plan = initial_plan(
            "SELECT c2.customer_name \
                 FROM s.Orders \
                 JOIN s.Customers c1 on order_customer = c1.customer_id \
                 JOIN s.Customers c2 ON order_city = c2.customer_city \
                 WHERE c1.customer_name = 'Customer 1'",
            &indices,
        );
        let plan = choose_index(&plan, &indices).await.unwrap().0;
        assert_eq!(pretty_printers::pp_plan(&plan), "ClusterSend, indices: [[3], [0], [1]]\
        \n  Projection, [c2.customer_name]\
        \n    Join on: [#s.Orders.order_city = #c2.customer_city]\
        \n      Join on: [#s.Orders.order_customer = #c1.customer_id]\
        \n        Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_city]\
        \n        Filter\
        \n          Scan c1, source: CubeTable(index: default:0:[]:sort_on[customer_id]), fields: [customer_id, customer_name]\
        \n      Scan c2, source: CubeTable(index: by_city:1:[]:sort_on[customer_city]), fields: [customer_name, customer_city]");
    }
/// Verifies that `GROUP BY ... ORDER BY <aggregate> LIMIT n` queries are materialized into a
/// `ClusterAggregateTopK` node, and that queries outside the supported pattern are left alone
/// (no "TopK" node in the printed plan).
#[tokio::test]
pub async fn test_materialize_topk() {
    let indices = default_indices();
    let plan = initial_plan(
        "SELECT order_customer, SUM(order_amount) FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC LIMIT 10",
        &indices,
    );
    let plan = choose_index(&plan, &indices).await.unwrap().0;
    assert_eq!(
        pretty_printers::pp_plan(&plan),
        "Projection, [s.Orders.order_customer, SUM(s.Orders.order_amount)]\
       \n ClusterAggregateTopK, limit: 10\
       \n Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_amount]"
    );
    // Projections should be handled properly.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC LIMIT 10",
        &indices,
    );
    let plan = choose_index(&plan, &indices).await.unwrap().0;
    assert_eq!(
        pretty_printers::pp_plan(&plan),
        "Projection, [customer, amount]\
       \n ClusterAggregateTopK, limit: 10\
       \n Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_amount]"
    );
    // Ordering by the aggregate referenced through its projection position also works.
    let plan = initial_plan(
        "SELECT SUM(order_amount) `amount`, order_customer `customer` FROM s.Orders \
         GROUP BY 2 ORDER BY 1 DESC LIMIT 10",
        &indices,
    );
    let plan = choose_index(&plan, &indices).await.unwrap().0;
    let mut with_sort_by = PPOptions::default();
    with_sort_by.show_sort_by = true;
    assert_eq!(
        pretty_printers::pp_plan_ext(&plan, &with_sort_by),
        "Projection, [amount, customer]\
       \n ClusterAggregateTopK, limit: 10, sortBy: [2 desc null last]\
       \n Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_amount]"
    );
    // Ascending order is also ok.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 ASC LIMIT 10",
        &indices,
    );
    let plan = choose_index(&plan, &indices).await.unwrap().0;
    assert_eq!(
        pretty_printers::pp_plan_ext(&plan, &with_sort_by),
        "Projection, [customer, amount]\
       \n ClusterAggregateTopK, limit: 10, sortBy: [2 null last]\
       \n Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_amount]"
    );
    // MAX and MIN are ok, as well as multiple aggregation.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount`, \
         MIN(order_amount) `min_amount`, MAX(order_amount) `max_amount` \
         FROM s.Orders \
         GROUP BY 1 ORDER BY 3 DESC, 2 ASC LIMIT 10",
        &indices,
    );
    let mut verbose = with_sort_by;
    verbose.show_aggregations = true;
    let plan = choose_index(&plan, &indices).await.unwrap().0;
    assert_eq!(
        pretty_printers::pp_plan_ext(&plan, &verbose),
        "Projection, [customer, amount, min_amount, max_amount]\
       \n ClusterAggregateTopK, limit: 10, aggs: [SUM(#s.Orders.order_amount), MIN(#s.Orders.order_amount), MAX(#s.Orders.order_amount)], sortBy: [3 desc null last, 2 null last]\
       \n Scan s.Orders, source: CubeTable(index: by_customer:3:[]:sort_on[order_customer]), fields: [order_customer, order_amount]"
    );
    // Should not introduce TopK by mistake in unsupported cases.
    // No 'order by'.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    // No limit.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    // Sort by group key, not the aggregation result.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 1 DESC LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    // Unsupported aggregation function.
    let plan = initial_plan(
        "SELECT order_customer `customer`, AVG(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    let plan = initial_plan(
        "SELECT order_customer `customer`, COUNT(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    // Distinct aggregations.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(DISTINCT order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY 2 DESC LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
    // Complicated sort expressions.
    let plan = initial_plan(
        "SELECT order_customer `customer`, SUM(order_amount) `amount` FROM s.Orders \
         GROUP BY 1 ORDER BY amount * amount DESC LIMIT 10",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert!(!pp.contains("TopK"), "plan contained topk:\n{}", pp);
}
/// Exercises joins over a shared partitioned index (`#mi0`): both join sides should pick the
/// partitioned index, and once multi-partitions exist, their partitions must be distributed
/// across the configured select workers with matching key-range restrictions.
#[tokio::test]
pub async fn test_partitioned_index_join() {
    let mut indices = indices_with_partitioned_index();
    let plan = initial_plan(
        "SELECT customer_name, order_city FROM s.Orders JOIN s.Customers \
         ON order_customer = customer_id",
        &indices,
    );
    let pp = pretty_printers::pp_plan(&choose_index(&plan, &indices).await.unwrap().0);
    assert_eq!(pp, "ClusterSend, indices: [[6], [2]]\
                  \n Projection, [s.Customers.customer_name, s.Orders.order_city]\
                  \n Join on: [#s.Orders.order_customer = #s.Customers.customer_id]\
                  \n Scan s.Orders, source: CubeTable(index: #mi0:6:[]:sort_on[order_customer]), fields: [order_customer, order_city]\
                  \n Scan s.Customers, source: CubeTable(index: #mi0:2:[]:sort_on[customer_id]), fields: [customer_id, customer_name]");
    // Add some multi-partitions and validate how it runs.
    // Tree built below: root 0 (inactive) splits at key 100 into node 1 (inactive) and
    // node 2 (active); node 1 splits at key 25 into nodes 3 and 4 (both active).
    indices
        .multi_partitions
        .push(MultiPartition::new_root(0).set_active(false));
    indices.multi_partitions.push(
        MultiPartition::new_child(
            &indices.get_multi_partition(0),
            None,
            Some(Row::new(vec![TableValue::Int(100)])),
        )
        .set_active(false),
    );
    indices.multi_partitions.push(
        MultiPartition::new_child(
            &indices.get_multi_partition(0),
            Some(Row::new(vec![TableValue::Int(100)])),
            None,
        )
        .set_active(true),
    );
    indices.multi_partitions.push(
        MultiPartition::new_child(
            &indices.get_multi_partition(1),
            None,
            Some(Row::new(vec![TableValue::Int(25)])),
        )
        .set_active(true),
    );
    indices.multi_partitions.push(
        MultiPartition::new_child(
            &indices.get_multi_partition(1),
            Some(Row::new(vec![TableValue::Int(25)])),
            Some(Row::new(vec![TableValue::Int(100)])),
        )
        .set_active(true),
    );
    for i in 0..indices.indices.len() {
        // This name marks indices for multi-index.
        if indices.indices[i].get_name() == "#mi0" {
            add_multipart_partitions(
                i as u64,
                &indices.multi_partitions,
                &mut indices.partitions,
            );
        }
    }
    // Give every partition one uploaded chunk so all partitions participate in selects.
    for p in 0..indices.partitions.len() {
        indices
            .chunks
            .push(Chunk::new(p as u64, 123, false).set_uploaded(true));
    }
    // Plan again.
    let (with_index, meta) = choose_index(&plan, &indices).await.unwrap();
    let pp = pretty_printers::pp_plan(&with_index);
    assert_eq!(pp, "ClusterSend, indices: [[6], [2]]\
                  \n Projection, [s.Customers.customer_name, s.Orders.order_city]\
                  \n Join on: [#s.Orders.order_customer = #s.Customers.customer_id]\
                  \n Scan s.Orders, source: CubeTable(index: #mi0:6:[5, 6, 7, 8, 9]:sort_on[order_customer]), fields: [order_customer, order_city]\
                  \n Scan s.Customers, source: CubeTable(index: #mi0:2:[0, 1, 2, 3, 4]:sort_on[customer_id]), fields: [customer_id, customer_name]");
    let c = Config::test("partitioned_index_join").update_config(|mut c| {
        c.server_name = "router".to_string();
        c.select_workers = vec!["worker1".to_string(), "worker2".to_string()];
        c
    });
    let cs = &try_extract_cluster_send(&with_index).unwrap().snapshots;
    let assigned = ClusterSendExec::distribute_to_workers(
        c.config_obj().as_ref(),
        &cs,
        &meta.multi_part_subtree,
    );
    // Builds an expected (partition id, key-range restriction) pair.
    let part = |id: u64, start: Option<i64>, end: Option<i64>| {
        let start = start.map(|i| Row::new(vec![TableValue::Int(i)]));
        let end = end.map(|i| Row::new(vec![TableValue::Int(i)]));
        (id, RowRange { start, end })
    };
    assert_eq!(
        assigned,
        vec![
            (
                "worker1".to_string(),
                vec![
                    part(2, None, None),
                    part(7, None, None),
                    part(0, Some(100), None),
                    part(5, Some(100), None),
                    part(3, None, None),
                    part(8, None, None),
                    part(1, None, Some(25)),
                    part(6, None, Some(25)),
                    part(0, None, Some(25)),
                    part(5, None, Some(25)),
                ]
            ),
            (
                "worker2".to_string(),
                vec![
                    part(4, None, None),
                    part(9, None, None),
                    part(1, Some(25), Some(100)),
                    part(6, Some(25), Some(100)),
                    part(0, Some(25), Some(100)),
                    part(5, Some(25), Some(100)),
                ]
            )
        ]
    );
}
/// Schema fixture without the shared partitioned index.
fn default_indices() -> TestIndices {
    make_test_indices(/* add_multi_indices */ false)
}
/// Schema fixture that also registers the shared partitioned index `#mi0`.
fn indices_with_partitioned_index() -> TestIndices {
    make_test_indices(/* add_multi_indices */ true)
}
/// Registers one partition per multi-partition under the index `index_id`, mirroring the
/// multi-partition tree (parent links, key ranges and active flags).
fn add_multipart_partitions(
    index_id: u64,
    multi_parts: &[MultiPartition],
    partitions: &mut Vec<Partition>,
) {
    // Partitions for this index are appended contiguously, so a multi-partition's ordinal
    // maps to slot `first_part_i + ordinal` in `partitions`.
    let first_part_i = partitions.len() as u64;
    for (ordinal, mp) in multi_parts.iter().enumerate() {
        let mut partition = Partition::new(
            index_id,
            Some(ordinal as u64),
            mp.min_row().cloned(),
            mp.max_row().cloned(),
        );
        if let Some(parent) = mp.parent_multi_partition_id() {
            assert!(parent <= multi_parts.len() as u64);
            partition = partition.update_parent_partition_id(Some(first_part_i + parent));
        }
        if mp.active() {
            partition = partition.to_warmed_up().to_active(true);
        }
        partitions.push(partition);
    }
}
/// Most tests in this module use this schema.
///
/// Schema "s" contains Customers, Orders and Products. Index positions follow insertion
/// order (each `add_table` pushes the table's `default` index first), and the plan-string
/// assertions in the tests above depend on that numbering.
fn make_test_indices(add_multi_indices: bool) -> TestIndices {
    const SCHEMA: u64 = 0;
    const PARTITIONED_INDEX: u64 = 0; // Only 1 partitioned index for now.
    let mut i = TestIndices::default();
    let customers_cols = int_columns(&[
        "customer_id",
        "customer_name",
        "customer_city",
        "customer_registered_date",
    ]);
    let customers = i.add_table(Table::new(
        "Customers".to_string(),
        SCHEMA,
        customers_cols.clone(),
        None,
        None,
        true,
        None,
        None,
        None,
    ));
    i.indices.push(
        Index::try_new(
            "by_city".to_string(),
            customers,
            put_first("customer_city", &customers_cols),
            1,
            None,
            None,
        )
        .unwrap(),
    );
    if add_multi_indices {
        i.indices.push(
            Index::try_new(
                "#mi0".to_string(),
                customers,
                put_first("customer_id", &customers_cols),
                1,
                None,
                Some(PARTITIONED_INDEX),
            )
            .unwrap(),
        );
    }
    let orders_cols = int_columns(&[
        "order_id",
        "order_customer",
        "order_product",
        "order_amount",
        "order_city",
    ]);
    let orders = i.add_table(Table::new(
        "Orders".to_string(),
        SCHEMA,
        orders_cols.clone(),
        None,
        None,
        true,
        None,
        None,
        None,
    ));
    i.indices.push(
        Index::try_new(
            "by_customer".to_string(),
            orders,
            put_first("order_customer", &orders_cols),
            2,
            None,
            None,
        )
        .unwrap(),
    );
    i.indices.push(
        Index::try_new(
            "by_city".to_string(),
            // NOTE(review): this index is built from Orders columns but registered under the
            // Customers table id; `orders` looks intended here. Confirm before changing —
            // the expected plan strings above depend on the current index numbering.
            customers,
            put_first("order_city", &orders_cols),
            2,
            None,
            None,
        )
        .unwrap(),
    );
    if add_multi_indices {
        i.indices.push(
            Index::try_new(
                "#mi0".to_string(),
                orders,
                put_first("order_customer", &orders_cols),
                1,
                None,
                Some(PARTITIONED_INDEX),
            )
            .unwrap(),
        );
    }
    i.add_table(Table::new(
        "Products".to_string(),
        SCHEMA,
        int_columns(&["product_id", "product_name"]),
        None,
        None,
        true,
        None,
        None,
        None,
    ));
    i
}
/// Returns a copy of `cols` with the column named `c` moved to the front.
///
/// Note the relative order of the remaining columns is not fully preserved: the displaced
/// first column takes the named column's old slot (swap, not rotate).
///
/// # Panics
/// Panics if no column named `c` exists in `cols`.
fn put_first(c: &str, cols: &[Column]) -> Vec<Column> {
    // `to_vec` is the std equivalent of the previous `iter().cloned().collect_vec()`
    // (itertools) chain.
    let mut cols = cols.to_vec();
    let pos = cols
        .iter()
        .position(|col| col.get_name() == c)
        .unwrap_or_else(|| panic!("column {:?} not found", c));
    cols.swap(0, pos);
    cols
}
/// Builds a list of `Int` columns named after `names`, each carrying its positional index.
fn int_columns(names: &[&str]) -> Vec<Column> {
    let mut columns = Vec::with_capacity(names.len());
    for (index, name) in names.iter().enumerate() {
        columns.push(Column::new((*name).to_string(), ColumnType::Int, index));
    }
    columns
}
/// Parses `s`, converts it to a DataFusion logical plan against the test metadata `i`,
/// and runs the standard optimizer passes over it.
fn initial_plan(s: &str, i: &TestIndices) -> LogicalPlan {
    let statement = match CubeStoreParser::new(s).unwrap().parse_statement().unwrap() {
        Statement::Statement(inner) => inner,
        _ => panic!("not a statement"),
    };
    let plan = SqlToRel::new(i)
        .statement_to_plan(&DFStatement::Statement(statement))
        .unwrap();
    ExecutionContext::new().optimize(&plan).unwrap()
}
/// In-memory metadata used as a stand-in for the real meta store in planning tests.
/// For every collection, an item's id is its position in the vector.
#[derive(Debug, Default)]
pub struct TestIndices {
    tables: Vec<Table>,
    indices: Vec<Index>,
    partitions: Vec<Partition>,
    chunks: Vec<Chunk>,
    multi_partitions: Vec<MultiPartition>,
}
impl TestIndices {
    /// Registers a table (its schema id must be 0, i.e. "s") together with its implicit
    /// `default` index. Returns the new table's id (= its position in `tables`).
    pub fn add_table(&mut self, t: Table) -> u64 {
        assert_eq!(t.get_schema_id(), 0);
        let table_id = self.tables.len() as u64;
        let default_index = Index::try_new(
            "default".to_string(),
            table_id,
            t.get_columns().clone(),
            t.get_columns().len() as u64,
            None,
            None,
        )
        .unwrap();
        self.indices.push(default_index);
        self.tables.push(t);
        table_id
    }

    /// Looks up a multi-partition by id (id = position in `multi_partitions`).
    pub fn get_multi_partition(&self, id: u64) -> IdRow<MultiPartition> {
        IdRow::new(id, self.multi_partitions[id as usize].clone())
    }

    /// All chunks registered under the given partition, wrapped with their ids.
    pub fn chunks_for_partition(&self, partition_id: u64) -> Vec<IdRow<Chunk>> {
        self.chunks
            .iter()
            .enumerate()
            .filter(|(_, chunk)| chunk.get_partition_id() == partition_id)
            .map(|(id, chunk)| IdRow::new(id as u64, chunk.clone()))
            .collect()
    }

    /// The single schema ("s", id 0) all test tables live in.
    pub fn schema(&self) -> IdRow<Schema> {
        IdRow::new(0, Schema::new("s".to_string()))
    }
}
impl ContextProvider for TestIndices {
    /// Resolves `s.<table>` references to a logical `CubeTableLogical` provider. Bare and
    /// fully-qualified names, and any schema other than "s", are unknown.
    fn get_table_provider(&self, name: TableReference) -> Option<Arc<dyn TableProvider>> {
        let table_name = match name {
            TableReference::Partial { schema, table } if schema == "s" => table,
            _ => return None,
        };
        let (id, t) = self
            .tables
            .iter()
            .find_position(|t| t.get_table_name() == table_name)?;
        let arrow_schema = Arc::new(ArrowSchema::new(
            t.get_columns()
                .iter()
                .map(|c| c.clone().into())
                .collect::<Vec<_>>(),
        ));
        let provider: Arc<dyn TableProvider> = Arc::new(CubeTableLogical {
            table: TablePath {
                table: IdRow::new(id as u64, t.clone()),
                schema: Arc::new(self.schema()),
            },
            schema: arrow_schema,
        });
        Some(provider)
    }

    fn get_function_meta(&self, _name: &str) -> Option<Arc<ScalarUDF>> {
        // Note that this is missing HLL functions.
        None
    }

    fn get_aggregate_meta(&self, _name: &str) -> Option<Arc<AggregateUDF>> {
        // Note that this is missing HLL functions.
        None
    }
}
#[async_trait]
impl PlanIndexStore for TestIndices {
    /// For each `(schema, table)` pair, returns the schema row, the table row, and the
    /// table's indexes with the `default` index inserted at the front.
    async fn get_tables_with_indexes(
        &self,
        inputs: Vec<(String, String)>,
    ) -> Result<Vec<(IdRow<Schema>, IdRow<Table>, Vec<IdRow<Index>>)>, CubeError> {
        let mut r = Vec::with_capacity(inputs.len());
        for (schema, table) in inputs {
            let table = self.get_table(schema, table).await?;
            let schema = self
                .get_schema_by_id(table.get_row().get_schema_id())
                .await?;
            let mut indexes = self.get_table_indexes(table.get_id()).await?;
            // Callers expect the default index first in the list.
            indexes.insert(0, self.get_default_index(table.get_id()).await?);
            r.push((schema, table, indexes))
        }
        Ok(r)
    }

    /// Per requested index id: every partition of that index that is either active itself
    /// or still has chunks attached, together with its chunks.
    async fn get_active_partitions_and_chunks_by_index_id_for_select(
        &self,
        index_id: Vec<u64>,
    ) -> Result<Vec<Vec<(IdRow<Partition>, Vec<IdRow<Chunk>>)>>, CubeError> {
        let mut per_index = Vec::with_capacity(index_id.len());
        for wanted in &index_id {
            let mut selected = Vec::new();
            for (id, p) in self.partitions.iter().enumerate() {
                if p.get_index_id() != *wanted {
                    continue;
                }
                let partition = IdRow::new(id as u64, p.clone());
                let chunks = self.chunks_for_partition(id as u64);
                if partition.get_row().is_active() || !chunks.is_empty() {
                    selected.push((partition, chunks));
                }
            }
            per_index.push(selected);
        }
        Ok(per_index)
    }

    /// Ignores the requested ids and returns the whole tree; good enough for tests.
    async fn get_multi_partition_subtree(
        &self,
        _multi_part_ids: Vec<u64>,
    ) -> Result<HashMap<u64, MultiPartition>, CubeError> {
        Ok(self
            .multi_partitions
            .iter()
            .enumerate()
            .map(|(i, p)| (i as u64, p.clone()))
            .collect())
    }
}
impl TestIndices {
    /// Finds a table by name inside the only schema, "s".
    async fn get_table(
        &self,
        schema_name: String,
        table_name: String,
    ) -> Result<IdRow<Table>, CubeError> {
        if schema_name != "s" {
            return Err(CubeError::internal(
                "only 's' schema defined in tests".to_string(),
            ));
        }
        match self
            .tables
            .iter()
            .find_position(|t| t.get_table_name() == &table_name)
        {
            Some((pos, table)) => Ok(IdRow::new(pos as u64, table.clone())),
            None => Err(CubeError::internal(format!(
                "table {} not found",
                table_name
            ))),
        }
    }

    /// The only schema is "s" with id 0; anything else is an error.
    async fn get_schema_by_id(&self, schema_id: u64) -> Result<IdRow<Schema>, CubeError> {
        match schema_id {
            0 => Ok(self.schema()),
            _ => Err(CubeError::internal(
                "only 's' schema with id = 0 defined in tests".to_string(),
            )),
        }
    }

    /// The default index was created first in `add_table`, so the first match by table id wins.
    async fn get_default_index(&self, table_id: u64) -> Result<IdRow<Index>, CubeError> {
        match self
            .indices
            .iter()
            .find_position(|i| i.table_id() == table_id)
        {
            Some((pos, index)) => Ok(IdRow::new(pos as u64, index.clone())),
            None => Err(CubeError::internal(format!(
                "index for table {} not found",
                table_id
            ))),
        }
    }

    /// Every index registered for the given table, in insertion order.
    async fn get_table_indexes(&self, table_id: u64) -> Result<Vec<IdRow<Index>>, CubeError> {
        let mut matching = Vec::new();
        for (pos, index) in self.indices.iter().enumerate() {
            if index.table_id() == table_id {
                matching.push(IdRow::new(pos as u64, index.clone()));
            }
        }
        Ok(matching)
    }
}
}
| 36.479579 | 183 | 0.535411 |
1df63ff9863aa3a9fccfdca8164879e4d96a6b21 | 21,751 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
mock_vm::{
encode_mint_transaction, encode_transfer_transaction, MockVM, DISCARD_STATUS, KEEP_STATUS,
},
Executor, OP_COUNTERS,
};
use config::config::{NodeConfig, NodeConfigHelpers};
use crypto::{hash::GENESIS_BLOCK_ID, HashValue};
use futures::executor::block_on;
use grpcio::{EnvBuilder, ServerBuilder};
use proptest::prelude::*;
use proto_conv::IntoProtoBytes;
use rusty_fork::{rusty_fork_id, rusty_fork_test, rusty_fork_test_name};
use std::{
collections::HashMap,
fs::File,
io::Write,
sync::{mpsc, Arc},
};
use storage_client::{StorageRead, StorageReadServiceClient, StorageWriteServiceClient};
use storage_proto::proto::storage_grpc::create_storage;
use storage_service::StorageService;
use types::{
account_address::{AccountAddress, ADDRESS_LENGTH},
crypto_proxies::LedgerInfoWithSignatures,
ledger_info::LedgerInfo,
transaction::{SignedTransaction, TransactionListWithProof, Version},
};
use vm_genesis::{encode_genesis_transaction, GENESIS_KEYPAIR};
/// Builds a single-node test config and writes the genesis blob to the location the
/// config expects it at.
fn get_config() -> NodeConfig {
    let config = NodeConfigHelpers::get_single_node_test_config(true);
    // XXX Should this logic live in NodeConfigHelpers?
    let genesis_txn = encode_genesis_transaction(&GENESIS_KEYPAIR.0, GENESIS_KEYPAIR.1.clone());
    let blob = genesis_txn.into_proto_bytes().unwrap();
    std::fs::write(&config.execution.genesis_file_location, &blob).unwrap();
    config
}
/// Starts a storage gRPC server on an ephemeral port and records that port in `config`.
/// Returns the server plus a receiver that fires once the service has shut down.
fn create_storage_server(config: &mut NodeConfig) -> (grpcio::Server, mpsc::Receiver<()>) {
    let (service, shutdown_receiver) = StorageService::new(&config.storage.get_dir());
    let env = Arc::new(EnvBuilder::new().build());
    let mut server = ServerBuilder::new(env)
        .register_service(create_storage(service))
        .bind("localhost", 0)
        .build()
        .expect("Failed to create storage server.");
    server.start();
    let bound = server.bind_addrs();
    assert_eq!(bound.len(), 1);
    let (_, port) = bound[0];
    // This is a little messy -- technically the config should also be used to set up the
    // storage server, but the code currently creates the storage server, binds it to a
    // port, then sets up the config.
    // XXX Clean this up a little.
    config.storage.port = port;
    (server, shutdown_receiver)
}
/// Builds an `Executor` wired to the storage service at the port recorded in `config`.
fn create_executor(config: &NodeConfig) -> Executor<MockVM> {
    let env = Arc::new(EnvBuilder::new().build());
    let read_client =
        StorageReadServiceClient::new(Arc::clone(&env), "localhost", config.storage.port);
    let write_client =
        StorageWriteServiceClient::new(Arc::clone(&env), "localhost", config.storage.port, None);
    Executor::new(Arc::new(read_client), Arc::new(write_client), config)
}
/// Executes a single mint transaction as block number `txn_index + 1` and commits it.
///
/// Block ids are derived from the block number, so block `n`'s parent is block `n - 1`
/// (the genesis block for the very first one).
fn execute_and_commit_block(executor: &TestExecutor, txn_index: u64) {
    let txn = encode_mint_transaction(gen_address(txn_index), 100);
    let parent_block_id = if txn_index == 0 {
        *GENESIS_BLOCK_ID
    } else {
        gen_block_id(txn_index)
    };
    let id = gen_block_id(txn_index + 1);
    let response = block_on(executor.execute_block(vec![txn], parent_block_id, id))
        .unwrap()
        .unwrap();
    // One transaction per block, so the version advances by exactly one.
    assert_eq!(response.version(), txn_index + 1);
    let ledger_info = gen_ledger_info(txn_index + 1, response.root_hash(), id, txn_index + 1);
    block_on(executor.commit_block(ledger_info))
        .unwrap()
        .unwrap();
}
/// Bundles an `Executor` with the storage server backing it; `Drop` tears both down.
struct TestExecutor {
    // The config is kept around because it owns the temp dir used in the test.
    _config: NodeConfig,
    // `Option` so that `Drop` can `take()` and shut the server down explicitly.
    storage_server: Option<grpcio::Server>,
    // Fires once the storage service has fully shut down.
    shutdown_receiver: mpsc::Receiver<()>,
    executor: Executor<MockVM>,
}
impl TestExecutor {
    /// Spins up a fresh storage server plus an executor connected to it.
    fn new() -> TestExecutor {
        let mut config = get_config();
        let (server, shutdown_receiver) = create_storage_server(&mut config);
        let executor = create_executor(&config);
        TestExecutor {
            _config: config,
            storage_server: Some(server),
            shutdown_receiver,
            executor,
        }
    }
}
/// Lets tests call `Executor` methods directly on a `TestExecutor`.
impl std::ops::Deref for TestExecutor {
    type Target = Executor<MockVM>;
    fn deref(&self) -> &Self::Target {
        &self.executor
    }
}
impl Drop for TestExecutor {
    fn drop(&mut self) {
        // Shut the storage server down first, then block until the service confirms.
        let server = self
            .storage_server
            .take()
            .expect("Storage server should exist.");
        drop(server);
        self.shutdown_receiver.recv().unwrap();
    }
}
/// Deterministic test address: `index` big-endian in the low 8 bytes, zeroes elsewhere.
fn gen_address(index: u64) -> AccountAddress {
    let mut buf = [0u8; ADDRESS_LENGTH];
    buf[ADDRESS_LENGTH - 8..].copy_from_slice(&index.to_be_bytes());
    AccountAddress::new(buf)
}
/// Deterministic block id: `index` big-endian in the low 8 bytes, zeroes elsewhere.
fn gen_block_id(index: u64) -> HashValue {
    let mut buf = [0u8; HashValue::LENGTH];
    buf[HashValue::LENGTH - 8..].copy_from_slice(&index.to_be_bytes());
    HashValue::new(buf)
}
/// Builds a `LedgerInfoWithSignatures` for tests: zeroed consensus data hash, epoch 0,
/// and no validator signatures attached.
fn gen_ledger_info(
    version: u64,
    root_hash: HashValue,
    commit_block_id: HashValue,
    timestamp_usecs: u64,
) -> LedgerInfoWithSignatures {
    let ledger_info = LedgerInfo::new(
        version,
        root_hash,
        HashValue::zero(), // consensus_data_hash
        commit_block_id,
        0, // epoch_num
        timestamp_usecs,
        None,
    );
    LedgerInfoWithSignatures::new(ledger_info, HashMap::new())
}
#[test]
fn test_executor_status() {
    let executor = TestExecutor::new();
    // Two mints expected to be kept, then a transfer expected to be discarded.
    let mint1 = encode_mint_transaction(gen_address(0), 100);
    let mint2 = encode_mint_transaction(gen_address(1), 100);
    let transfer = encode_transfer_transaction(gen_address(0), gen_address(1), 500);
    let response = block_on(executor.execute_block(
        vec![mint1, mint2, transfer],
        *GENESIS_BLOCK_ID,
        gen_block_id(1),
    ))
    .unwrap()
    .unwrap();
    assert_eq!(
        vec![
            KEEP_STATUS.clone(),
            KEEP_STATUS.clone(),
            DISCARD_STATUS.clone()
        ],
        response.status()
    );
}
#[test]
fn test_executor_one_block() {
    let executor = TestExecutor::new();
    let block_id = gen_block_id(1);
    let version = 100;
    // One mint per address, all in a single block on top of genesis.
    let txns: Vec<_> = (0..version)
        .map(|i| encode_mint_transaction(gen_address(i), 100))
        .collect();
    let response = block_on(executor.execute_block(txns, *GENESIS_BLOCK_ID, block_id))
        .unwrap()
        .unwrap();
    assert_eq!(response.version(), 100);
    let ledger_info = gen_ledger_info(version, response.root_hash(), block_id, 1);
    block_on(executor.commit_block(ledger_info))
        .unwrap()
        .unwrap();
}
#[test]
fn test_executor_multiple_blocks() {
    let executor = TestExecutor::new();
    // Chain 100 single-transaction blocks, committing each one as it is executed.
    (0..100).for_each(|block_index| execute_and_commit_block(&executor, block_index));
}
#[test]
fn test_executor_execute_same_block_multiple_times() {
    let block_id = gen_block_id(1);
    let version = 100;
    let txns: Vec<_> = (0..version)
        .map(|i| encode_mint_transaction(gen_address(i), 100))
        .collect();
    // Executing the identical block repeatedly, one at a time, must always yield the
    // same response.
    {
        let executor = TestExecutor::new();
        let mut responses: Vec<_> = (0..100)
            .map(|_| {
                block_on(executor.execute_block(txns.clone(), *GENESIS_BLOCK_ID, block_id))
                    .unwrap()
                    .unwrap()
            })
            .collect();
        responses.dedup();
        assert_eq!(responses.len(), 1);
    }
    // Same result when all 100 executions are issued before any response is awaited.
    {
        let executor = TestExecutor::new();
        let futures: Vec<_> = (0..100)
            .map(|_| executor.execute_block(txns.clone(), *GENESIS_BLOCK_ID, block_id))
            .collect();
        let mut responses: Vec<_> = futures
            .into_iter()
            .map(|fut| block_on(fut).unwrap().unwrap())
            .collect();
        responses.dedup();
        assert_eq!(responses.len(), 1);
    }
}
// Runs in a forked subprocess (rusty_fork) so the global OP_COUNTERS state is not shared
// with the other tests in this binary.
rusty_fork_test! {
    #[test]
    fn test_num_accounts_created_counter() {
        let executor = TestExecutor::new();
        for i in 0..20 {
            execute_and_commit_block(&executor, i);
            // Each committed block mints for one fresh address, so the counter grows by one.
            assert_eq!(OP_COUNTERS.counter("num_accounts").get() as u64, i + 1);
        }
    }
}
/// Generates a list of `TransactionListWithProof`s according to the given ranges.
///
/// All transactions are first executed and committed into a scratch storage instance;
/// each requested range is then fetched back (with proofs) via `get_transactions`.
/// Ranges must start at version 1, be well-formed, and each range must overlap or
/// directly extend its predecessor.
fn create_transaction_chunks(
    chunk_ranges: Vec<std::ops::Range<Version>>,
) -> (Vec<TransactionListWithProof>, LedgerInfoWithSignatures) {
    assert_eq!(chunk_ranges.first().unwrap().start, 1);
    for pair in chunk_ranges.windows(2) {
        let (previous, current) = (&pair[0], &pair[1]);
        assert!(previous.start <= previous.end);
        assert!(current.start <= current.end);
        assert!(current.start <= previous.end);
        assert!(previous.end <= current.end);
    }
    // To obtain the batches of transactions, we first execute and save all these
    // transactions in a separate DB. Then we call get_transactions to retrieve them.
    let mut config = get_config();
    let (storage_server, shutdown_receiver) = create_storage_server(&mut config);
    let executor = create_executor(&config);
    let txns: Vec<_> = (1..chunk_ranges.last().unwrap().end)
        .map(|i| encode_mint_transaction(gen_address(i), 100))
        .collect();
    let id = gen_block_id(1);
    let response = block_on(executor.execute_block(txns.clone(), *GENESIS_BLOCK_ID, id))
        .unwrap()
        .unwrap();
    let ledger_version = txns.len() as u64;
    let ledger_info = gen_ledger_info(ledger_version, response.root_hash(), id, 1);
    block_on(executor.commit_block(ledger_info.clone()))
        .unwrap()
        .unwrap();
    let storage_client = StorageReadServiceClient::new(
        Arc::new(EnvBuilder::new().build()),
        "localhost",
        config.storage.port,
    );
    let batches: Vec<_> = chunk_ranges
        .into_iter()
        .map(|range| {
            storage_client
                .get_transactions(
                    range.start,
                    range.end - range.start,
                    ledger_version,
                    false, /* fetch_events */
                )
                .unwrap()
        })
        .collect();
    drop(storage_server);
    shutdown_receiver.recv().unwrap();
    (batches, ledger_info)
}
#[test]
fn test_executor_execute_chunk() {
    let first_batch_size = 30;
    let second_batch_size = 40;
    let third_batch_size = 20;
    let overlapping_size = 5;
    let (chunks, ledger_info) = {
        let first_batch_start = 1;
        let second_batch_start = first_batch_start + first_batch_size;
        let third_batch_start = second_batch_start + second_batch_size - overlapping_size;
        create_transaction_chunks(vec![
            first_batch_start..first_batch_start + first_batch_size,
            second_batch_start..second_batch_start + second_batch_size,
            third_batch_start..third_batch_start + third_batch_size,
        ])
    };
    // Now feed the chunks to a fresh executor one by one and watch what storage reports.
    let mut config = get_config();
    let (storage_server, shutdown_receiver) = create_storage_server(&mut config);
    let executor = create_executor(&config);
    let storage_client = StorageReadServiceClient::new(
        Arc::new(EnvBuilder::new().build()),
        "localhost",
        config.storage.port,
    );
    // Until the final chunk lands, storage must keep reporting the genesis ledger info.
    let assert_still_genesis = || {
        let (_, li, _) = storage_client.update_to_latest_ledger(0, vec![]).unwrap();
        assert_eq!(li.ledger_info().version(), 0);
        assert_eq!(li.ledger_info().consensus_block_id(), *GENESIS_BLOCK_ID);
    };
    // First chunk.
    block_on(executor.execute_chunk(chunks[0].clone(), ledger_info.clone()))
        .unwrap()
        .unwrap();
    assert_still_genesis();
    // Second chunk.
    block_on(executor.execute_chunk(chunks[1].clone(), ledger_info.clone()))
        .unwrap()
        .unwrap();
    assert_still_genesis();
    // An empty chunk changes nothing.
    block_on(executor.execute_chunk(TransactionListWithProof::new_empty(), ledger_info.clone()))
        .unwrap()
        .unwrap();
    assert_still_genesis();
    // Replaying the second chunk changes nothing either.
    block_on(executor.execute_chunk(chunks[1].clone(), ledger_info.clone()))
        .unwrap()
        .unwrap();
    assert_still_genesis();
    // The third chunk completes the sync; the target ledger info becomes visible.
    block_on(executor.execute_chunk(chunks[2].clone(), ledger_info.clone()))
        .unwrap()
        .unwrap();
    let (_, li, _) = storage_client.update_to_latest_ledger(0, vec![]).unwrap();
    assert_eq!(li, ledger_info);
    drop(storage_server);
    shutdown_receiver.recv().unwrap();
}
/// Chunk-based sync must survive an executor restart: sync the first chunk, drop the
/// executor, construct a new one against the same storage, and finish syncing.
#[test]
fn test_executor_execute_chunk_restart() {
    let first_batch_size = 30;
    let second_batch_size = 40;
    let (chunks, ledger_info) = {
        let first_batch_start = 1;
        let second_batch_start = first_batch_start + first_batch_size;
        create_transaction_chunks(vec![
            first_batch_start..first_batch_start + first_batch_size,
            second_batch_start..second_batch_start + second_batch_size,
        ])
    };
    let mut config = get_config();
    let (storage_server, shutdown_receiver) = create_storage_server(&mut config);
    // First we simulate syncing the first chunk of transactions.
    {
        let executor = create_executor(&config);
        let storage_client = StorageReadServiceClient::new(
            Arc::new(EnvBuilder::new().build()),
            "localhost",
            config.storage.port,
        );
        block_on(executor.execute_chunk(chunks[0].clone(), ledger_info.clone()))
            .unwrap()
            .unwrap();
        // Target version not reached yet, so storage still reports genesis.
        let (_, li, _) = storage_client.update_to_latest_ledger(0, vec![]).unwrap();
        assert_eq!(li.ledger_info().version(), 0);
        assert_eq!(li.ledger_info().consensus_block_id(), *GENESIS_BLOCK_ID);
    }
    // Then we restart executor and resume to the next chunk.
    {
        let executor = create_executor(&config);
        let storage_client = StorageReadServiceClient::new(
            Arc::new(EnvBuilder::new().build()),
            "localhost",
            config.storage.port,
        );
        block_on(executor.execute_chunk(chunks[1].clone(), ledger_info.clone()))
            .unwrap()
            .unwrap();
        // The final chunk landed; the target ledger info is now visible.
        let (_, li, _) = storage_client.update_to_latest_ledger(0, vec![]).unwrap();
        assert_eq!(li, ledger_info);
    }
    drop(storage_server);
    shutdown_receiver.recv().unwrap();
}
/// A block of transactions plus the ids linking it into the block tree.
struct TestBlock {
    txns: Vec<SignedTransaction>,
    parent_id: HashValue,
    id: HashValue,
}
impl TestBlock {
    /// Creates a block containing one mint of `amount` per address index in `addr_index`.
    fn new(
        addr_index: std::ops::Range<u64>,
        amount: u32,
        parent_id: HashValue,
        id: HashValue,
    ) -> Self {
        let mut txns = Vec::new();
        for index in addr_index {
            txns.push(encode_mint_transaction(gen_address(index), u64::from(amount)));
        }
        TestBlock {
            txns,
            parent_id,
            id,
        }
    }
}
// Executes a list of transactions one block at a time, committing along the way, and
// returns the root hash once all of them ran. Serves as the reference result for the
// batched property tests below.
fn run_transactions_naive(transactions: Vec<SignedTransaction>) -> HashValue {
    let executor = TestExecutor::new();
    let mut iter = transactions.into_iter();
    // Block 1 carries the first transaction if there is one, otherwise it is empty.
    let first_block_txns: Vec<_> = iter.next().into_iter().collect();
    let response = block_on(executor.execute_block(
        first_block_txns,
        *GENESIS_BLOCK_ID,
        gen_block_id(1),
    ))
    .unwrap()
    .unwrap();
    let mut root_hash = response.root_hash();
    // Remaining transactions go into blocks 2, 3, ... each chained onto its predecessor.
    for (i, txn) in iter.enumerate() {
        let parent_block_id = gen_block_id(i as u64 + 1);
        // When i = 0 the block id is 2, and so on.
        let id = gen_block_id(i as u64 + 2);
        let response = block_on(executor.execute_block(vec![txn], parent_block_id, id))
            .unwrap()
            .unwrap();
        root_hash = response.root_hash();
        let ledger_info = gen_ledger_info(i as u64 + 2, root_hash, id, i as u64 + 1);
        block_on(executor.commit_block(ledger_info))
            .unwrap()
            .unwrap();
    }
    root_hash
}
proptest! {
    #![proptest_config(ProptestConfig::with_cases(10))]

    // Executing two sibling branches (B and C forked off A) in memory must produce the same
    // root hashes as committing the equivalent transaction sequences one block at a time.
    #[test]
    fn test_executor_two_branches(
        a_size in 0..30u64,
        b_size in 0..30u64,
        c_size in 0..30u64,
        amount in any::<u32>(),
    ) {
        // Genesis -> A -> B
        //            |
        //            └--> C
        let block_a = TestBlock::new(0..a_size, amount, *GENESIS_BLOCK_ID, gen_block_id(1));
        let block_b = TestBlock::new(0..b_size, amount, gen_block_id(1), gen_block_id(2));
        let block_c = TestBlock::new(0..c_size, amount, gen_block_id(1), gen_block_id(3));
        // Execute block A, B and C. Hold all results in memory.
        let executor = TestExecutor::new();
        let response_a = block_on(executor.execute_block(
            block_a.txns.clone(), block_a.parent_id, block_a.id,
        )).unwrap().unwrap();
        prop_assert_eq!(response_a.version(), a_size);
        let response_b = block_on(executor.execute_block(
            block_b.txns.clone(), block_b.parent_id, block_b.id,
        )).unwrap().unwrap();
        prop_assert_eq!(response_b.version(), a_size + b_size);
        let response_c = block_on(executor.execute_block(
            block_c.txns.clone(), block_c.parent_id, block_c.id,
        )).unwrap().unwrap();
        prop_assert_eq!(response_c.version(), a_size + c_size);
        let root_hash_a = response_a.root_hash();
        let root_hash_b = response_b.root_hash();
        let root_hash_c = response_c.root_hash();
        // Execute block A and B. Execute and commit one transaction at a time.
        let expected_root_hash_a = run_transactions_naive(block_a.txns.clone());
        prop_assert_eq!(root_hash_a, expected_root_hash_a);
        let expected_root_hash_b = run_transactions_naive({
            let mut txns = vec![];
            txns.extend(block_a.txns.iter().cloned());
            txns.extend(block_b.txns.iter().cloned());
            txns
        });
        prop_assert_eq!(root_hash_b, expected_root_hash_b);
        let expected_root_hash_c = run_transactions_naive({
            let mut txns = vec![];
            txns.extend(block_a.txns.iter().cloned());
            txns.extend(block_c.txns.iter().cloned());
            txns
        });
        prop_assert_eq!(root_hash_c, expected_root_hash_c);
    }

    // Committing a block, restarting the executor against the same storage, and committing
    // a second block must yield the same root hash as the one-transaction-at-a-time run.
    #[test]
    fn test_executor_restart(a_size in 0..30u64, b_size in 0..30u64, amount in any::<u32>()) {
        let block_a = TestBlock::new(0..a_size, amount, *GENESIS_BLOCK_ID, gen_block_id(1));
        let block_b = TestBlock::new(0..b_size, amount, gen_block_id(1), gen_block_id(2));
        let mut config = get_config();
        let (storage_server, shutdown_receiver) = create_storage_server(&mut config);
        // First execute and commit one block, then destroy executor.
        {
            let executor = create_executor(&config);
            let response_a = block_on(executor.execute_block(
                block_a.txns.clone(), block_a.parent_id, block_a.id,
            )).unwrap().unwrap();
            let root_hash = response_a.root_hash();
            let ledger_info = gen_ledger_info(block_a.txns.len() as u64, root_hash, block_a.id, 1);
            block_on(executor.commit_block(ledger_info)).unwrap().unwrap();
        }
        // Now we construct a new executor and run one more block.
        let root_hash = {
            let executor = create_executor(&config);
            let response_b = block_on(executor.execute_block(
                block_b.txns.clone(), block_b.parent_id, block_b.id,
            )).unwrap().unwrap();
            let root_hash = response_b.root_hash();
            let ledger_info = gen_ledger_info(
                (block_a.txns.len() + block_b.txns.len()) as u64,
                root_hash,
                block_b.id,
                2,
            );
            block_on(executor.commit_block(ledger_info)).unwrap().unwrap();
            root_hash
        };
        let expected_root_hash = run_transactions_naive({
            let mut txns = vec![];
            txns.extend(block_a.txns.iter().cloned());
            txns.extend(block_b.txns.iter().cloned());
            txns
        });
        prop_assert_eq!(root_hash, expected_root_hash);
        drop(storage_server);
        shutdown_receiver.recv().unwrap();
    }
}
| 34.525397 | 99 | 0.632661 |
cc42cd89da26b6333d0f355840d5c84c7dbc9849 | 10,286 | use super::{Map, Rect, TileType, Position, spawner, SHOW_MAPGEN_VISUALIZER};
use specs::prelude::*;
mod simple_map;
mod bsp_dungeon;
mod bsp_interior;
mod cellular_automata;
mod drunkard;
mod maze;
mod dla;
mod common;
mod voronoi;
mod waveform_collapse;
mod prefab_builder;
mod room_based_spawner;
mod room_based_starting_position;
mod room_based_stairs;
mod area_starting_points;
mod cull_unreachable;
mod voronoi_spawning;
mod distant_exit;
mod room_exploder;
mod room_corner_rounding;
mod rooms_corridors_dogleg;
mod rooms_corridors_bsp;
mod room_sorter;
mod room_draw;
mod rooms_corridors_nearest;
mod rooms_corridors_lines;
mod room_corridor_spawner;
mod door_placement;
mod town;
mod forest;
mod limestone_cavern;
use forest::forest_builder;
use limestone_cavern::limestone_cavern_builder;
use distant_exit::DistantExit;
use simple_map::SimpleMapBuilder;
use bsp_dungeon::BspDungeonBuilder;
use bsp_interior::BspInteriorBuilder;
use cellular_automata::CellularAutomataBuilder;
use drunkard::DrunkardsWalkBuilder;
use voronoi::VoronoiCellBuilder;
use waveform_collapse::WaveformCollapseBuilder;
use prefab_builder::PrefabBuilder;
use room_based_spawner::RoomBasedSpawner;
use room_based_starting_position::RoomBasedStartingPosition;
use room_based_stairs::RoomBasedStairs;
use area_starting_points::{AreaStartingPosition, XStart, YStart};
use cull_unreachable::CullUnreachable;
use voronoi_spawning::VoronoiSpawning;
use maze::MazeBuilder;
use dla::DLABuilder;
use common::*;
use room_exploder::RoomExploder;
use room_corner_rounding::RoomCornerRounder;
use rooms_corridors_dogleg::DoglegCorridors;
use rooms_corridors_bsp::BspCorridors;
use room_sorter::{RoomSorter, RoomSort};
use room_draw::RoomDrawer;
use rooms_corridors_nearest::NearestCorridors;
use rooms_corridors_lines::StraightLineCorridors;
use room_corridor_spawner::CorridorSpawner;
use door_placement::DoorPlacement;
use town::town_builder;
/// Shared state threaded through every stage of a map-builder chain.
pub struct BuilderMap {
    // (map tile index, entity name) pairs to be spawned once building is done
    pub spawn_list : Vec<(usize, String)>,
    // the map under construction
    pub map : Map,
    // where the player starts, once a builder stage has decided
    pub starting_position : Option<Position>,
    // room rectangles, populated only by room-based builders
    pub rooms: Option<Vec<Rect>>,
    // corridor tile-index lists, populated only by corridor builders
    pub corridors: Option<Vec<Vec<usize>>>,
    // per-stage snapshots for the map-generation visualizer
    pub history : Vec<Map>,
    pub width: i32,
    pub height: i32
}
impl BuilderMap {
    /// Append a snapshot of the current map to `history` so the map-generation
    /// visualizer can replay each build stage. Does nothing when the
    /// visualizer is disabled.
    fn take_snapshot(&mut self) {
        if !SHOW_MAPGEN_VISUALIZER {
            return;
        }
        // Clone the map and mark every tile revealed so the whole layout
        // is visible in the replay.
        let mut frame = self.map.clone();
        frame.revealed_tiles.iter_mut().for_each(|seen| *seen = true);
        self.history.push(frame);
    }
}
/// An ordered pipeline of map builders: exactly one initial builder that
/// creates the map, followed by any number of meta builders that refine it.
pub struct BuilderChain {
    // the stage that creates the initial map; must be set before build_map
    starter: Option<Box<dyn InitialMapBuilder>>,
    // refinement stages, run in insertion order after the starter
    builders: Vec<Box<dyn MetaMapBuilder>>,
    pub build_data : BuilderMap
}
impl BuilderChain {
    /// Create an empty chain targeting a new map of the given depth,
    /// dimensions and name.
    pub fn new<S : ToString>(new_depth : i32, width: i32, height: i32, name : S) -> BuilderChain {
        let build_data = BuilderMap {
            spawn_list: Vec::new(),
            map: Map::new(new_depth, width, height, name),
            starting_position: None,
            rooms: None,
            corridors: None,
            history: Vec::new(),
            width,
            height
        };
        BuilderChain { starter: None, builders: Vec::new(), build_data }
    }
    /// Install the initial map builder. Panics if one was already installed.
    pub fn start_with(&mut self, starter : Box<dyn InitialMapBuilder>) {
        if self.starter.is_some() {
            panic!("You can only have one starting builder.");
        }
        self.starter = Some(starter);
    }
    /// Append a meta builder; meta builders run in the order they were added.
    pub fn with(&mut self, metabuilder : Box<dyn MetaMapBuilder>) {
        self.builders.push(metabuilder);
    }
    /// Run the whole chain: the starter creates the map, then each meta
    /// builder transforms it in turn. Panics if no starter was installed.
    pub fn build_map(&mut self, rng : &mut rltk::RandomNumberGenerator) {
        let starter = self
            .starter
            .as_mut()
            .expect("Cannot run a map builder chain without a starting build system");
        // Build the starting map
        starter.build_map(rng, &mut self.build_data);
        // Build additional layers in turn
        for meta in self.builders.iter_mut() {
            meta.build_map(rng, &mut self.build_data);
        }
    }
    /// Instantiate every entity queued in the spawn list into the ECS world.
    pub fn spawn_entities(&mut self, ecs : &mut World) {
        for (tile_idx, name) in self.build_data.spawn_list.iter() {
            spawner::spawn_entity(ecs, &(tile_idx, name));
        }
    }
}
/// First stage of a builder chain: creates the initial map from scratch.
pub trait InitialMapBuilder {
    fn build_map(&mut self, rng: &mut rltk::RandomNumberGenerator, build_data : &mut BuilderMap);
}
/// Subsequent stages of a builder chain: transform a map that an earlier
/// stage already produced.
pub trait MetaMapBuilder {
    fn build_map(&mut self, rng: &mut rltk::RandomNumberGenerator, build_data : &mut BuilderMap);
}
/// Pick a random (horizontal, vertical) starting-area preference.
///
/// Draws two dice in the same order as before (x first, then y), so the
/// RNG stream consumed by callers is unchanged. Rewritten to assign via
/// `match` expressions instead of late-initialized locals
/// (clippy::needless_late_init).
fn random_start_position(rng: &mut rltk::RandomNumberGenerator) -> (XStart, YStart) {
    let x = match rng.roll_dice(1, 3) {
        1 => XStart::LEFT,
        2 => XStart::CENTER,
        _ => XStart::RIGHT
    };
    let y = match rng.roll_dice(1, 3) {
        1 => YStart::BOTTOM,
        2 => YStart::CENTER,
        _ => YStart::TOP
    };
    (x, y)
}
/// Assemble a random room-based builder chain (rooms, corridors, spawns,
/// start and exit placement).
///
/// NOTE: every `roll_dice` call below is order-sensitive — the sequence of
/// draws defines the generated map for a given RNG seed, so stages must not
/// be reordered.
fn random_room_builder(rng: &mut rltk::RandomNumberGenerator, builder : &mut BuilderChain) {
    // Choose one of three initial room-carving algorithms.
    let build_roll = rng.roll_dice(1, 3);
    match build_roll {
        1 => builder.start_with(SimpleMapBuilder::new()),
        2 => builder.start_with(BspDungeonBuilder::new()),
        _ => builder.start_with(BspInteriorBuilder::new())
    }
    // BSP Interior still makes holes in the walls, so skip the room-shaping
    // stages for it (roll 3 selected BspInteriorBuilder above).
    if build_roll != 3 {
        // Sort by one of the 5 available algorithms
        let sort_roll = rng.roll_dice(1, 5);
        match sort_roll {
            1 => builder.with(RoomSorter::new(RoomSort::LEFTMOST)),
            2 => builder.with(RoomSorter::new(RoomSort::RIGHTMOST)),
            3 => builder.with(RoomSorter::new(RoomSort::TOPMOST)),
            4 => builder.with(RoomSorter::new(RoomSort::BOTTOMMOST)),
            _ => builder.with(RoomSorter::new(RoomSort::CENTRAL)),
        }
        builder.with(RoomDrawer::new());
        // Pick a corridor-connection strategy.
        let corridor_roll = rng.roll_dice(1, 4);
        match corridor_roll {
            1 => builder.with(DoglegCorridors::new()),
            2 => builder.with(NearestCorridors::new()),
            3 => builder.with(StraightLineCorridors::new()),
            _ => builder.with(BspCorridors::new())
        }
        // 50% chance of spawning entities inside corridors too.
        let cspawn_roll = rng.roll_dice(1, 2);
        if cspawn_roll == 1 {
            builder.with(CorridorSpawner::new());
        }
        // 1-in-6 chance each of two cosmetic room modifiers; otherwise none.
        let modifier_roll = rng.roll_dice(1, 6);
        match modifier_roll {
            1 => builder.with(RoomExploder::new()),
            2 => builder.with(RoomCornerRounder::new()),
            _ => {}
        }
    }
    // Starting position: inside a room, or at a random map-edge area.
    let start_roll = rng.roll_dice(1, 2);
    match start_roll {
        1 => builder.with(RoomBasedStartingPosition::new()),
        _ => {
            let (start_x, start_y) = random_start_position(rng);
            builder.with(AreaStartingPosition::new(start_x, start_y));
        }
    }
    // Exit: stairs in a room, or the tile farthest from the start.
    let exit_roll = rng.roll_dice(1, 2);
    match exit_roll {
        1 => builder.with(RoomBasedStairs::new()),
        _ => builder.with(DistantExit::new())
    }
    // Spawning: per-room, or Voronoi-region based.
    let spawn_roll = rng.roll_dice(1, 2);
    match spawn_roll {
        1 => builder.with(RoomBasedSpawner::new()),
        _ => builder.with(VoronoiSpawning::new())
    }
}
/// Assemble a random organic/shape-based builder chain (caves, mazes,
/// drunkard's walks, DLA, Voronoi, or a prefab level).
///
/// NOTE: `roll_dice` draw order is part of the generated output for a given
/// seed — do not reorder stages.
fn random_shape_builder(rng: &mut rltk::RandomNumberGenerator, builder : &mut BuilderChain) {
    // d16 over 13 named algorithms; rolls 14-16 all fall through to the
    // prefab level, giving it extra weight.
    let builder_roll = rng.roll_dice(1, 16);
    match builder_roll {
        1 => builder.start_with(CellularAutomataBuilder::new()),
        2 => builder.start_with(DrunkardsWalkBuilder::open_area()),
        3 => builder.start_with(DrunkardsWalkBuilder::open_halls()),
        4 => builder.start_with(DrunkardsWalkBuilder::winding_passages()),
        5 => builder.start_with(DrunkardsWalkBuilder::fat_passages()),
        6 => builder.start_with(DrunkardsWalkBuilder::fearful_symmetry()),
        7 => builder.start_with(MazeBuilder::new()),
        8 => builder.start_with(DLABuilder::walk_inwards()),
        9 => builder.start_with(DLABuilder::walk_outwards()),
        10 => builder.start_with(DLABuilder::central_attractor()),
        11 => builder.start_with(DLABuilder::insectoid()),
        12 => builder.start_with(VoronoiCellBuilder::pythagoras()),
        13 => builder.start_with(VoronoiCellBuilder::manhattan()),
        _ => builder.start_with(PrefabBuilder::constant(prefab_builder::prefab_levels::WFC_POPULATED)),
    }
    // Set the start to the center and cull unreachable tiles from there.
    builder.with(AreaStartingPosition::new(XStart::CENTER, YStart::CENTER));
    builder.with(CullUnreachable::new());
    // Now set the start to a random starting area
    let (start_x, start_y) = random_start_position(rng);
    builder.with(AreaStartingPosition::new(start_x, start_y));
    // Setup an exit and spawn mobs
    builder.with(VoronoiSpawning::new());
    builder.with(DistantExit::new());
}
/// Build a fully random level: 50/50 room-based vs. shape-based, with
/// optional waveform-collapse and prefab-section passes layered on top.
///
/// NOTE: the sequence of `roll_dice` calls is order-sensitive for a given
/// RNG seed; do not reorder.
pub fn random_builder(new_depth: i32, rng: &mut rltk::RandomNumberGenerator, width: i32, height: i32) -> BuilderChain {
    let mut builder = BuilderChain::new(new_depth, width, height, "New Map");
    let type_roll = rng.roll_dice(1, 2);
    match type_roll {
        1 => random_room_builder(rng, &mut builder),
        _ => random_shape_builder(rng, &mut builder)
    }
    // 1-in-3 chance: re-process the map through waveform collapse, which
    // invalidates start/exit/spawns, so those stages are re-run after it.
    if rng.roll_dice(1, 3)==1 {
        builder.with(WaveformCollapseBuilder::new());
        // Now set the start to a random starting area
        let (start_x, start_y) = random_start_position(rng);
        builder.with(AreaStartingPosition::new(start_x, start_y));
        // Setup an exit and spawn mobs
        builder.with(VoronoiSpawning::new());
        builder.with(DistantExit::new());
    }
    // 1-in-20 chance: stamp the underground-fort prefab section into the map.
    if rng.roll_dice(1, 20)==1 {
        builder.with(PrefabBuilder::sectional(prefab_builder::prefab_sections::UNDERGROUND_FORT));
    }
    // Always finish by placing doors and any applicable prefab vaults.
    builder.with(DoorPlacement::new());
    builder.with(PrefabBuilder::vaults());
    builder
}
/// Select the builder chain for a given dungeon depth: hand-crafted levels
/// for the first three depths, a fully random builder beyond that.
pub fn level_builder(new_depth: i32, rng: &mut rltk::RandomNumberGenerator, width: i32, height: i32) -> BuilderChain {
    // Debug trace of which depth is being generated.
    println!("Depth: {}", new_depth);
    if new_depth == 1 {
        town_builder(new_depth, rng, width, height)
    } else if new_depth == 2 {
        forest_builder(new_depth, rng, width, height)
    } else if new_depth == 3 {
        limestone_cavern_builder(new_depth, rng, width, height)
    } else {
        random_builder(new_depth, rng, width, height)
    }
}
| 33.180645 | 119 | 0.652635 |
e9856b5762661a9d23f117444e1eab6d1dffe092 | 2,520 | //! Test the serial interface in Half-Duplex mode.
//!
//! This example requires you to hook-up a pullup resistor on the TX pin. RX pin is not used.
//! Resistor value depends on the baurate and line caracteristics, 1KOhms works well in most cases.
//! Half-Duplex mode internally connect TX to RX, meaning that bytes sent will also be received.
#![deny(unsafe_code)]
#![deny(warnings)]
#![no_main]
#![no_std]
extern crate cortex_m;
#[macro_use(entry, exception)]
extern crate cortex_m_rt as rt;
#[macro_use(block)]
extern crate nb;
extern crate panic_semihosting;
extern crate stm32l4xx_hal as hal;
// #[macro_use(block)]
// extern crate nb;
use crate::hal::prelude::*;
use crate::hal::serial::{Config, Serial};
use crate::rt::ExceptionFrame;
use cortex_m::asm;
#[entry]
// Half-duplex serial demo: USART2 TX (PA2, open-drain with an external
// pull-up) is internally looped back to RX, so a byte written should be
// read back. Peripheral/clock setup below is order-dependent.
fn main() -> ! {
    // Take ownership of the device peripherals (panics if taken twice).
    let p = hal::stm32::Peripherals::take().unwrap();
    let mut flash = p.FLASH.constrain();
    let mut rcc = p.RCC.constrain();
    let mut pwr = p.PWR.constrain(&mut rcc.apb1r1);
    let mut gpioa = p.GPIOA.split(&mut rcc.ahb2);
    // let mut gpiob = p.GPIOB.split(&mut rcc.ahb2);
    // clock configuration using the default settings (all clocks run at 8 MHz)
    // let clocks = rcc.cfgr.freeze(&mut flash.acr);
    // TRY this alternate clock configuration (clocks run at nearly the maximum frequency)
    let clocks = rcc
        .cfgr
        .sysclk(80.mhz())
        .pclk1(80.mhz())
        .pclk2(80.mhz())
        .freeze(&mut flash.acr, &mut pwr);
    // The Serial API is highly generic
    // TRY the commented out, different pin configurations
    // let tx = gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh).set_open_drain();
    // Open-drain is required for half-duplex: the line idles high via the
    // external pull-up resistor.
    let tx = gpioa
        .pa2
        .into_af7(&mut gpioa.moder, &mut gpioa.afrl)
        .set_open_drain();
    // let tx = gpiob.pb6.into_af7(&mut gpiob.moder, &mut gpiob.afrl).set_open_drain();
    // TRY using a different USART peripheral here
    // Only the TX pin is supplied — half-duplex mode connects TX to RX
    // internally, so no RX pin is needed.
    let serial = Serial::usart2(
        p.USART2,
        (tx,),
        Config::default().baudrate(9_600.bps()),
        clocks,
        &mut rcc.apb1r1,
    );
    let (mut tx, mut rx) = serial.split();
    let sent = b'X';
    // The `block!` macro makes an operation block until it finishes
    // NOTE the error type is `!`
    block!(tx.write(sent)).ok();
    let received = block!(rx.read()).unwrap();
    // The looped-back byte must equal what was sent.
    assert_eq!(received, sent);
    // if all goes well you should reach this breakpoint
    asm::bkpt();
    loop {}
}
#[exception]
// Hard-fault handler: report the faulting stack frame via the panic handler
// (panic-semihosting prints it to the debug host) and never return.
fn HardFault(ef: &ExceptionFrame) -> ! {
    panic!("{:#?}", ef);
}
| 28.636364 | 99 | 0.640476 |
1803adee3c18e04de03142700875c3d38e544502 | 32,008 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Basic functions for dealing with memory.
//!
//! This module contains functions for querying the size and alignment of
//! types, initializing and manipulating memory.
#![stable(feature = "rust1", since = "1.0.0")]
use clone;
use cmp;
use fmt;
use hash;
use intrinsics;
use marker::{Copy, PhantomData, Sized};
use ptr;
use ops::{Deref, DerefMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::transmute;
/// Takes ownership and "forgets" about the value **without running its destructor**.
///
/// Any resources the value manages, such as heap memory or a file handle, will linger
/// forever in an unreachable state. However, it does not guarantee that pointers
/// to this memory will remain valid.
///
/// * If you want to leak memory, see [`Box::leak`][leak].
/// * If you want to obtain a raw pointer to the memory, see [`Box::into_raw`][into_raw].
/// * If you want to dispose of a value properly, running its destructor, see
/// [`mem::drop`][drop].
///
/// # Safety
///
/// `forget` is not marked as `unsafe`, because Rust's safety guarantees
/// do not include a guarantee that destructors will always run. For example,
/// a program can create a reference cycle using [`Rc`][rc], or call
/// [`process::exit`][exit] to exit without running destructors. Thus, allowing
/// `mem::forget` from safe code does not fundamentally change Rust's safety
/// guarantees.
///
/// That said, leaking resources such as memory or I/O objects is usually undesirable,
/// so `forget` is only recommended for specialized use cases like those shown below.
///
/// Because forgetting a value is allowed, any `unsafe` code you write must
/// allow for this possibility. You cannot return a value and expect that the
/// caller will necessarily run the value's destructor.
///
/// [rc]: ../../std/rc/struct.Rc.html
/// [exit]: ../../std/process/fn.exit.html
///
/// # Examples
///
/// Leak an I/O object, never closing the file:
///
/// ```no_run
/// use std::mem;
/// use std::fs::File;
///
/// let file = File::open("foo.txt").unwrap();
/// mem::forget(file);
/// ```
///
/// The practical use cases for `forget` are rather specialized and mainly come
/// up in unsafe or FFI code.
///
/// ## Use case 1
///
/// You have created an uninitialized value using [`mem::uninitialized`][uninit].
/// You must either initialize or `forget` it on every computation path before
/// Rust drops it automatically, like at the end of a scope or after a panic.
/// Running the destructor on an uninitialized value would be [undefined behavior][ub].
///
/// ```
/// use std::mem;
/// use std::ptr;
///
/// # let some_condition = false;
/// unsafe {
/// let mut uninit_vec: Vec<u32> = mem::uninitialized();
///
/// if some_condition {
/// // Initialize the variable.
/// ptr::write(&mut uninit_vec, Vec::new());
/// } else {
/// // Forget the uninitialized value so its destructor doesn't run.
/// mem::forget(uninit_vec);
/// }
/// }
/// ```
///
/// ## Use case 2
///
/// You have duplicated the bytes making up a value, without doing a proper
/// [`Clone`][clone]. You need the value's destructor to run only once,
/// because a double `free` is undefined behavior.
///
/// An example is a possible implementation of [`mem::swap`][swap]:
///
/// ```
/// use std::mem;
/// use std::ptr;
///
/// # #[allow(dead_code)]
/// fn swap<T>(x: &mut T, y: &mut T) {
/// unsafe {
/// // Give ourselves some scratch space to work with
/// let mut t: T = mem::uninitialized();
///
/// // Perform the swap, `&mut` pointers never alias
/// ptr::copy_nonoverlapping(&*x, &mut t, 1);
/// ptr::copy_nonoverlapping(&*y, x, 1);
/// ptr::copy_nonoverlapping(&t, y, 1);
///
/// // y and t now point to the same thing, but we need to completely
/// // forget `t` because we do not want to run the destructor for `T`
/// // on its value, which is still owned somewhere outside this function.
/// mem::forget(t);
/// }
/// }
/// ```
///
/// [drop]: fn.drop.html
/// [uninit]: fn.uninitialized.html
/// [clone]: ../clone/trait.Clone.html
/// [swap]: fn.swap.html
/// [FFI]: ../../book/first-edition/ffi.html
/// [box]: ../../std/boxed/struct.Box.html
/// [leak]: ../../std/boxed/struct.Box.html#method.leak
/// [into_raw]: ../../std/boxed/struct.Box.html#method.into_raw
/// [ub]: ../../reference/behavior-considered-undefined.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn forget<T>(t: T) {
    // Wrapping `t` in ManuallyDrop suppresses its destructor; the wrapper is
    // then discarded at end of statement without ever dropping the inner value.
    ManuallyDrop::new(t);
}
/// Returns the size of a type in bytes.
///
/// More specifically, this is the offset in bytes between successive elements
/// in an array with that item type including alignment padding. Thus, for any
/// type `T` and length `n`, `[T; n]` has a size of `n * size_of::<T>()`.
///
/// In general, the size of a type is not stable across compilations, but
/// specific types such as primitives are.
///
/// The following table gives the size for primitives.
///
/// Type | size_of::\<Type>()
/// ---- | ---------------
/// () | 0
/// bool | 1
/// u8 | 1
/// u16 | 2
/// u32 | 4
/// u64 | 8
/// u128 | 16
/// i8 | 1
/// i16 | 2
/// i32 | 4
/// i64 | 8
/// i128 | 16
/// f32 | 4
/// f64 | 8
/// char | 4
///
/// Furthermore, `usize` and `isize` have the same size.
///
/// The types `*const T`, `&T`, `Box<T>`, `Option<&T>`, and `Option<Box<T>>` all have
/// the same size. If `T` is Sized, all of those types have the same size as `usize`.
///
/// The mutability of a pointer does not change its size. As such, `&T` and `&mut T`
/// have the same size. Likewise for `*const T` and `*mut T`.
///
/// # Size of `#[repr(C)]` items
///
/// The `C` representation for items has a defined layout. With this layout,
/// the size of items is also stable as long as all fields have a stable size.
///
/// ## Size of Structs
///
/// For `structs`, the size is determined by the following algorithm.
///
/// For each field in the struct ordered by declaration order:
///
/// 1. Add the size of the field.
/// 2. Round up the current size to the nearest multiple of the next field's [alignment].
///
/// Finally, round the size of the struct to the nearest multiple of its [alignment].
/// The alignment of the struct is usually the largest alignment of all its
/// fields; this can be changed with the use of `repr(align(N))`.
///
/// Unlike `C`, zero sized structs are not rounded up to one byte in size.
///
/// ## Size of Enums
///
/// Enums that carry no data other than the descriminant have the same size as C enums
/// on the platform they are compiled for.
///
/// ## Size of Unions
///
/// The size of a union is the size of its largest field.
///
/// Unlike `C`, zero sized unions are not rounded up to one byte in size.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// // Some primitives
/// assert_eq!(4, mem::size_of::<i32>());
/// assert_eq!(8, mem::size_of::<f64>());
/// assert_eq!(0, mem::size_of::<()>());
///
/// // Some arrays
/// assert_eq!(8, mem::size_of::<[i32; 2]>());
/// assert_eq!(12, mem::size_of::<[i32; 3]>());
/// assert_eq!(0, mem::size_of::<[i32; 0]>());
///
///
/// // Pointer size equality
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<*const i32>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Box<i32>>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Option<&i32>>());
/// assert_eq!(mem::size_of::<Box<i32>>(), mem::size_of::<Option<Box<i32>>>());
/// ```
///
/// Using `#[repr(C)]`.
///
/// ```
/// use std::mem;
///
/// #[repr(C)]
/// struct FieldStruct {
/// first: u8,
/// second: u16,
/// third: u8
/// }
///
/// // The size of the first field is 1, so add 1 to the size. Size is 1.
/// // The alignment of the second field is 2, so add 1 to the size for padding. Size is 2.
/// // The size of the second field is 2, so add 2 to the size. Size is 4.
/// // The alignment of the third field is 1, so add 0 to the size for padding. Size is 4.
/// // The size of the third field is 1, so add 1 to the size. Size is 5.
/// // Finally, the alignment of the struct is 2 (because the largest alignment amongst its
/// // fields is 2), so add 1 to the size for padding. Size is 6.
/// assert_eq!(6, mem::size_of::<FieldStruct>());
///
/// #[repr(C)]
/// struct TupleStruct(u8, u16, u8);
///
/// // Tuple structs follow the same rules.
/// assert_eq!(6, mem::size_of::<TupleStruct>());
///
/// // Note that reordering the fields can lower the size. We can remove both padding bytes
/// // by putting `third` before `second`.
/// #[repr(C)]
/// struct FieldStructOptimized {
/// first: u8,
/// third: u8,
/// second: u16
/// }
///
/// assert_eq!(4, mem::size_of::<FieldStructOptimized>());
///
/// // Union size is the size of the largest field.
/// #[repr(C)]
/// union ExampleUnion {
/// smaller: u8,
/// larger: u16
/// }
///
/// assert_eq!(2, mem::size_of::<ExampleUnion>());
/// ```
///
/// [alignment]: ./fn.align_of.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub const fn size_of<T>() -> usize {
    intrinsics::size_of::<T>()
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
/// Stage-0 bootstrap duplicate of `size_of`; the stage-0 compiler still
/// requires the `unsafe` block around the intrinsic. See the primary
/// definition above for the real documentation.
pub const fn size_of<T>() -> usize {
    unsafe { intrinsics::size_of::<T>() }
}
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
/// statically known size, e.g. a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val` can be used to get the dynamically-known size.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/first-edition/trait-objects.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
///
/// let x: [u8; 13] = [0; 13];
/// let y: &[u8] = &x;
/// assert_eq!(13, mem::size_of_val(y));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
    // The intrinsic reads the dynamic size from the reference's metadata
    // (e.g. slice length or vtable) for unsized `T`.
    unsafe { intrinsics::size_of_val(val) }
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of::<i32>());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of` instead", since = "1.2.0")]
#[cfg(not(stage0))]
pub fn min_align_of<T>() -> usize {
    intrinsics::min_align_of::<T>()
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of` instead", since = "1.2.0")]
#[cfg(stage0)]
/// Stage-0 bootstrap duplicate of `min_align_of`; the stage-0 compiler still
/// requires the `unsafe` block around the intrinsic.
pub fn min_align_of<T>() -> usize {
    unsafe { intrinsics::min_align_of::<T>() }
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of_val` instead", since = "1.2.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
    // Deprecated alias kept for compatibility; identical to `align_of_val`.
    unsafe { intrinsics::min_align_of_val(val) }
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of::<i32>());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(stage0))]
pub const fn align_of<T>() -> usize {
    intrinsics::min_align_of::<T>()
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(stage0)]
/// Stage-0 bootstrap duplicate of `align_of`; the stage-0 compiler still
/// requires the `unsafe` block around the intrinsic. See the primary
/// definition above for the real documentation.
pub const fn align_of<T>() -> usize {
    unsafe { intrinsics::min_align_of::<T>() }
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
    // For unsized `T` the alignment comes from the reference's metadata
    // (e.g. the vtable of a trait object).
    unsafe { intrinsics::min_align_of_val(val) }
}
/// Returns whether dropping values of type `T` matters.
///
/// This is purely an optimization hint, and may be implemented conservatively:
/// it may return `true` for types that don't actually need to be dropped.
/// As such always returning `true` would be a valid implementation of
/// this function. However if this function actually returns `false`, then you
/// can be certain dropping `T` has no side effect.
///
/// Low level implementations of things like collections, which need to manually
/// drop their data, should use this function to avoid unnecessarily
/// trying to drop all their contents when they are destroyed. This might not
/// make a difference in release builds (where a loop that has no side-effects
/// is easily detected and eliminated), but is often a big win for debug builds.
///
/// Note that `ptr::drop_in_place` already performs this check, so if your workload
/// can be reduced to some small number of drop_in_place calls, using this is
/// unnecessary. In particular note that you can drop_in_place a slice, and that
/// will do a single needs_drop check for all the values.
///
/// Types like Vec therefore just `drop_in_place(&mut self[..])` without using
/// needs_drop explicitly. Types like HashMap, on the other hand, have to drop
/// values one at a time and should use this API.
///
///
/// # Examples
///
/// Here's an example of how a collection might make use of needs_drop:
///
/// ```
/// use std::{mem, ptr};
///
/// pub struct MyCollection<T> {
/// # data: [T; 1],
/// /* ... */
/// }
/// # impl<T> MyCollection<T> {
/// # fn iter_mut(&mut self) -> &mut [T] { &mut self.data }
/// # fn free_buffer(&mut self) {}
/// # }
///
/// impl<T> Drop for MyCollection<T> {
/// fn drop(&mut self) {
/// unsafe {
/// // drop the data
/// if mem::needs_drop::<T>() {
/// for x in self.iter_mut() {
/// ptr::drop_in_place(x);
/// }
/// }
/// self.free_buffer();
/// }
/// }
/// }
/// ```
#[inline]
#[stable(feature = "needs_drop", since = "1.21.0")]
pub fn needs_drop<T>() -> bool {
    // Compile-time answer supplied by the compiler; may conservatively
    // return `true` (see the docs above).
    unsafe { intrinsics::needs_drop::<T>() }
}
/// Creates a value whose bytes are all zero.
///
/// This has the same effect as allocating space with
/// [`mem::uninitialized`][uninit] and then zeroing it out. It is useful for
/// [FFI] sometimes, but should generally be avoided.
///
/// There is no guarantee that an all-zero byte-pattern represents a valid value of
/// some type `T`. If `T` has a destructor and the value is destroyed (due to
/// a panic or the end of a scope) before being initialized, then the destructor
/// will run on zeroed data, likely leading to [undefined behavior][ub].
///
/// See also the documentation for [`mem::uninitialized`][uninit], which has
/// many of the same caveats.
///
/// [uninit]: fn.uninitialized.html
/// [FFI]: ../../book/first-edition/ffi.html
/// [ub]: ../../reference/behavior-considered-undefined.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// let x: i32 = unsafe { mem::zeroed() };
/// assert_eq!(0, x);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn zeroed<T>() -> T {
    // `init` produces an all-zero-bytes value; the caller is responsible for
    // `T` being valid when zeroed (see the safety discussion above).
    intrinsics::init()
}
/// Bypasses Rust's normal memory-initialization checks by pretending to
/// produce a value of type `T`, while doing nothing at all.
///
/// **This is incredibly dangerous and should not be done lightly. Deeply
/// consider initializing your memory with a default value instead.**
///
/// This is useful for [FFI] functions and initializing arrays sometimes,
/// but should generally be avoided.
///
/// [FFI]: ../../book/first-edition/ffi.html
///
/// # Undefined behavior
///
/// It is [undefined behavior][ub] to read uninitialized memory, even just an
/// uninitialized boolean. For instance, if you branch on the value of such
/// a boolean, your program may take one, both, or neither of the branches.
///
/// Writing to the uninitialized value is similarly dangerous. Rust believes the
/// value is initialized, and will therefore try to [`Drop`] the uninitialized
/// value and its fields if you try to overwrite it in a normal manner. The only way
/// to safely initialize an uninitialized value is with [`ptr::write`][write],
/// [`ptr::copy`][copy], or [`ptr::copy_nonoverlapping`][copy_no].
///
/// If the value does implement [`Drop`], it must be initialized before
/// it goes out of scope (and therefore would be dropped). Note that this
/// includes a `panic` occurring and unwinding the stack suddenly.
///
/// # Examples
///
/// Here's how to safely initialize an array of [`Vec`]s.
///
/// ```
/// use std::mem;
/// use std::ptr;
///
/// // Only declare the array. This safely leaves it
/// // uninitialized in a way that Rust will track for us.
/// // However we can't initialize it element-by-element
/// // safely, and we can't use the `[value; 1000]`
/// // constructor because it only works with `Copy` data.
/// let mut data: [Vec<u32>; 1000];
///
/// unsafe {
/// // So we need to do this to initialize it.
/// data = mem::uninitialized();
///
/// // DANGER ZONE: if anything panics or otherwise
/// // incorrectly reads the array here, we will have
/// // Undefined Behavior.
///
/// // It's ok to mutably iterate the data, since this
/// // doesn't involve reading it at all.
/// // (ptr and len are statically known for arrays)
/// for elem in &mut data[..] {
/// // *elem = Vec::new() would try to drop the
/// // uninitialized memory at `elem` -- bad!
/// //
/// // Vec::new doesn't allocate or do really
/// // anything. It's only safe to call here
/// // because we know it won't panic.
/// ptr::write(elem, Vec::new());
/// }
///
/// // SAFE ZONE: everything is initialized.
/// }
///
/// println!("{:?}", &data[0]);
/// ```
///
/// This example emphasizes exactly how delicate and dangerous using `mem::uninitialized`
/// can be. Note that the [`vec!`] macro *does* let you initialize every element with a
/// value that is only [`Clone`], so the following is semantically equivalent and
/// vastly less dangerous, as long as you can live with an extra heap
/// allocation:
///
/// ```
/// let data: Vec<Vec<u32>> = vec![Vec::new(); 1000];
/// println!("{:?}", &data[0]);
/// ```
///
/// [`Vec`]: ../../std/vec/struct.Vec.html
/// [`vec!`]: ../../std/macro.vec.html
/// [`Clone`]: ../../std/clone/trait.Clone.html
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [write]: ../ptr/fn.write.html
/// [copy]: ../intrinsics/fn.copy.html
/// [copy_no]: ../intrinsics/fn.copy_nonoverlapping.html
/// [`Drop`]: ../ops/trait.Drop.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn uninitialized<T>() -> T {
    // Delegates to the compiler intrinsic: the returned `T` is backed by
    // uninitialized memory, so reading it before `ptr::write` is UB
    // (see the extensive warning in the doc comment above).
    intrinsics::uninit()
}
/// Swaps the values at two mutable locations, without deinitializing either one.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// let mut x = 5;
/// let mut y = 42;
///
/// mem::swap(&mut x, &mut y);
///
/// assert_eq!(42, x);
/// assert_eq!(5, y);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn swap<T>(x: &mut T, y: &mut T) {
    // SAFETY: `&mut` references are always valid, properly aligned, and
    // guaranteed not to alias each other, which is exactly what the
    // non-overlapping pointer swap requires.
    unsafe {
        ptr::swap_nonoverlapping_one(x, y);
    }
}
/// Moves `src` into the referenced `dest`, returning the previous `dest` value.
///
/// Neither value is dropped.
///
/// # Examples
///
/// A simple example:
///
/// ```
/// use std::mem;
///
/// let mut v: Vec<i32> = vec![1, 2];
///
/// let old_v = mem::replace(&mut v, vec![3, 4, 5]);
/// assert_eq!(2, old_v.len());
/// assert_eq!(3, v.len());
/// ```
///
/// `replace` allows consumption of a struct field by replacing it with another value.
/// Without `replace` you can run into issues like these:
///
/// ```compile_fail,E0507
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let buf = self.buf;
/// self.buf = Vec::new();
/// buf
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset
/// `self.buf`. But `replace` can be used to disassociate the original value of `self.buf` from
/// `self`, allowing it to be returned:
///
/// ```
/// # #![allow(dead_code)]
/// use std::mem;
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// mem::replace(&mut self.buf, Vec::new())
/// }
/// }
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn replace<T>(dest: &mut T, mut src: T) -> T {
    // Swap the two values, then return what used to live in `dest`
    // (now held in the local `src`). Nothing is dropped or copied.
    swap(dest, &mut src);
    src
}
/// Disposes of a value.
///
/// While this does call the argument's implementation of [`Drop`][drop],
/// it will not release any borrows, as borrows are based on lexical scope.
///
/// This effectively does nothing for
/// [types which implement `Copy`](../../book/first-edition/ownership.html#copy-types),
/// e.g. integers. Such values are copied and _then_ moved into the function,
/// so the value persists after this function call.
///
/// This function is not magic; it is literally defined as
///
/// ```
/// pub fn drop<T>(_x: T) { }
/// ```
///
/// Because `_x` is moved into the function, it is automatically dropped before
/// the function returns.
///
/// [drop]: ../ops/trait.Drop.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let v = vec![1, 2, 3];
///
/// drop(v); // explicitly drop the vector
/// ```
///
/// Borrows are based on lexical scope, so this produces an error:
///
/// ```compile_fail,E0502
/// let mut v = vec![1, 2, 3];
/// let x = &v[0];
///
/// drop(x); // explicitly drop the reference, but the borrow still exists
///
/// v.push(4); // error: cannot borrow `v` as mutable because it is also
/// // borrowed as immutable
/// ```
///
/// An inner scope is needed to fix this:
///
/// ```
/// let mut v = vec![1, 2, 3];
///
/// {
/// let x = &v[0];
///
/// drop(x); // this is now redundant, as `x` is going out of scope anyway
/// }
///
/// v.push(4); // no problems
/// ```
///
/// Since [`RefCell`] enforces the borrow rules at runtime, `drop` can
/// release a [`RefCell`] borrow:
///
/// ```
/// use std::cell::RefCell;
///
/// let x = RefCell::new(1);
///
/// let mut mutable_borrow = x.borrow_mut();
/// *mutable_borrow = 1;
///
/// drop(mutable_borrow); // relinquish the mutable borrow on this slot
///
/// let borrow = x.borrow();
/// println!("{}", *borrow);
/// ```
///
/// Integers and other types implementing [`Copy`] are unaffected by `drop`.
///
/// ```
/// #[derive(Copy, Clone)]
/// struct Foo(u8);
///
/// let x = 1;
/// let y = Foo(2);
/// drop(x); // a copy of `x` is moved and dropped
/// drop(y); // a copy of `y` is moved and dropped
///
/// println!("x: {}, y: {}", x, y.0); // still available
/// ```
///
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Copy`]: ../../std/marker/trait.Copy.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn drop<T>(_x: T) { } // `_x` is moved in and dropped when the function returns.
/// Interprets `src` as having type `&U`, and then reads `src` without moving
/// the contained value.
///
/// This function will unsafely assume the pointer `src` is valid for
/// [`size_of::<U>`][size_of] bytes by transmuting `&T` to `&U` and then reading
/// the `&U`. It will also unsafely create a copy of the contained value instead of
/// moving out of `src`.
///
/// It is not a compile-time error if `T` and `U` have different sizes, but it
/// is highly encouraged to only invoke this function where `T` and `U` have the
/// same size. This function triggers [undefined behavior][ub] if `U` is larger than
/// `T`.
///
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [size_of]: fn.size_of.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// #[repr(packed)]
/// struct Foo {
/// bar: u8,
/// }
///
/// let foo_slice = [10u8];
///
/// unsafe {
/// // Copy the data from 'foo_slice' and treat it as a 'Foo'
/// let mut foo_struct: Foo = mem::transmute_copy(&foo_slice);
/// assert_eq!(foo_struct.bar, 10);
///
/// // Modify the copied data
/// foo_struct.bar = 20;
/// assert_eq!(foo_struct.bar, 20);
/// }
///
/// // The contents of 'foo_slice' should not have changed
/// assert_eq!(foo_slice, [10]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
    // Reinterpret the referent as a `U` and bitwise-copy it out. The caller
    // must guarantee `src` holds at least `size_of::<U>()` valid bytes, and
    // that a second logical copy of the value is acceptable.
    ptr::read(src as *const T as *const U)
}
/// Opaque type representing the discriminant of an enum.
///
/// See the [`discriminant`] function in this module for more information.
///
/// [`discriminant`]: fn.discriminant.html
#[stable(feature = "discriminant_value", since = "1.21.0")]
// The `PhantomData<fn() -> T>` ties the discriminant to its enum type
// without owning (or dropping) a value of `T`.
pub struct Discriminant<T>(u64, PhantomData<fn() -> T>);
// N.B. These trait implementations cannot be derived because we don't want any bounds on T.
// They all delegate to the raw `u64` discriminant value in field 0.
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> Copy for Discriminant<T> {}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> clone::Clone for Discriminant<T> {
    fn clone(&self) -> Self {
        *self
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::PartialEq for Discriminant<T> {
    fn eq(&self, rhs: &Self) -> bool {
        self.0 == rhs.0
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::Eq for Discriminant<T> {}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> hash::Hash for Discriminant<T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> fmt::Debug for Discriminant<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_tuple("Discriminant")
            .field(&self.0)
            .finish()
    }
}
/// Returns a value uniquely identifying the enum variant in `v`.
///
/// If `T` is not an enum, calling this function will not result in undefined behavior, but the
/// return value is unspecified.
///
/// # Stability
///
/// The discriminant of an enum variant may change if the enum definition changes. A discriminant
/// of some variant will not change between compilations with the same compiler.
///
/// # Examples
///
/// This can be used to compare enums that carry data, while disregarding
/// the actual data:
///
/// ```
/// use std::mem;
///
/// enum Foo { A(&'static str), B(i32), C(i32) }
///
/// assert!(mem::discriminant(&Foo::A("bar")) == mem::discriminant(&Foo::A("baz")));
/// assert!(mem::discriminant(&Foo::B(1)) == mem::discriminant(&Foo::B(2)));
/// assert!(mem::discriminant(&Foo::B(3)) != mem::discriminant(&Foo::C(3)));
/// ```
#[stable(feature = "discriminant_value", since = "1.21.0")]
pub fn discriminant<T>(v: &T) -> Discriminant<T> {
    unsafe {
        // Sound to wrap in a safe fn: per the docs above, a non-enum `T`
        // only yields an unspecified *value*, never undefined behavior.
        Discriminant(intrinsics::discriminant_value(v), PhantomData)
    }
}
/// A wrapper to inhibit compiler from automatically calling `T`’s destructor.
///
/// This wrapper is 0-cost.
///
/// # Examples
///
/// This wrapper helps with explicitly documenting the drop order dependencies between fields of
/// the type:
///
/// ```rust
/// use std::mem::ManuallyDrop;
/// struct Peach;
/// struct Banana;
/// struct Melon;
/// struct FruitBox {
/// // Immediately clear there’s something non-trivial going on with these fields.
/// peach: ManuallyDrop<Peach>,
/// melon: Melon, // Field that’s independent of the other two.
/// banana: ManuallyDrop<Banana>,
/// }
///
/// impl Drop for FruitBox {
/// fn drop(&mut self) {
/// unsafe {
/// // Explicit ordering in which field destructors are run specified in the intuitive
/// // location – the destructor of the structure containing the fields.
/// // Moreover, one can now reorder fields within the struct however much they want.
/// ManuallyDrop::drop(&mut self.peach);
/// ManuallyDrop::drop(&mut self.banana);
/// }
/// // After destructor for `FruitBox` runs (this function), the destructor for Melon gets
/// // invoked in the usual manner, as it is not wrapped in `ManuallyDrop`.
/// }
/// }
/// ```
#[stable(feature = "manually_drop", since = "1.20.0")]
#[lang = "manually_drop"]
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct ManuallyDrop<T: ?Sized> {
    // Private on purpose: the value can only be dropped via
    // `ManuallyDrop::drop` or extracted via `ManuallyDrop::into_inner`.
    // `repr(transparent)` keeps the layout identical to `T`.
    value: T,
}
impl<T> ManuallyDrop<T> {
    /// Wrap a value to be manually dropped.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::mem::ManuallyDrop;
    /// ManuallyDrop::new(Box::new(()));
    /// ```
    #[stable(feature = "manually_drop", since = "1.20.0")]
    #[rustc_const_unstable(feature = "const_manually_drop_new")]
    #[inline]
    pub const fn new(value: T) -> ManuallyDrop<T> {
        ManuallyDrop { value }
    }
    /// Extract the value from the `ManuallyDrop` container.
    ///
    /// This allows the value to be dropped again.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::mem::ManuallyDrop;
    /// let x = ManuallyDrop::new(Box::new(()));
    /// let _: Box<()> = ManuallyDrop::into_inner(x); // This drops the `Box`.
    /// ```
    #[stable(feature = "manually_drop", since = "1.20.0")]
    #[inline]
    pub fn into_inner(slot: ManuallyDrop<T>) -> T {
        // Moving the field out re-enables the value's normal drop.
        slot.value
    }
}
impl<T: ?Sized> ManuallyDrop<T> {
    /// Manually drops the contained value.
    ///
    /// If you have ownership of the value, you can use [`ManuallyDrop::into_inner`] instead.
    ///
    /// # Safety
    ///
    /// This function runs the destructor of the contained value and thus the wrapped value
    /// now represents uninitialized data. It is up to the user of this method to ensure the
    /// uninitialized data is not actually used.
    ///
    /// [`ManuallyDrop::into_inner`]: #method.into_inner
    #[stable(feature = "manually_drop", since = "1.20.0")]
    #[inline]
    pub unsafe fn drop(slot: &mut ManuallyDrop<T>) {
        // Runs `T`'s destructor in place without deallocating the storage.
        ptr::drop_in_place(&mut slot.value)
    }
}
#[stable(feature = "manually_drop", since = "1.20.0")]
impl<T: ?Sized> Deref for ManuallyDrop<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        // Transparent read-only access to the wrapped value.
        &self.value
    }
}
#[stable(feature = "manually_drop", since = "1.20.0")]
impl<T: ?Sized> DerefMut for ManuallyDrop<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // Transparent mutable access to the wrapped value.
        &mut self.value
    }
}
| 31.166504 | 98 | 0.612191 |
89715a7b8e9d4557066577979d8b9fd10f95a876 | 1,191 | // Elided lifetimes within the type of a const generic parameters is disallowed. This matches the
// behaviour of trait bounds where `fn foo<T: Ord<&u8>>() {}` is illegal. Though we could change
// elided lifetimes within the type of a const generic parameters to be 'static, like elided
// lifetimes within const/static items.
// revisions: full min
#![cfg_attr(full, feature(const_generics))]
#![cfg_attr(full, allow(incomplete_features))]
// Every `&u8` below elides its lifetime and must raise the elided-lifetime
// error; the `min` revision additionally forbids `&'static u8` here.
// (Comments must not be inserted between an item and its `//~` annotations,
// since those annotations are line-relative.)
struct A<const N: &u8>;
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden
trait B {}
impl<const N: &u8> A<N> {
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden
    fn foo<const M: &u8>(&self) {}
    //~^ ERROR `&` without an explicit lifetime name cannot be used here
    //[min]~^^ ERROR `&'static u8` is forbidden
}
impl<const N: &u8> B for A<N> {}
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden
fn bar<const N: &u8>() {}
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden
fn main() {}
| 37.21875 | 97 | 0.678421 |
e4fc4fa0626a727e8ee7aaf36f283a5a1802e4a8 | 292 | fn main() {
println!("{}", gcd(51, 15));
assert_eq!(gcd(51, 15), 3);
println!("{}", gcd(15, 51));
assert_eq!(gcd(15, 51), 3);
}
/// Greatest common divisor of `m` and `n`, via the iterative form of
/// Euclid's algorithm. By convention `gcd(x, 0) == x`.
fn gcd(m: u64, n: u64) -> u64 {
    let (mut a, mut b) = (m, n);
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }
    a
}
eb267911d1f20a21b6a378a4a5509fdd86a8649e | 2,242 | #![allow(non_snake_case)]
use crate::aliases::WinResult;
use crate::com::iunknown::ComPtr;
use crate::com::idl::ipersist::{IPersistT, IPersistVT};
use crate::ffi::{HRESULT, PVOID};
use crate::privs::hr_to_winresult_bool;
/// [`IMediaFilter`](crate::dshow::IMediaFilter) virtual table.
#[repr(C)]
pub struct IMediaFilterVT {
	/// Base vtable: `IMediaFilter` extends `IPersist`, so with `repr(C)` the
	/// `IPersist` slots are laid out first.
	pub IPersistVT: IPersistVT,
	pub Stop: fn(ComPtr) -> HRESULT,
	pub Pause: fn(ComPtr) -> HRESULT,
	pub Run: fn(ComPtr, i64) -> HRESULT,
	pub GetState: fn(ComPtr, i64, PVOID, *mut u32) -> HRESULT,
	pub SetSyncSource: fn(ComPtr, ComPtr) -> HRESULT,
	pub GetSyncSource: fn(ComPtr, *mut ComPtr) -> HRESULT,
}
/// [`IMediaFilter`](https://docs.microsoft.com/en-us/windows/win32/api/strmif/nn-strmif-imediafilter)
/// COM interface over [`IMediaFilterVT`](crate::dshow::vt::IMediaFilterVT).
///
/// Automatically calls
/// [`IUnknown::Release`](https://docs.microsoft.com/en-us/windows/win32/api/unknwn/nf-unknwn-iunknown-release)
/// when the object goes out of scope.
pub struct IMediaFilter(ComPtr);
// `impl_iunknown!` generates the `IUnknown` boilerplate; the integer
// arguments form this interface's GUID (IID).
impl_iunknown!(IMediaFilter, 0x56a86899, 0x0ad4, 0x11ce, 0xb03a, 0x0020af0ba770);
impl IPersistT for IMediaFilter {}
impl IMediaFilterT for IMediaFilter {}
/// Exposes the [`IMediaFilter`](crate::dshow::IMediaFilter) methods.
pub trait IMediaFilterT: IPersistT {
	/// [`IMediaFilter::Pause`](https://docs.microsoft.com/en-us/windows/win32/api/strmif/nf-strmif-imediafilter-pause)
	/// method.
	fn Pause(&self) -> WinResult<bool> {
		unsafe {
			// Reinterpret the COM pointer as this interface's vtable, call
			// the raw `Pause` slot, and map the HRESULT to a WinResult<bool>.
			let vt = &**(self.ptr().0 as *mut *mut IMediaFilterVT);
			hr_to_winresult_bool((vt.Pause)(self.ptr()))
		}
	}
	/// [`IMediaFilter::Run`](https://docs.microsoft.com/en-us/windows/win32/api/strmif/nf-strmif-imediafilter-run)
	/// method.
	fn Run(&self, start: i64) -> WinResult<bool> {
		unsafe {
			// `start` is forwarded verbatim to the raw `Run` slot.
			let vt = &**(self.ptr().0 as *mut *mut IMediaFilterVT);
			hr_to_winresult_bool((vt.Run)(self.ptr(), start))
		}
	}
	/// [`IMediaFilter::Stop`](https://docs.microsoft.com/en-us/windows/win32/api/strmif/nf-strmif-imediafilter-stop)
	/// method.
	fn Stop(&self) -> WinResult<bool> {
		unsafe {
			let vt = &**(self.ptr().0 as *mut *mut IMediaFilterVT);
			hr_to_winresult_bool((vt.Stop)(self.ptr()))
		}
	}
}
| 36.16129 | 117 | 0.680642 |
2f931eb6134999542566efe7e91fd7086adb22ff | 13,487 | pub mod groups;
mod btree;
mod complex;
mod fenwick_tree;
mod fft;
mod heap;
mod huffman;
// mod ord_btree;
mod ref_stack;
mod segment_tree;
mod stack_vec;
mod union_find;
// use complex::Complex64;
use fft::{eval_poly, fft2, ifft2, PrintPoly};
// use huffman::Huffman;
pub use btree::BTree;
pub use fenwick_tree::FenwickTree;
pub use heap::{MaxHeap, MinHeap};
// pub use ord_btree::{OrdBTree, OrdSize, OrdSizeOne}; // , RefMutBTreeElement};
pub use ref_stack::{OnStackRefMutStack, RefMutStack};
pub use segment_tree::SegmentTree;
pub use stack_vec::{
OuterLenStackVec, OuterLenStackVecDrain, StackVec, StackVecDrain, StackVecIntoIter,
};
pub use union_find::UnionFind;
use rand::prelude::*;
/// Runs `f` once and prints how long it took, padded with `num_tabs` tabs
/// after the name for column alignment.
///
/// The duration prints as `<major> <minor:03> <unit>` using the finest unit
/// family that fits: nanos below 1 ms, micros below 1 s, millis otherwise.
fn bench<F: FnOnce()>(name: &str, num_tabs: usize, f: F) {
    use std::time::{Duration, Instant};

    let started = Instant::now();
    f();
    let took = started.elapsed();

    print!("BENCH `{}` :{}", name, "\t".repeat(num_tabs));
    let (major, minor, unit) = if took < Duration::from_millis(1) {
        (took.as_micros() as u64, (took.as_nanos() % 1000) as u64, "nanos")
    } else if took < Duration::from_secs(1) {
        (took.as_millis() as u64, (took.as_micros() % 1000) as u64, "micros")
    } else {
        (took.as_secs(), took.subsec_millis() as u64, "millis")
    };
    println!("{} {:03} {}", major, minor, unit);
}
/// Benchmarks `SegmentTree` against `FenwickTree` on the same shuffled data:
/// build/extend, prefix sums, additive updates, and assignments.
#[allow(dead_code)]
fn bench_prefix_sum_dstruct() {
    let mut rng = SmallRng::from_entropy();
    const N: usize = 1 << 20; //1000_000;
    let mut a: Vec<_> = (0..N as i32).collect();
    a.shuffle(&mut rng);
    let mut st = SegmentTree::new(groups::NumAdditiveGroups::<i32>::new(), N);
    let mut ft = FenwickTree::new(groups::NumAdditiveGroups::<i32>::new());
    ft.reserve(N);
    bench("SegmentTree::build", 1, || st.build(a.iter().cloned()));
    bench("FenwickTree::extend", 1, || ft.extend(a.iter().cloned()));
    println!();
    bench("SegmentTree::prefix_sum", 1, || {
        for i in 0..N {
            st.sum(0, i);
        }
    });
    bench("FenwickTree::prefix_sum", 1, || {
        for i in 0..N {
            ft.prefix_sum(i);
        }
    });
    println!();
    bench("SegmentTree::update_add", 1, || {
        for i in 0..N {
            // Emulates an additive update via read-modify-write, mirroring
            // the Fenwick tree's native `update_add` below.
            st.update(i, i as i32 + st.get(i));
        }
    });
    bench("FenwickTree::update_add", 1, || {
        for i in 0..N {
            ft.update_add(i, i as _);
        }
    });
    println!();
    bench("SegmentTree::update_set", 1, || {
        for (i, &x) in a.iter().enumerate() {
            st.update(i, x);
        }
    });
    bench("FenwickTree::update_set", 1, || {
        for (i, &x) in a.iter().enumerate() {
            ft.update_set(i, x);
        }
    });
    println!();
    // Sanity check: after the final `update_set` pass both structures
    // should hold the original shuffled values again.
    assert_eq!(ft.get(3 * N / 4), a[3 * N / 4]);
    assert_eq!(*st.get(3 * N / 4), a[3 * N / 4]);
}
/// Benchmarks `fft2`/`ifft2` on 128 Ki complex points, with a "placebo" pass
/// that only regenerates random inputs so the RNG cost can be subtracted.
#[allow(dead_code)]
fn bench_fft_dstruct() {
    use num::{one, zero};
    let mut rng = SmallRng::from_entropy();
    let mut p: Vec<num::Complex<f64>> = vec![zero(); 0x20_000];
    bench("baseline(placebo)", 1, || {
        for _ in 0..20 {
            for x in p.iter_mut() {
                *x = num::Complex::new(rng.gen_range(-2.0..2.0), rng.gen_range(-2.0..2.0));
            }
        }
    });
    bench("ifft2", 1, || {
        for _ in 0..20 {
            for x in p.iter_mut() {
                *x = num::Complex::new(rng.gen_range(-2.0..2.0), rng.gen_range(-2.0..2.0));
            }
            ifft2(&mut p);
        }
    });
    bench("fft2", 1, || {
        for _ in 0..20 {
            for x in p.iter_mut() {
                *x = num::Complex::new(rng.gen_range(-2.0..2.0), rng.gen_range(-2.0..2.0));
            }
            fft2(&mut p);
        }
    });
    // Transform a half-zero / half-one step signal and print a subsample of
    // the spectrum plus one polynomial evaluation, for manual inspection.
    let p_mid = p.len() / 2;
    p[..p_mid].fill(zero());
    p[p_mid..].fill(one());
    fft2(&mut p);
    println!(
        "{:.3}",
        PrintPoly("p", &p.iter().step_by(p.len() / 32).collect::<Vec<_>>())
    );
    println!(
        "{}",
        eval_poly(&p, num::Complex::new(0.0, std::f64::consts::PI * 0.5).exp())
            / num::Complex::new(p.len() as f64, 0.0),
    );
}
/// Correctness check for the custom `BTree`: inserts N random key/value
/// pairs in K batches (verifying lookups after each batch), then removes
/// them in reverse batch order (verifying the remainder after each batch).
#[allow(dead_code)]
fn validate_btree_dstruct() {
    let mut rng = SmallRng::from_entropy();
    let mut map = BTree::new();
    const K: usize = 64; // 4;
    const N: usize = K * 1024; // 8;
    let mut values = Vec::with_capacity(N);
    for _ in 0..N {
        values.push(rng.gen_range(0..1000_000))
    }
    let mut insert_perm: Vec<_> = (0..N).collect();
    insert_perm.shuffle(&mut rng);
    let mut get_perm: Vec<_> = insert_perm.clone();
    let mut remove_perm: Vec<_> = insert_perm.clone();
    println!("[Validate custom BTree]");
    // println!("[Insertion Test]");
    for k in 0..K {
        // println!("k = {}", k);
        let start = k * N / K;
        let end = start + N / K;
        get_perm[start..end].shuffle(&mut rng);
        for &i in insert_perm[start..end].iter() {
            // println!("map IS {:#?};", map);
            // println!("INSERT ({}, {}) INTO map;", i, values[i]);
            assert!(map.insert(i, values[i]).is_none());
            // println!();
        }
        // println!("map IS {:#?};", map);
        // Every key inserted so far (not just this batch) must be readable.
        for &i in get_perm[..end].iter() {
            // println!("SELECT value FROM map WHERE k={};", i);
            assert_eq!(map.get(&i), Some(&values[i]));
        }
    }
    // println!("[\"Removal Test\"]");
    for k in (0..K).rev() {
        // println!("k = {}", k);
        let start = k * N / K;
        let end = start + N / K;
        remove_perm[start..end].shuffle(&mut rng);
        for &i in remove_perm[start..end].iter() {
            // println!("map IS {:#?};", map);
            // println!("REMOVE (k, value) FROM map WHERE k={};", i);
            assert_eq!(map.remove(&i), Some((i, values[i])));
            // println!();
        }
        // Keys in earlier batches were not removed yet and must survive.
        for &i in get_perm[..start].iter() {
            // println!("SELECT value FROM map WHERE k={};", i);
            assert_eq!(map.get(&i), Some(&values[i]));
        }
    }
    println!("BTree VALIDATED");
    println!();
}
// #[allow(dead_code)]
// fn validate_ord_btree_dstruct() {
// let mut rng = SmallRng::from_entropy();
// let mut map = OrdBTree::new();
//
// const K: usize = 64; // 16;
// const N: usize = K * 1024;
// // const K: usize = 4;
// // const N: usize = K * 8;
//
// let mut true_map = Vec::with_capacity(N);
//
// let mut insert_perm: Vec<_> = (0..N).collect();
// insert_perm.shuffle(&mut rng);
//
// let mut get_perm: Vec<_> = (0..N).collect();
// let mut remove_perm: Vec<_> = (0..N).collect();
// remove_perm.shuffle(&mut rng);
//
// println!("[Validate OrdBTree]");
// print!("Insertion Test: ");
// std::io::Write::flush(&mut std::io::stdout().lock()).unwrap();
// for k in 0..K {
// // println!("k = {}", k);
// let start = k * N / K;
// let end = start + N / K;
//
// get_perm[start..end].shuffle(&mut rng);
//
// for &val in insert_perm[start..end].iter() {
// let i = true_map.binary_search(&val).unwrap_err();
// true_map.insert(i, val);
//
// // println!("map IS {:#?};", map);
// // println!("INSERT AT {} value={} INTO map;", i, val);
// // map.check_sizes();
// assert!(map.insert(i, OrdSizeOne, val).is_ok());
// // println!();
// }
// // println!("map IS {:#?};", map);
//
// for &i in get_perm[..end].iter() {
// // println!("SELECT value AT {} FROM map;", i);
// assert_eq!(map.get(i), Some((i, &OrdSizeOne, &true_map[i])));
// }
// }
// println!("COMPLETE");
//
// print!("Removal Test: ");
// std::io::Write::flush(&mut std::io::stdout().lock()).unwrap();
// for k in (0..K).rev() {
// // println!("k = {}", k);
// let start = k * N / K;
// let end = start + N / K;
//
// for &val in remove_perm[start..end].iter() {
// let i = true_map.binary_search(&val).unwrap();
// assert_eq!(true_map.remove(i), val);
//
// // println!("map IS {:#?};", map);
// // println!("REMOVE value {} AT {} FROM map;", val, i);
// // map.check_sizes();
// assert_eq!(map.remove(i), Some((OrdSizeOne, val)));
// // println!();
// }
//
// for &i in get_perm[..start].iter() {
// // println!("SELECT value AT {} FROM map;", i);
// assert_eq!(map.get(i), Some((i, &OrdSizeOne, &true_map[i])));
// }
// }
// println!("COMPLETE");
// println!();
// println!("OrdBTree VALIDATED");
// println!();
// }
/// Benchmarks the custom `BTree` against `std::collections::BTreeMap` on
/// identical insert/get/remove workloads over shuffled key permutations.
#[allow(dead_code)]
fn bench_btree_dstruct() {
    let mut rng = SmallRng::from_entropy();
    const N: usize = 256 * 1024; // 256 KiB
    let values: Vec<_> = (0..N).map(|_| rng.gen_range(0..1000_000)).collect();
    let (insert_perm, get_perm, remove_perm) = {
        let mut insert_perm: Vec<_> = (0..N).collect();
        let mut get_perm: Vec<_> = (0..N).collect();
        let mut remove_perm: Vec<_> = (0..N).collect();
        insert_perm.shuffle(&mut rng);
        get_perm.shuffle(&mut rng);
        remove_perm.shuffle(&mut rng);
        (insert_perm, get_perm, remove_perm)
    };
    // Build-and-discard one full map outside the timed sections,
    // presumably to warm up the allocator — TODO confirm intent.
    drop(
        insert_perm
            .iter()
            .map(|&i| (i, values[i]))
            .collect::<std::collections::BTreeMap<_, _>>(),
    );
    let mut std_map = std::collections::BTreeMap::new();
    bench("std::collections::BTreeMap::insert", 2, || {
        for &i in insert_perm.iter() {
            assert!(std_map.insert(i, values[i]).is_none());
        }
    });
    bench("std::collections::BTreeMap::get", 2, || {
        for &i in get_perm.iter() {
            assert_eq!(std_map.get(&i), Some(&values[i]));
        }
    });
    bench("std::collections::BTreeMap::remove_entry", 1, || {
        for &i in remove_perm.iter() {
            assert_eq!(std_map.remove_entry(&i), Some((i, values[i])));
        }
    });
    println!();
    let mut map = BTree::new();
    // The custom-BTree passes also assert `len()` before/after each
    // mutation, so they do slightly more checking than the std passes.
    bench("BTree::insert", 5, || {
        for (len, &i) in insert_perm.iter().enumerate() {
            assert_eq!(map.len(), len);
            assert!(map.insert(i, values[i]).is_none());
            assert_eq!(map.len(), len + 1);
        }
    });
    bench("BTree::get", 5, || {
        for &i in get_perm.iter() {
            assert_eq!(map.get(&i), Some(&values[i]));
        }
    });
    bench("BTree::remove", 5, || {
        for (len, &i) in remove_perm.iter().enumerate() {
            assert_eq!(map.len(), N - len);
            assert_eq!(map.remove(&i), Some((i, values[i])));
            assert_eq!(map.len(), N - len - 1);
        }
    });
}
// #[allow(dead_code)]
// fn bench_ord_btree_dstruct() {
// let mut rng = SmallRng::from_entropy();
//
// const N: usize = 256 * 1024; // 256 KiB
//
// let (insert_perm, get_perm, remove_perm) = {
// let insert_perm: Vec<_> = (0..N).map(|n| rng.gen_range(0..=n)).collect();
// let remove_perm: Vec<_> = (1..=N).rev().map(|n| rng.gen_range(0..n)).collect();
// let mut get_perm: Vec<_> = (0..N).collect();
//
// get_perm.shuffle(&mut rng);
//
// (insert_perm, get_perm, remove_perm)
// };
//
// drop(get_perm.iter().collect::<std::collections::BTreeSet<_>>());
//
// let mut map = OrdBTree::new();
// bench("OrdBTree::insert", 4, || {
// for (len, &i) in insert_perm.iter().enumerate() {
// assert_eq!(map.len(), len);
// assert!(map.insert(i, OrdSizeOne, len).is_ok()); // lets hope it's correct (:
// assert_eq!(map.len(), len + 1);
// }
// });
// bench("OrdBTree::get", 5, || {
// for &i in get_perm.iter() {
// assert!(map.get(i).is_some()); // lets hope it's correct (:
// }
// });
// bench("OrdBTree::remove", 4, || {
// for (len, &i) in remove_perm.iter().enumerate() {
// assert_eq!(map.len(), N - len);
// assert!(map.remove(i).is_some()); // lets hope it's correct (:
// assert_eq!(map.len(), N - len - 1);
// }
// });
// }
/// Minimal insert-then-remove workload over the custom `BTree`, intended to
/// be run under valgrind (the equivalent std::BTreeMap run is kept commented
/// out for comparison).
#[allow(dead_code)]
fn valgrind_btree_dstruct() {
    const N: usize = 128 * 1024;
    let mut rng = SmallRng::from_entropy();
    let mut values = Vec::with_capacity(N);
    for _ in 0..N {
        values.push(rng.gen_range(0..1000_000u32))
    }
    let mut insert_perm: Vec<_> = (0..N).collect();
    insert_perm.shuffle(&mut rng);
    let mut remove_perm: Vec<_> = (0..N).collect();
    remove_perm.shuffle(&mut rng);
    // println!("std::collections::BTreeMap");
    // let mut map = std::collections::BTreeMap::new();
    // for &i in insert_perm.iter() {
    //     map.insert(i, values[i]);
    // }
    // for &i in remove_perm.iter() {
    //     assert_eq!(map.remove_entry(&i), Some((i, values[i])));
    // }
    println!("BTree");
    let mut map = BTree::new();
    for &i in insert_perm.iter() {
        map.insert(i, values[i]);
    }
    for &i in remove_perm.iter() {
        assert_eq!(map.remove(&i), Some((i, values[i])));
    }
}
/// Test entry point; currently only the B-tree benchmark is enabled, the
/// other calls are kept commented out for manual runs.
#[test]
pub fn main() {
    // validate_btree_dstruct();
    // validate_ord_btree_dstruct();
    bench_btree_dstruct();
    println!();
    // bench_ord_btree_dstruct();
    // valgrind_btree_dstruct();
}
| 29.383442 | 92 | 0.498851 |
9bafcca8314f2d437b52cd9f62e2524c2d65d0cd | 1,597 | use serde::{Deserialize, Serialize};
use std::fmt::Display;
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct Reference {
    /// Which source book the citation points into.
    pub source: Source,
    /// Page number within that source; used as the `#page=` anchor by `as_url`.
    pub page: u16,
}
impl Reference {
    /// Builds a URL that opens the source's PDF at the cited page
    /// (via a `#page=` fragment).
    pub fn as_url(&self) -> String {
        let base = match self.source {
            Source::BCP1979 => "https://www.episcopalchurch.org/wp-content/uploads/sites/2/2019/11/bcp_compressed.pdf",
            Source::EOW1 => "https://www.churchpublishing.org/siteassets/pdf/enriching-our-worship-1/enrichingourworship1.pdf",
            Source::LFF2018 => "https://www.episcopalcommonprayer.org/uploads/1/2/9/8/129843103/lesser_feasts_and_fasts_2018_final_pages.pdf",
        };
        format!("{}#page={}", base, self.page)
    }
}
impl Display for Reference {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Renders e.g. "BCP p. 355".
        write!(f, "{src} p. {page}", src = self.source, page = self.page)
    }
}
impl From<u16> for Reference {
fn from(page: u16) -> Self {
Self {
source: Source::default(),
page,
}
}
}
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum Source {
    /// Book of Common Prayer (1979).
    BCP1979,
    /// Enriching Our Worship 1.
    EOW1,
    /// Lesser Feasts and Fasts (2018).
    LFF2018,
}
impl Display for Source {
    /// Formats the source as its conventional short abbreviation
    /// ("BCP", "EOW 1", "LFF 2018").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Consistency fix: the original mixed `Source::` and `&Self::`
        // pattern forms in the same match; match ergonomics make them
        // equivalent, so all arms now use the `Source::` form.
        let name = match self {
            Source::BCP1979 => "BCP",
            Source::EOW1 => "EOW 1",
            Source::LFF2018 => "LFF 2018",
        };
        write!(f, "{}", name)
    }
}
impl Default for Source {
fn default() -> Self {
Self::BCP1979
}
}
| 26.616667 | 141 | 0.584847 |
01c297ed4f7bd2708b2557e2b63d9b71d66c44aa | 366 | //! Generated by `codegen/create_gen_bif.py`
//! Creates a lookup table of BIF functions
//! Config used: OTP22
#![allow(dead_code)]
use crate::{native_fun, defs::Arity, term::value::*};
pub struct BifTabItem {
    // `m`/`f`/`arity` identify the BIF as m:f/arity; `func` is the
    // native implementation invoked for it.
    pub m: Term,
    pub f: Term,
    pub arity: Arity,
    pub func: native_fun::NativeFn
}
// Empty with this config: the generator emitted no BIF entries for OTP22.
pub static BIF_TABLE: &'static [BifTabItem] = &[
];
| 18.3 | 53 | 0.653005 |
7ac75f30f468bb03d3a41bd5e4acd7bea8ab156d | 88 | net.sf.jasperreports.engine.json.expression.filter.evaluation.FilterExpressionEvaluator
| 44 | 87 | 0.897727 |
507a974dec001227725011929a4ea6895dce2ad0 | 702 | #[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    // Padding up to offset 0x10, where OHCIICR lives (per the doc attrs).
    _reserved0: [u8; 0x10],
    #[doc = "0x10 - OHCI Interrupt Configuration Register"]
    pub ohciicr: crate::Reg<ohciicr::OHCIICR_SPEC>,
    // Padding from 0x14 up to offset 0x30, where CKTRIM lives.
    _reserved1: [u8; 0x1c],
    #[doc = "0x30 - UTMI Clock Trimming Register"]
    pub cktrim: crate::Reg<cktrim::CKTRIM_SPEC>,
}
#[doc = "OHCIICR register accessor: an alias for `Reg<OHCIICR_SPEC>`"]
pub type OHCIICR = crate::Reg<ohciicr::OHCIICR_SPEC>;
#[doc = "OHCI Interrupt Configuration Register"]
pub mod ohciicr;
#[doc = "CKTRIM register accessor: an alias for `Reg<CKTRIM_SPEC>`"]
pub type CKTRIM = crate::Reg<cktrim::CKTRIM_SPEC>;
#[doc = "UTMI Clock Trimming Register"]
pub mod cktrim;
| 36.947368 | 70 | 0.696581 |
4ab452c8415b766420feeed0de0ff8a8dda8af42 | 1,770 | //! Defines the Channel field.
use crate::field::{Field, FromArray};
bitflags! {
    /// Flags describing the channel.
    pub struct Flags: u16 {
        // Note: no constants are defined for bits 0-3 here.
        /// Turbo channel.
        const TURBO = 0x0010;
        /// Complementary Code Keying (CCK) channel.
        const CCK = 0x0020;
        /// Orthogonal Frequency-Division Multiplexing (OFDM) channel.
        const OFDM = 0x0040;
        /// 2 GHz spectrum channel.
        const GHZ2 = 0x0080;
        /// 5 GHz spectrum channel.
        const GHZ5 = 0x0100;
        /// Only passive scan allowed.
        const PASSIVE = 0x0200;
        /// Dynamic CCK-OFDM channel.
        const DYNAMIC = 0x0400;
        /// Gaussian Frequency Shift Keying (GFSK) channel.
        const GFSK = 0x0800;
        /// GSM (900MHz) channel.
        const GSM = 0x1000;
        /// Static Turbo channel.
        const STURBO = 0x2000;
        /// Half rate channel.
        const HALF = 0x4000;
        /// Quarter rate channel.
        const QUARTER = 0x8000;
    }
}
/// Channel information.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Field, FromArray)]
#[field(align = 2, size = 4)]
pub struct Channel {
    /// Frequency in MHz.
    freq: u16,
    /// Flags describing the channel.
    #[field(size = 2)]
    flags: Flags,
}
impl Channel {
    /// Returns the channel frequency in MHz.
    pub const fn freq(&self) -> u16 {
        self.freq
    }
    /// Returns flags describing the channel.
    pub const fn flags(&self) -> Flags {
        self.flags
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::hex::FromHex;
    #[test]
    fn basic() {
        // Fixture bytes 9e 09 80 04: first two give freq 0x099e = 2462 MHz,
        // last two give flags 0x0480 = GHZ2 (0x0080) | DYNAMIC (0x0400)
        // — i.e. the fields are read little-endian from the byte stream.
        assert_eq!(
            Channel::from_hex("9e098004"),
            Channel {
                freq: 2462,
                flags: Flags::GHZ2 | Flags::DYNAMIC
            }
        );
    }
}
295ea7747d0df4593f5a567d6e69d1099a1464d4 | 893 | use super::*;
mod with_atom_destination;
mod with_local_pid_destination;
#[test]
fn without_atom_or_pid_destination_errors_badarg() {
    // Property test: for any destination term that is neither an atom nor a
    // local pid, `start_timer/3` must return badarg with this exact message.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                milliseconds(),
                strategy::term::is_not_send_after_destination(arc_process.clone()),
                strategy::term(arc_process.clone()),
            )
        },
        |(arc_process, milliseconds, destination, message)| {
            let time = arc_process.integer(milliseconds);
            prop_assert_badarg!(
                erlang::start_timer_3::result(arc_process.clone(), time, destination, message),
                format!(
                    "destination ({}) is neither a registered name (atom) nor a local pid",
                    destination
                )
            );
            Ok(())
        },
    );
}
| 27.90625 | 95 | 0.528555 |
5d98d9e46d6d92edc21ddcec610bf4832ed4af17 | 7,772 | // Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use super::BytesKey;
use crate::node::Node;
use crate::{Error, Hash, HashAlgorithm, Sha256, DEFAULT_BIT_WIDTH};
use cid::{multihash::Blake2b256, Cid};
use ipld_blockstore::BlockStore;
use serde::{de::DeserializeOwned, Serialize, Serializer};
use std::borrow::Borrow;
use std::error::Error as StdError;
use std::marker::PhantomData;
/// Implementation of the HAMT data structure for IPLD.
///
/// # Examples
///
/// ```
/// use ipld_hamt::Hamt;
///
/// let store = db::MemoryDB::default();
///
/// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
/// map.set(1, "a".to_string()).unwrap();
/// assert_eq!(map.get(&1).unwrap(), Some(&"a".to_string()));
/// assert_eq!(map.delete(&1).unwrap(), Some((1, "a".to_string())));
/// assert_eq!(map.get::<_>(&1).unwrap(), None);
/// let cid = map.flush().unwrap();
/// ```
#[derive(Debug)]
pub struct Hamt<'a, BS, V, K = BytesKey, H = Sha256> {
    // In-memory root node; mutations happen here and are persisted by `flush`.
    root: Node<K, V, H>,
    // Backing block store that serialized nodes are read from and written to.
    store: &'a BS,
    // Bit width used when indexing into the trie (defaults to DEFAULT_BIT_WIDTH).
    bit_width: u32,
    // Zero-sized marker tying the hash-algorithm type parameter to the struct.
    hash: PhantomData<H>,
}
impl<BS, V, K, H> Serialize for Hamt<'_, BS, V, K, H>
where
K: Serialize,
V: Serialize,
H: HashAlgorithm,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.root.serialize(serializer)
}
}
// Equality compares only the in-memory root nodes; the backing stores are
// never consulted.
impl<'a, K: PartialEq, V: PartialEq, S: BlockStore, H: HashAlgorithm> PartialEq
    for Hamt<'a, S, V, K, H>
{
    fn eq(&self, other: &Self) -> bool {
        self.root.eq(&other.root)
    }
}
impl<'a, BS, V, K, H> Hamt<'a, BS, V, K, H>
where
    K: Hash + Eq + PartialOrd + Serialize + DeserializeOwned,
    V: Serialize + DeserializeOwned,
    BS: BlockStore,
    H: HashAlgorithm,
{
    /// Constructs a new, empty hamt backed by `store` with the default bit width.
    pub fn new(store: &'a BS) -> Self {
        Self::new_with_bit_width(store, DEFAULT_BIT_WIDTH)
    }

    /// Construct hamt with a bit width
    pub fn new_with_bit_width(store: &'a BS, bit_width: u32) -> Self {
        Self {
            root: Node::default(),
            store,
            bit_width,
            hash: Default::default(),
        }
    }

    /// Lazily instantiate a hamt from this root Cid.
    pub fn load(cid: &Cid, store: &'a BS) -> Result<Self, Error> {
        Self::load_with_bit_width(cid, store, DEFAULT_BIT_WIDTH)
    }

    /// Lazily instantiate a hamt from this root Cid with a specified bit width.
    pub fn load_with_bit_width(cid: &Cid, store: &'a BS, bit_width: u32) -> Result<Self, Error> {
        match store.get(cid)? {
            Some(root) => Ok(Self {
                root,
                store,
                bit_width,
                hash: Default::default(),
            }),
            None => Err(Error::CidNotFound(cid.to_string())),
        }
    }

    /// Sets the root based on the Cid of the root node using the Hamt store
    pub fn set_root(&mut self, cid: &Cid) -> Result<(), Error> {
        match self.store.get(cid)? {
            Some(root) => self.root = root,
            None => return Err(Error::CidNotFound(cid.to_string())),
        }

        Ok(())
    }

    /// Returns a reference to the underlying store of the Hamt.
    pub fn store(&self) -> &'a BS {
        self.store
    }

    /// Inserts a key-value pair into the HAMT.
    ///
    /// If the HAMT did not have this key present, `None` is returned.
    ///
    /// If the HAMT did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though;
    ///
    /// # Examples
    ///
    /// ```
    /// use ipld_hamt::Hamt;
    ///
    /// let store = db::MemoryDB::default();
    ///
    /// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
    /// map.set(37, "a".to_string()).unwrap();
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.set(37, "b".to_string()).unwrap();
    /// map.set(37, "c".to_string()).unwrap();
    /// ```
    pub fn set(&mut self, key: K, value: V) -> Result<(), Error> {
        self.root.set(key, value, self.store, self.bit_width)
    }

    /// Returns a reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// `Hash` and `Eq` on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use ipld_hamt::Hamt;
    ///
    /// let store = db::MemoryDB::default();
    ///
    /// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
    /// map.set(1, "a".to_string()).unwrap();
    /// assert_eq!(map.get(&1).unwrap(), Some(&"a".to_string()));
    /// assert_eq!(map.get(&2).unwrap(), None);
    /// ```
    #[inline]
    pub fn get<Q: ?Sized>(&self, k: &Q) -> Result<Option<&V>, Error>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
        V: DeserializeOwned,
    {
        // `Node::get` already yields the `Option<&V>` we want; propagate the
        // error with `?` instead of manually re-wrapping each variant.
        Ok(self.root.get(k, self.store, self.bit_width)?)
    }

    /// Returns `true` if a value exists for the given key in the HAMT.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// `Hash` and `Eq` on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use ipld_hamt::Hamt;
    ///
    /// let store = db::MemoryDB::default();
    ///
    /// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
    /// map.set(1, "a".to_string()).unwrap();
    /// assert_eq!(map.contains_key(&1).unwrap(), true);
    /// assert_eq!(map.contains_key(&2).unwrap(), false);
    /// ```
    #[inline]
    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> Result<bool, Error>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        Ok(self.root.get(k, self.store, self.bit_width)?.is_some())
    }

    /// Removes a key from the HAMT, returning the value at the key if the key
    /// was previously in the HAMT.
    ///
    /// The key may be any borrowed form of the HAMT's key type, but
    /// `Hash` and `Eq` on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use ipld_hamt::Hamt;
    ///
    /// let store = db::MemoryDB::default();
    ///
    /// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
    /// map.set(1, "a".to_string()).unwrap();
    /// assert_eq!(map.delete(&1).unwrap(), Some((1, "a".to_string())));
    /// assert_eq!(map.delete(&1).unwrap(), None);
    /// ```
    pub fn delete<Q: ?Sized>(&mut self, k: &Q) -> Result<Option<(K, V)>, Error>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.root.remove_entry(k, self.store, self.bit_width)
    }

    /// Flush root and return Cid for hamt
    pub fn flush(&mut self) -> Result<Cid, Error> {
        self.root.flush(self.store)?;
        Ok(self.store.put(&self.root, Blake2b256)?)
    }

    /// Returns true if the HAMT has no entries
    pub fn is_empty(&self) -> bool {
        self.root.is_empty()
    }

    /// Iterates over each KV in the Hamt and runs a function on the values.
    ///
    /// This function will constrain all values to be of the same type
    ///
    /// # Examples
    ///
    /// ```
    /// use ipld_hamt::Hamt;
    ///
    /// let store = db::MemoryDB::default();
    ///
    /// let mut map: Hamt<_, _, usize> = Hamt::new(&store);
    /// map.set(1, 1).unwrap();
    /// map.set(4, 2).unwrap();
    ///
    /// let mut total = 0;
    /// map.for_each(|_, v: &u64| {
    ///    total += v;
    ///    Ok(())
    /// }).unwrap();
    /// assert_eq!(total, 3);
    /// ```
    #[inline]
    pub fn for_each<F>(&self, mut f: F) -> Result<(), Box<dyn StdError>>
    where
        V: DeserializeOwned,
        F: FnMut(&K, &V) -> Result<(), Box<dyn StdError>>,
    {
        self.root.for_each(self.store, &mut f)
    }
}
| 29.108614 | 97 | 0.539629 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.