hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
0ed6ef1fc48b7330ac4f899f5ab0dea1102ce625 | 1,550 | use config::{Config, ConfigError, Environment, File};
use serde_derive::{Deserialize, Serialize};
/// An outpoint (transaction hash + output index) locating an on-chain cell.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct OutpointConf {
    pub tx_hash: String,
    pub index: u32,
}

/// A script: its code hash plus the outpoint of the cell carrying the code.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct ScriptConf {
    pub code_hash: String,
    pub outpoint: OutpointConf,
}

/// Lock-script / type-script pair configuration.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct ScriptsConf {
    pub lockscript: ScriptConf,
    pub typescript: ScriptConf,
}

/// Location of the price-oracle cell.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct PriceOracle {
    pub outpoint: OutpointConf,
}

/// Location of the BTC-difficulty cell.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct BtcDifficulty {
    pub outpoint: OutpointConf,
}

/// Top-level application settings: deployed scripts plus oracle/difficulty
/// cell locations. Loaded from file + environment by `Settings::new`.
#[derive(Deserialize, Serialize, Default, Debug, Clone)]
pub struct Settings {
    pub lockscript: ScriptConf,
    pub typescript: ScriptConf,
    pub sudt: ScriptConf,
    pub price_oracle: PriceOracle,
    pub btc_difficulty_cell: BtcDifficulty,
}
impl Settings {
    /// Loads settings from the config file at `config_path`, then merges
    /// environment variables prefixed with "APP" (merged after the file,
    /// so — per the config crate's semantics — env values take precedence).
    pub fn new(config_path: &str) -> Result<Self, ConfigError> {
        let mut s = Config::new();
        s.merge(File::with_name(config_path))?;
        s.merge(Environment::with_prefix("app"))?;
        s.try_into()
    }
    /// Serializes the settings as TOML and writes them to `config_path`,
    /// mapping both serialization and I/O failures to a String error.
    pub fn write(&self, config_path: &str) -> Result<(), String> {
        let s = toml::to_string(self).map_err(|e| format!("toml serde error: {}", e))?;
        std::fs::write(config_path, &s)
            .map_err(|e| format!("fail to write scripts config. err: {}", e))?;
        Ok(())
    }
}
| 27.678571 | 87 | 0.667097 |
906eb2e8297fa789252aed4487144ba25c110966 | 7,367 | use std;
use std::iter;
use std::rc::Rc;
use std::cell::*;
use pcre::{CompileOption, Match, Pcre};
use super::*;
use term_color::*;
/// A decorative separator line: a mark repeated to fill the console width,
/// optionally wrapped in terminal color codes.
#[derive(Debug,Clone)]
pub struct DecorativeLine {
    /// Single mark, such as "*".
    mark: String,
    /// Colors.
    colors: Option<Colors>,
    /// Repeated marks to fill a given width, with colors.
    computed_line: String,
}
impl DecorativeLine {
    /// Builds a separator by repeating `mark` (falling back to "-" when
    /// empty) to fill `console_width`, wrapped in color escape codes when
    /// `colors` is set.
    pub fn new(mark: &str,
               colors: Option<Colors>,
               term: Term,
               console_width: usize)
               -> DecorativeLine {
        let m = if mark.len() > 0 {
            mark.to_string()
        } else {
            "-".to_string()
        };
        // NOTE(review): len() counts bytes, so a multi-byte UTF-8 mark fills
        // fewer than console_width columns — confirm whether that's intended.
        let len = m.len();
        let mut computed = String::new();
        if let Some(ref c) = colors {
            computed.push_str(c.fg_code());
            computed.push_str(c.bg_code());
        };
        computed.push_str(
            &std::iter::repeat(m.clone()).take(console_width / len).collect::<String>());
        if colors.is_some() {
            // Reset terminal attributes so the decoration doesn't bleed
            // into following output.
            computed.push_str(term.csi_reset());
        }
        DecorativeLine {
            mark: m,
            colors: colors,
            computed_line: computed,
        }
    }
    /// The precomputed, ready-to-print line.
    pub fn computed_line(&self) -> &String {
        &self.computed_line
    }
}
// Covers the fallback mark, an exact fill, and truncation when the mark's
// length doesn't divide the width evenly (5 / 2 == 2 repeats -> "*-*-").
#[test]
fn test_build_decorative_line() {
    assert_eq!("-----".to_string(),
               *DecorativeLine::new(&"".to_string(), None, Term::Xterm, 5).computed_line());
    assert_eq!("*****".to_string(),
               *DecorativeLine::new(&"*".to_string(), None, Term::Xterm, 5).computed_line());
    assert_eq!("*-*-".to_string(),
               *DecorativeLine::new(&"*-".to_string(), None, Term::Xterm, 5).computed_line());
}
/// A compiled PCRE together with its source pattern and negation flag.
#[derive(Debug)]
struct PcreEx {
    /// Original pattern text, including the leading '!' when negated.
    pattern: String,
    re: Pcre,
    /// When true the expression matches lines that do NOT match `re`.
    negate: bool,
}
impl PcreEx {
    /// Compiles a pattern into a `PcreEx`. A leading '!' negates the
    /// match ("!foo" matches lines that do NOT contain "foo").
    fn compile(orig_pattern: &str) -> Result<PcreEx, RuleError> {
        // starts_with avoids the out-of-bounds panic the previous
        // byte-indexing (`bytes[0]`) had on an empty pattern.
        let negate = orig_pattern.starts_with('!');
        let pattern = if negate {
            orig_pattern[1..].to_string()
        } else {
            orig_pattern.to_string()
        };
        let re = Pcre::compile(&pattern)
            .map_err(|_| RuleError::new(&format!("Invalid regex pattern: {}", pattern)))?;
        Ok(PcreEx {
            // Keep the original text (incl. '!') so Clone can recompile it.
            pattern: orig_pattern.to_string(),
            re,
            negate,
        })
    }

    /// Returns whether `line` matches, honoring negation.
    fn test(&self, line: &str) -> bool {
        let mut ret = self.re.exec(line).is_some();
        if self.negate {
            ret = !ret;
        }
        ret
    }

    /// Returns the (start, end) byte ranges to highlight in `line`.
    /// A matching negated pattern covers the whole line. When the regex
    /// has capture groups only the groups are reported, otherwise the
    /// whole match is.
    fn matches(&self, line: &str) -> Vec<(usize, usize)> {
        if self.negate {
            if self.test(line) {
                return vec![(0, line.len())];
            } else {
                return vec![];
            }
        }
        let cc = self.re.capture_count();
        let mut ret = vec![];
        for m in self.re.matches(line) {
            let mut pusher = |i| {
                if m.group_len(i) > 0 {
                    ret.push((m.group_start(i), m.group_end(i)))
                }
            };
            if cc == 0 {
                pusher(0);
            } else {
                for i in 1..(cc + 1) {
                    pusher(i);
                }
            }
        }
        ret
    }
}

// Moved out of the impl block: the #[test] attribute is only valid on
// free functions, not on associated functions.
#[test]
fn test_matches() {
    let pat1 = PcreEx::compile("abc").unwrap();
    let pat2 = PcreEx::compile("(a)b(c)").unwrap();
    assert_eq!(pat1.matches(&""), vec![]);
    assert_eq!(pat1.matches(&"1abc2"), vec![(1, 4)]);
    assert_eq!(pat1.matches(&"1abc2abc"), vec![(1, 4), (5, 8)]);
    assert_eq!(pat2.matches(&""), vec![]);
    assert_eq!(pat2.matches(&"1abc2"), vec![(1, 2), (3, 4)]);
    assert_eq!(pat2.matches(&"1abc2abc"),
               vec![(1, 2), (3, 4), (5, 6), (7, 8)]);
}
impl Clone for PcreEx {
    fn clone(&self) -> PcreEx {
        // Pcre handles are not Clone, so recompile from the saved pattern.
        // The unwrap is safe: this exact pattern compiled successfully once.
        PcreEx::compile(&self.pattern).unwrap()
    }
}
/// A single line-highlighting rule built around a regex.
#[derive(Debug, Clone)]
pub struct Rule {
    /// Original regex
    re: PcreEx,
    /// Optional guard: the rule only applies when this also matches.
    when_re: Option<PcreEx>,
    // NOTE(review): presumably the state names in which this rule is
    // active, and the state to switch to after a match — confirm with
    // the rule-engine caller.
    states: Vec<String>,
    next_state: Option<String>,
    // NOTE(review): presumably stops evaluating further rules — confirm.
    stop: bool,
    /// Colors for the matched spans only.
    match_colors: Option<Rc<Colors>>,
    /// Colors for the whole matching line.
    line_colors: Option<Rc<Colors>>,
    /// Decorations printed before/after a matching line.
    pre_line: Option<DecorativeLine>,
    post_line: Option<DecorativeLine>,
}
impl Rule {
    /// Creates a rule from a regex `pattern`; errors on an empty or
    /// invalid pattern.
    pub fn new(pattern: &str) -> Result<Rule, RuleError> {
        if pattern.is_empty() {
            return Err(RuleError::new("No pattern found"));
        }
        let re = PcreEx::compile(pattern)
            .map_err(|_| RuleError::new(&format!("Invalid regex pattern: {}", pattern)))?;
        Ok(Rule {
            re,
            when_re: None,
            states: vec![],
            next_state: None,
            stop: false,
            match_colors: None,
            line_colors: None,
            pre_line: None,
            post_line: None,
        })
    }

    /// Sets a guard pattern that must also match before this rule applies.
    pub fn set_when(&mut self, pattern: String) -> Result<&mut Rule, RuleError> {
        let re = PcreEx::compile(&pattern)
            .map_err(|_| RuleError::new(&format!("Invalid regex pattern: {}", pattern)))?;
        self.when_re = Some(re);
        Ok(self)
    }

    pub fn set_next_state(&mut self, state: String) -> &mut Rule {
        // `state` is already an owned String; the previous
        // `state.to_string()` allocated a needless copy.
        self.next_state = Some(state);
        self
    }

    pub fn set_states(&mut self, states: Vec<String>) -> &mut Rule {
        // Take ownership directly instead of cloning the moved-in Vec.
        self.states = states;
        self
    }

    pub fn set_stop(&mut self, stop: bool) -> &mut Rule {
        self.stop = stop;
        self
    }

    pub fn set_match_colors(&mut self, c: Colors) -> &mut Rule {
        self.match_colors = Some(Rc::new(c));
        self
    }

    pub fn set_line_colors(&mut self, c: Colors) -> &mut Rule {
        self.line_colors = Some(Rc::new(c));
        self
    }

    pub fn set_pre_line(&mut self, line: DecorativeLine) -> &mut Rule {
        self.pre_line = Some(line);
        self
    }

    pub fn set_post_line(&mut self, line: DecorativeLine) -> &mut Rule {
        self.post_line = Some(line);
        self
    }

    /// Returns highlight ranges in `line`, or an empty vec when the
    /// `when` guard (if any) does not match.
    pub fn matches(&self, line: &str) -> Vec<(usize, usize)> {
        if let Some(ref re) = self.when_re {
            if !re.test(line) {
                return vec![];
            }
        }
        self.re.matches(line)
    }

    pub fn pattern(&self) -> &String {
        &self.re.pattern
    }

    pub fn match_colors(&self) -> Option<Rc<Colors>> {
        // Cloning an Option<Rc<_>> is just a refcount bump; no need for
        // the previous `.as_ref().map(|x| x.clone())`.
        self.match_colors.clone()
    }

    pub fn line_colors(&self) -> Option<Rc<Colors>> {
        self.line_colors.clone()
    }

    pub fn states(&self) -> &Vec<String> {
        &self.states
    }

    pub fn next_state(&self) -> Option<&String> {
        self.next_state.as_ref()
    }

    pub fn pre_line(&self) -> Option<&DecorativeLine> {
        self.pre_line.as_ref()
    }

    pub fn post_line(&self) -> Option<&DecorativeLine> {
        self.post_line.as_ref()
    }

    pub fn stop(&self) -> bool {
        self.stop
    }
}
// Documents a zero-width-match edge case in the pcre wrapper; the
// assertion stays disabled until the upstream fix lands.
#[test]
fn test_matches_zero_width() {
    // Underscore-prefixed (and no longer `mut`) to silence unused-variable
    // warnings while the assertion below is commented out.
    let _pat1 = Pcre::compile("^").unwrap();
    // TODO Send a pull request to fix it.
    // assert_eq!(Rule::matches_inner(&mut pat1, &"abc"), vec![]);
}
// Smoke test: the chained builder-style setters compile and run.
#[test]
fn test_build_rule() {
    let mut rule = Rule::new(&String::from("xyz")).unwrap();
    rule.set_next_state("".to_string()).set_states(vec![]);
}
| 25.403448 | 94 | 0.51052 |
d7a1136310605d58a223ccd833af84166d8e6e18 | 427 | use super::system_prelude::*;
/// ECS system that applies each entity's `Rotate` step to its `Transform`
/// on every run via `rotate_2d`.
#[derive(Default)]
pub struct UpdateRotate;
impl<'a> System<'a> for UpdateRotate {
    type SystemData = (ReadStorage<'a, Rotate>, WriteStorage<'a, Transform>);
    fn run(&mut self, (rotate_store, mut transform_store): Self::SystemData) {
        // Joins over entities that have BOTH components.
        for (rotate, transform) in (&rotate_store, &mut transform_store).join()
        {
            // rotate.step is the rotation amount applied per run.
            transform.rotate_2d(rotate.step);
        }
    }
}
| 26.6875 | 79 | 0.644028 |
90ac32ed029bd0bb94091bd3413945f6d409caa5 | 3,739 | use crate::common::DataId;
use crate::config::PathStyle;
use crate::format::format_oas_template;
// Path fragments composed into JSON-API operation ids by OperationIdFactory.
const API_VERSION_PATH: &str = "/v1";
const API_CREATE_PATH: &str = "create";
const API_CREATE_AND_EXERCISE_PATH: &str = "create_and_exercise";
const API_EXERCISE_PATH: &str = "exercise";
const API_FETCH_PATH: &str = "fetch";
/// A Daml OAS operation id maker.
#[derive(Debug, Clone, Copy)]
pub struct OperationIdFactory {
    /// Determines the separator placed between the API path and the
    /// template id ('#' vs '/' in the examples on the methods below).
    path_style: PathStyle,
}
impl OperationIdFactory {
    /// Creates a factory with the given path style (const-friendly).
    pub const fn new(path_style: PathStyle) -> Self {
        Self {
            path_style,
        }
    }
    /// Format a `create` operation id.
    ///
    /// `/v1/create#Foo.Bar.MyTemplate`
    pub fn create_by_id(self, template_id: &DataId) -> String {
        format!(
            "{}/{}{}{}",
            API_VERSION_PATH,
            API_CREATE_PATH,
            // Separator character is chosen by the configured PathStyle.
            self.path_style.separator(),
            format_oas_template(template_id)
        )
    }
    /// Format a `create_and_exercise` operation id.
    ///
    /// `/v1/create_and_exercise#Foo.Bar.MyTemplate/MyChoice`
    /// `/v1/create_and_exercise/Foo.Bar.MyTemplate/MyChoice`
    ///
    /// Note that choice names are unique within a module and so strictly speaking we do not need to include the
    /// Template name here however we do so for clarity.
    pub fn create_and_exercise(self, template_id: &DataId, choice: &str) -> String {
        format!(
            "{}/{}{}{}/{}",
            API_VERSION_PATH,
            API_CREATE_AND_EXERCISE_PATH,
            self.path_style.separator(),
            format_oas_template(template_id),
            choice
        )
    }
    /// Format a `exercise` operation id.
    ///
    /// `/v1/exercise#Foo.Bar.MyTemplate/MyChoice`
    /// `/v1/exercise/Foo.Bar.MyTemplate/MyChoice`
    ///
    /// Note that choice names are unique within a module and so strictly speaking we do not need to include the
    /// Template name here however we do so for clarity.
    pub fn exercise_by_id(self, template_id: &DataId, choice: &str) -> String {
        format!(
            "{}/{}{}{}/{}",
            API_VERSION_PATH,
            API_EXERCISE_PATH,
            self.path_style.separator(),
            format_oas_template(template_id),
            choice
        )
    }
    /// Format a `fetch` operation id.
    ///
    /// `/v1/fetch#Foo.Bar.MyTemplate`
    /// `/v1/fetch/Foo.Bar.MyTemplate`
    pub fn fetch_by_id(self, template_id: &DataId) -> String {
        format!(
            "{}/{}{}{}",
            API_VERSION_PATH,
            API_FETCH_PATH,
            self.path_style.separator(),
            format_oas_template(template_id),
        )
    }
    /// Format a `fetch` (by key) operation id.
    ///
    /// `/v1/fetch#key/Foo.Bar.MyTemplate`
    /// `/v1/fetch/key/Foo.Bar.MyTemplate`
    pub fn fetch_by_key(self, template_id: &DataId) -> String {
        format!(
            "{}/{}{}key/{}",
            API_VERSION_PATH,
            API_FETCH_PATH,
            self.path_style.separator(),
            format_oas_template(template_id),
        )
    }
    /// Format a `exercise` (by key) operation id.
    ///
    /// `/v1/exercise#key/Foo.Bar.MyTemplate/MyChoice`
    /// `/v1/exercise/key/Foo.Bar.MyTemplate/MyChoice`
    ///
    /// Note that choice names are unique within a module and so strictly speaking we do not need to include the
    /// Template name here however we do so for clarity.
    pub fn exercise_by_key(self, template_id: &DataId, choice: &str) -> String {
        format!(
            "{}/{}{}key/{}/{}",
            API_VERSION_PATH,
            API_EXERCISE_PATH,
            self.path_style.separator(),
            format_oas_template(template_id),
            choice
        )
    }
}
| 31.420168 | 112 | 0.584113 |
c1a13242871c3cc0a0d8a83a0d52ba0807ecfd4b | 1,890 | use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::FromIterator;
/// A permission type usable in an `AccessVector`.
pub trait AccessPermission: Clone + Debug + Eq + Hash {
    /// The statically-known set of permissions allowed for this class.
    fn allowed_class() -> &'static AccessVector<Self>;
}
/// A set of permissions, backed by a `HashSet`.
#[derive(Clone, Debug)]
pub struct AccessVector<T>
    where T: AccessPermission
{
    av: HashSet<T>,
}
impl<T> AccessVector<T>
    where T: AccessPermission
{
    /// Builds an access vector from a list of permissions; duplicates
    /// collapse into a single set entry.
    pub fn new(av: Vec<T>) -> AccessVector<T> {
        AccessVector {
            av: av.into_iter().collect(),
        }
    }

    /// True when every permission in `perms` is also present in `self`.
    pub fn has(&self, perms: &AccessVector<T>) -> bool {
        perms.av.is_subset(&self.av)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Minimal permission enum for exercising AccessVector.
    #[derive(Clone, Eq, Hash, PartialEq, Debug)]
    pub enum Sample {
        Read,
        Write,
        Execute,
    }
    impl AccessPermission for Sample {
        fn allowed_class() -> &'static AccessVector<Sample> {
            // lazy_static provides the &'static lifetime the trait requires.
            lazy_static! {
                static ref ALLOWED: AccessVector<Sample> = AccessVector::new(vec![Sample::Read, Sample::Execute]);
            }
            &ALLOWED
        }
    }
    // `has` succeeds when the queried permissions are a subset.
    #[test]
    fn has_permission() {
        let permissions = AccessVector::new(vec![Sample::Read, Sample::Write, Sample::Execute]);
        let check = AccessVector::new(vec![Sample::Read]);
        assert_eq!(permissions.has(&check), true);
    }
    // ...and fails when any queried permission is missing.
    #[test]
    fn has_no_permission() {
        let permissions = AccessVector::new(vec![Sample::Write, Sample::Execute]);
        let check = AccessVector::new(vec![Sample::Read]);
        assert_eq!(permissions.has(&check), false);
    }
    #[test]
    fn in_allowed_class() {
        let check_true = AccessVector::new(vec![Sample::Read]);
        let check_false = AccessVector::new(vec![Sample::Write]);
        assert_eq!(Sample::allowed_class().has(&check_true), true);
        assert_eq!(Sample::allowed_class().has(&check_false), false);
    }
}
| 24.230769 | 114 | 0.593651 |
8a07f5f3aea3ea525d2a9fe4c1009cf76fc4109e | 10,823 | // Copyright (c) 2021 Quark Container Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::str;
use alloc::string::String;
use alloc::string::ToString;
use alloc::vec::Vec;
use alloc::slice;
use super::super::qlib::common::*;
use super::super::qlib::linux_def::*;
use super::super::util::cstring::*;
use super::super::qlib::addr::*;
use super::super::qlib::mem::seq::*;
use super::super::task::*;
impl Task {
    //Copy a vec from user memory
    /// Reads `size` elements of `T` from user address `addr` into a Vec.
    /// `addr == 0 && size == 0` yields an empty Vec.
    pub fn CopyIn<T: Sized + Copy>(&self, addr: u64, size: usize) -> Result<Vec<T>> {
        if addr == 0 && size == 0 {
            return Ok(Vec::new())
        }
        let mut res = Vec::with_capacity(size);
        let slice = self.GetSlice::<T>(addr, size)?;
        for i in 0..size {
            res.push(slice[i]);
        }
        return Ok(res);
    }
    //Copy a slice to user memory
    /// Copies `src` into user memory at `dst`. `len` is the destination
    /// capacity and must be >= src.len(), otherwise ERANGE.
    pub fn CopyOutSlice<T: Sized + Copy>(&self, src: &[T], dst: u64, len: usize) -> Result<()> {
        if len < src.len() {
            return Err(Error::SysError(SysErr::ERANGE));
        }
        let dst = self.GetSliceMut::<T>(dst, src.len())?;
        for i in 0..dst.len() {
            dst[i] = src[i]
        }
        return Ok(())
    }
    //Copy an Object from user memory
    pub fn CopyInObj<T: Sized + Copy>(&self, src: u64, dst: &mut T) -> Result<()> {
        *dst = *self.GetType::<T>(src)?;
        return Ok(())
    }
    //Copy an Object to user memory
    pub fn CopyOutObj<T: Sized + Copy>(&self, src: &T, dst: u64) -> Result<()> {
        *self.GetTypeMut::<T>(dst)? = *src;
        return Ok(())
    }
    //Copy an str to user memory
    /// Writes `s` as a NUL-terminated C string into user memory at `vAddr`
    /// (`len` is the destination capacity).
    pub fn CopyOutString(&self, vAddr: u64, len: usize, s: &str) -> Result<()> {
        let str = CString::New(s);
        self.CopyOutSlice(str.Slice(), vAddr, len)
    }
    // CopyStringIn copies a NUL-terminated string of unknown length from the
    // memory mapped at addr in uio and returns it as a string (not including the
    // trailing NUL). If the length of the string, including the terminating NUL,
    // would exceed maxlen, CopyStringIn returns the string truncated to maxlen and
    // ENAMETOOLONG.
    pub fn CopyInString(&self, addr: u64, maxlen: usize) -> (String, Result<()>) {
        // allowPartial=true: clamp maxlen to the accessible range.
        let maxlen = match self.CheckPermission(addr, maxlen as u64, false, true) {
            Err(e) => return ("".to_string(), Err(e)),
            Ok(l) => l as usize
        };
        let slice = match self.GetSlice::<u8>(addr, maxlen) {
            Err(e) => return ("".to_string(), Err(e)),
            Ok(s) => s,
        };
        for i in 0..maxlen {
            if slice[i] == 0 {
                return (str::from_utf8(&slice[0..i]).unwrap().to_string(), Ok(()));
            }
        }
        // No NUL found within maxlen: return the truncated string plus error.
        return (str::from_utf8(&slice[0..maxlen]).unwrap().to_string(), Err(Error::SysError(SysErr::ENAMETOOLONG)));
    }
    // CopyInVector copies a NULL-terminated vector of strings from the task's
    // memory. The copy will fail with syscall.EFAULT if it traverses
    // user memory that is unmapped or not readable by the user.
    //
    // maxElemSize is the maximum size of each individual element.
    //
    // maxTotalSize is the maximum total length of all elements plus the total
    // number of elements. For example, the following strings correspond to
    // the following set of sizes:
    //
    //   { "a", "b", "c" } => 6 (3 for lengths, 3 for elements)
    //   { "abc" }         => 4 (3 for length, 1 for elements)
    pub fn CopyInVector(&self, addr: u64, maxElemSize: usize, maxTotalSize: i32) -> Result<Vec<String>> {
        if addr == 0 {
            return Ok(Vec::new());
        }
        let mut maxTotalSize = maxTotalSize;
        // Read up to maxElemSize pointers (8 bytes each) from user memory.
        let maxlen = self.CheckPermission(addr, maxElemSize as u64 * 8, false, true)? as usize;
        let addresses = self.GetSlice::<u64>(addr, maxlen / 8)?;
        let mut v = Vec::new();
        for i in 0..addresses.len() {
            let ptr = addresses[i];
            if ptr == 0 {
                // NULL pointer terminates the vector.
                return Ok(v);
            }
            // Each string has a zero terminating byte counted, so copying out a string
            // requires at least one byte of space. Also, see the calculation below.
            if maxTotalSize <= 0 {
                return Err(Error::SysError(SysErr::ENOMEM));
            }
            let mut thisMax = maxElemSize;
            if (maxTotalSize as usize) < thisMax {
                thisMax = maxTotalSize as usize;
            }
            let maxlen = self.CheckPermission(ptr, thisMax as u64, false, true)? as usize;
            let (str, err) = self.CopyInString(ptr, maxlen);
            match err {
                Err(e) => return Err(e),
                _ => (),
            }
            let strlen = str.len();
            v.push(str);
            // Budget: string bytes + 1 for the terminating NUL.
            maxTotalSize -= (strlen as i32) + 1;
        }
        return Ok(v)
    }
    /// Borrows user memory at `vAddr` as a read-only slice of `count` `T`s
    /// after a read-permission check.
    pub fn GetSlice<T: Sized>(&self, vAddr: u64, count: usize) -> Result<&[T]> {
        let recordLen = core::mem::size_of::<T>();
        let len = self.CheckPermission(vAddr, count as u64 * recordLen as u64, false, false)?;
        let t: *const T = vAddr as *const T;
        let slice = unsafe { slice::from_raw_parts(t, (len as usize) / recordLen) };
        return Ok(slice)
    }
    /// Borrows user memory at `vAddr` as a mutable slice of `count` `T`s.
    pub fn GetSliceMut<T: Sized>(&self, vAddr: u64, count: usize) -> Result<&mut [T]> {
        let recordLen = core::mem::size_of::<T>();
        // only check whether the address is valid, if readonly, will cow
        let len = self.CheckPermission(vAddr, count as u64 * recordLen as u64, true, false)?;
        let t: *mut T = vAddr as *mut T;
        let slice = unsafe { slice::from_raw_parts_mut(t, (len as usize) / recordLen) };
        return Ok(slice)
    }
    /// Verifies every iovec in `iovs` is accessible with the requested mode.
    pub fn CheckIOVecPermission(&self, iovs: &[IoVec], writeReq: bool) -> Result<()> {
        for iov in iovs {
            self.CheckPermission(iov.start, iov.len as u64, writeReq, false)?;
        }
        return Ok(())
    }
    /// Borrows user memory at `vAddr` as a single `&T`.
    pub fn GetType<T: Sized>(&self, vAddr: u64) -> Result<&T> {
        let len = core::mem::size_of::<T>();
        self.CheckPermission(vAddr, len as u64, false, false)?;
        let t: *const T = vAddr as *const T;
        return Ok(unsafe { &(*t) })
    }
    /// Borrows user memory at `vAddr` as a single `&mut T`.
    pub fn GetTypeMut<T: Sized>(&self, vAddr: u64) -> Result<&mut T> {
        let len = core::mem::size_of::<T>();
        // only check whether the address is valid, if readonly, will cow
        // NOTE(review): unlike GetSliceMut this passes writeReq=false even
        // though it hands out a &mut — confirm whether write permission
        // should be requested here.
        self.CheckPermission(vAddr, len as u64, false, false)?;
        let t: *mut T = vAddr as *mut T;
        return Ok(unsafe { &mut (*t) })
    }
    // check whether the address range is legal.
    // 1. whether the range belong to user's space
    // 2. Whether the read/write permission meet requirement
    // 3. if need cow, fix the page.
    pub fn CheckPermission(&self, vAddr: u64, len: u64, writeReq: bool, allowPartial: bool) -> Result<u64> {
        if len == 0 {
            return Ok(0)
        }
        if vAddr == 0 {
            return Err(Error::SysError(SysErr::EFAULT))
        }
        return self.mm.FixPermission(self, vAddr, len, writeReq, allowPartial)
    }
    #[cfg(not(test))]
    pub fn VirtualToPhy(&self, vAddr: u64) -> Result<u64> {
        let (addr, _) = self.mm.VirtualToPhy(vAddr)?;
        return Ok(addr);
    }
    // Test builds skip translation and treat addresses as identity-mapped.
    #[cfg(test)]
    pub fn VirtualToPhy(&self, vAddr: u64) -> Result<u64> {
        return Ok(vAddr)
    }
    /// Borrows a user-space iovec array as a mutable slice.
    pub fn IovsFromAddr(&self, iovs: u64, iovsnum: usize) -> Result<&mut [IoVec]> {
        return self.GetSliceMut::<IoVec>(iovs, iovsnum);
    }
    /// Translates the virtual range [start, start+len) into physical IoVecs,
    /// appending to `output` and merging physically-contiguous entries.
    pub fn V2P(&self, start: u64, len: u64, output: &mut Vec<IoVec>, writable: bool) -> Result<()> {
        if len == 0 {
            return Ok(())
        }
        self.CheckPermission(start, len, writable, false)?;
        //etcd has such weird call, handle that with special case
        // NOTE(review): this branch is unreachable — len == 0 already
        // returned above and len is not modified in between. Confirm
        // whether the early return above was meant to be removed instead.
        if len == 0 {
            match self.VirtualToPhy(start) {
                Err(e) => {
                    info!("convert to phyaddress fail, addr = {:x} e={:?}", start, e);
                    return Err(Error::SysError(SysErr::EFAULT))
                }
                Ok(pAddr) => {
                    output.push(IoVec {
                        start: pAddr,
                        len: 0, //iov.len,
                    });
                }
            }
            return Ok(())
        }
        let mut start = start;
        let end = start + len;
        while start < end {
            // Advance one page at a time (to the next page boundary).
            let next = if Addr(start).IsPageAligned() {
                start + MemoryDef::PAGE_SIZE
            } else {
                Addr(start).RoundUp().unwrap().0
            };
            match self.VirtualToPhy(start) {
                Err(e) => {
                    info!("convert to phyaddress fail, addr = {:x} e={:?}", start, e);
                    return Err(Error::SysError(SysErr::EFAULT))
                }
                Ok(pAddr) => {
                    let iov = IoVec {
                        start: pAddr,
                        len: if end < next {
                            (end - start) as usize
                        } else {
                            (next - start) as usize
                        },
                    };
                    let cnt = output.len();
                    if cnt > 0 && output[cnt - 1].End() == iov.start {
                        // use the last entry
                        output[cnt - 1].len += iov.len;
                    } else {
                        output.push(iov);
                    }
                }
            }
            start = next;
        }
        return Ok(())
    }
    /// Translates a single virtual IoVec into physical IoVecs.
    pub fn V2PIov(&self, iov: &IoVec, output: &mut Vec<IoVec>, writable: bool) -> Result<()> {
        return self.V2P(iov.start, iov.len as u64, output, writable)
    }
    /// Translates a set of virtual IoVecs into physical IoVecs.
    pub fn V2PIovs(&self, iovs: &[IoVec], writable: bool, output: &mut Vec<IoVec>) -> Result<()> {
        for iov in iovs {
            self.V2PIov(iov, output, writable)?;
        }
        return Ok(())
    }
    /// Translates every block in a BlockSeq into physical IoVecs.
    pub fn V2PBlockSeq(&self, bs: BlockSeq, output: &mut Vec<IoVec>, writable: bool) -> Result<()> {
        let mut bs = bs;
        while !bs.IsEmpty() {
            let iov = bs.Head();
            self.V2PIov(&iov, output, writable)?;
            bs = bs.Tail();
        }
        return Ok(())
    }
} | 33.404321 | 116 | 0.525363 |
28ce145d7e3ed3009a626bcdc6ab091b4efc9fd3 | 6,371 | use std::fmt::Display;
use std::fmt::Formatter;
use std::ops::Add;
use strum_macros::EnumString;
/// Movement in a direction of type `T`, one unit or `steps` units at a time.
pub trait Step<T: Copy>
where
    Self: Copy + Clone,
{
    fn step(&self, direction: T) -> Self;
    fn step_by(&self, direction: T, steps: Scalar) -> Self;
}
/// Coordinate scalar type used throughout this module.
pub type Scalar = i32;
/// A 2D grid position. The Step impls in this module use a screen-style
/// convention where y grows downward (North/Up is -y).
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct Position {
    pub x: Scalar,
    pub y: Scalar,
}
impl Position {
    pub fn new(x: Scalar, y: Scalar) -> Position {
        Position { x, y }
    }
}
impl Display for Position {
    /// Renders the position as "(x,y)".
    fn fmt(&self, out: &mut Formatter) -> Result<(), std::fmt::Error> {
        write!(out, "({},{})", self.x, self.y)
    }
}
impl Step<Cardinal> for Position {
    /// Moves one unit in `direction` (North is -y).
    fn step(&self, direction: Cardinal) -> Self {
        // Delegate to step_by so the offset table exists only once.
        self.step_by(direction, 1)
    }

    /// Moves `steps` units in `direction`.
    fn step_by(&self, direction: Cardinal, steps: Scalar) -> Self {
        use crate::position::Cardinal::*;
        let (x, y) = match direction {
            North => (self.x, self.y - steps),
            South => (self.x, self.y + steps),
            West => (self.x - steps, self.y),
            East => (self.x + steps, self.y),
        };
        Position { x, y }
    }
}
impl Step<Direction> for Position {
fn step(&self, direction: Direction) -> Self {
use crate::position::Direction::*;
let (x, y) = match direction {
Up => (self.x, self.y - 1),
Down => (self.x, self.y + 1),
Right => (self.x + 1, self.y),
Left => (self.x - 1, self.y),
};
Position { x, y }
}
fn step_by(&self, direction: Direction, steps: i32) -> Self {
use crate::position::Direction::*;
let (x, y) = match direction {
Up => (self.x, self.y - steps),
Down => (self.x, self.y + steps),
Right => (self.x + steps, self.y),
Left => (self.x - steps, self.y),
};
Position { x, y }
}
}
// Conversions from coordinate pairs. The usize variants cast with `as`,
// which truncates values that exceed Scalar's (i32) range.
impl From<(Scalar, Scalar)> for Position {
    fn from(pos: (Scalar, Scalar)) -> Self {
        Position { x: pos.0, y: pos.1 }
    }
}
impl From<&(Scalar, Scalar)> for Position {
    fn from(pos: &(Scalar, Scalar)) -> Self {
        Position { x: pos.0, y: pos.1 }
    }
}
impl From<(usize, usize)> for Position {
    fn from(pos: (usize, usize)) -> Self {
        Position {
            x: pos.0 as Scalar,
            y: pos.1 as Scalar,
        }
    }
}
impl From<&(usize, usize)> for Position {
    fn from(pos: &(usize, usize)) -> Self {
        Position {
            x: pos.0 as Scalar,
            y: pos.1 as Scalar,
        }
    }
}
// Component-wise addition for every owned/borrowed operand combination.
impl Add for Position {
    type Output = Position;
    fn add(self, rhs: Position) -> Position {
        (self.x + rhs.x, self.y + rhs.y).into()
    }
}
impl Add for &Position {
    type Output = Position;
    fn add(self, rhs: &Position) -> Position {
        (self.x + rhs.x, self.y + rhs.y).into()
    }
}
impl Add<Position> for &Position {
    type Output = Position;
    fn add(self, rhs: Position) -> Position {
        (self.x + rhs.x, self.y + rhs.y).into()
    }
}
impl Add<&Position> for Position {
    type Output = Position;
    fn add(self, rhs: &Position) -> Position {
        (self.x + rhs.x, self.y + rhs.y).into()
    }
}
/// A relative turn, used by `Cardinal::turn` and `Direction::turn`.
#[derive(EnumString, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum Turn {
    Left,
    Right,
}
/// Compass directions (North corresponds to -y in `Step<Cardinal>`).
#[derive(EnumString, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum Cardinal {
    North,
    South,
    East,
    West,
}
impl Cardinal {
pub fn turn(self, turn: Turn) -> Cardinal {
use crate::position::Cardinal::*;
use crate::position::Turn::*;
match (self, turn) {
(North, Left) => West,
(North, Right) => East,
(South, Left) => East,
(South, Right) => West,
(East, Left) => North,
(East, Right) => South,
(West, Left) => South,
(West, Right) => North,
}
}
}
/// Screen-relative directions (Up corresponds to -y in `Step<Direction>`).
#[derive(EnumString, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum Direction {
    Up,
    Down,
    Right,
    Left,
}
impl Direction {
    /// Returns the direction after a 90° rotation.
    pub fn turn(self, turn: Turn) -> Direction {
        use crate::position::Direction::*;
        // Dispatch on the turn first; each inner match is one quarter turn.
        match turn {
            Turn::Left => match self {
                Up => Left,
                Left => Down,
                Down => Right,
                Right => Up,
            },
            Turn::Right => match self {
                Up => Right,
                Right => Down,
                Down => Left,
                Left => Up,
            },
        }
    }
}
// Cardinal <-> Direction are isomorphic:
// North=Up, South=Down, East=Right, West=Left.
impl From<Cardinal> for Direction {
    fn from(cardinal: Cardinal) -> Self {
        use crate::position::Cardinal::*;
        use crate::position::Direction::*;
        match cardinal {
            North => Up,
            South => Down,
            East => Right,
            West => Left,
        }
    }
}
impl From<Direction> for Cardinal {
    fn from(direction: Direction) -> Self {
        use crate::position::Cardinal::*;
        use crate::position::Direction::*;
        match direction {
            Up => North,
            Down => South,
            Right => East,
            Left => West,
        }
    }
}
/// Yields the four edge-adjacent (von Neumann) neighbors of `position`,
/// in N, E, S, W order (y-down convention).
pub fn connect<P>(position: P) -> impl Iterator<Item = Position>
where
    P: Into<Position>,
{
    static OFFSETS: [Position; 4] = [
        Position { x: 0, y: -1 },
        Position { x: 1, y: 0 },
        Position { x: 0, y: 1 },
        Position { x: -1, y: 0 },
    ];
    let position: Position = position.into();
    OFFSETS.iter().map(move |&offset| position + offset)
}
/// Yields all eight surrounding (Moore) neighbors of `position`,
/// clockwise starting from north (y-down convention).
pub fn connect8<P>(position: P) -> impl Iterator<Item = Position>
where
    P: Into<Position>,
{
    static OFFSETS: [Position; 8] = [
        Position { x: 0, y: -1 },
        Position { x: 1, y: -1 },
        Position { x: 1, y: 0 },
        Position { x: 1, y: 1 },
        Position { x: 0, y: 1 },
        Position { x: -1, y: 1 },
        Position { x: -1, y: 0 },
        Position { x: -1, y: -1 },
    ];
    let position: Position = position.into();
    OFFSETS.iter().map(move |&offset| position + offset)
}
| 24.886719 | 79 | 0.502433 |
e8b08377558a9bb02d80bb9a79851de6e442a3b2 | 26,654 | use crate::{
prelude::*,
use_string_cache,
utils::{get_iter_capacity, NoNull},
};
use ahash::AHashMap;
use arrow::array::{ArrayDataBuilder, ArrayRef, LargeListBuilder};
use arrow::datatypes::ToByteSlice;
pub use arrow::memory;
use arrow::{
array::{Array, ArrayData, PrimitiveArray},
buffer::Buffer,
};
use num::Num;
use polars_arrow::prelude::*;
use std::borrow::Cow;
use std::iter::FromIterator;
use std::marker::PhantomData;
use std::sync::Arc;
/// Common interface for chunked-array builders: append values or nulls of
/// input type `N`, then finish into a `ChunkedArray<T>`.
pub trait ChunkedBuilder<N, T> {
    fn append_value(&mut self, val: N);
    fn append_null(&mut self);
    /// Appends the value when `Some`, a null slot when `None`.
    fn append_option(&mut self, opt_val: Option<N>) {
        match opt_val {
            Some(v) => self.append_value(v),
            None => self.append_null(),
        }
    }
    fn finish(self) -> ChunkedArray<T>;
}
/// Builder for a boolean `ChunkedArray`.
pub struct BooleanChunkedBuilder {
    array_builder: BooleanArrayBuilder,
    field: Field,
}
impl ChunkedBuilder<bool, BooleanType> for BooleanChunkedBuilder {
    /// Appends a value of type `T` into the builder
    fn append_value(&mut self, v: bool) {
        self.array_builder.append_value(v);
    }
    /// Appends a null slot into the builder
    fn append_null(&mut self) {
        self.array_builder.append_null();
    }
    /// Finalizes into a single-chunk ChunkedArray.
    fn finish(mut self) -> BooleanChunked {
        let arr = Arc::new(self.array_builder.finish());
        let len = arr.len();
        ChunkedArray {
            field: Arc::new(self.field),
            chunks: vec![arr],
            chunk_id: vec![len],
            phantom: PhantomData,
            categorical_map: None,
        }
    }
}
impl BooleanChunkedBuilder {
    /// Creates a builder named `name` with room for `capacity` values.
    pub fn new(name: &str, capacity: usize) -> Self {
        BooleanChunkedBuilder {
            array_builder: BooleanArrayBuilder::new(capacity),
            field: Field::new(name, DataType::Boolean),
        }
    }
}
/// Builder for a primitive-typed `ChunkedArray`.
pub struct PrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
    T::Native: Default,
{
    array_builder: PrimitiveArrayBuilder<T>,
    field: Field,
}
impl<T> ChunkedBuilder<T::Native, T> for PrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
    T::Native: Default,
{
    /// Appends a value of type `T` into the builder
    fn append_value(&mut self, v: T::Native) {
        self.array_builder.append_value(v)
    }
    /// Appends a null slot into the builder
    fn append_null(&mut self) {
        self.array_builder.append_null()
    }
    /// Finalizes into a single-chunk ChunkedArray.
    fn finish(mut self) -> ChunkedArray<T> {
        let arr = Arc::new(self.array_builder.finish());
        let len = arr.len();
        ChunkedArray {
            field: Arc::new(self.field),
            chunks: vec![arr],
            chunk_id: vec![len],
            phantom: PhantomData,
            categorical_map: None,
        }
    }
}
impl<T> PrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
{
    /// Creates a builder named `name` with room for `capacity` values.
    pub fn new(name: &str, capacity: usize) -> Self {
        PrimitiveChunkedBuilder {
            array_builder: PrimitiveArrayBuilder::<T>::new(capacity),
            field: Field::new(name, T::get_dtype()),
        }
    }
}
/// Builder for a categorical `ChunkedArray`: stores u32 category indices
/// and a bidirectional string <-> index mapping.
pub struct CategoricalChunkedBuilder {
    array_builder: PrimitiveArrayBuilder<UInt32Type>,
    field: Field,
    /// string -> category index (used when the global cache is disabled).
    mapping: AHashMap<String, u32>,
    /// category index -> string, kept for the finished array.
    reverse_mapping: AHashMap<u32, String>,
}
impl CategoricalChunkedBuilder {
    pub fn new(name: &str, capacity: usize) -> Self {
        // Start the maps at 128 entries; they grow as categories appear.
        let mapping = AHashMap::with_capacity(128);
        let reverse_mapping = AHashMap::with_capacity(128);
        CategoricalChunkedBuilder {
            array_builder: PrimitiveArrayBuilder::<UInt32Type>::new(capacity),
            field: Field::new(name, DataType::Categorical),
            mapping,
            reverse_mapping,
        }
    }
}
impl CategoricalChunkedBuilder {
    /// Appends all the values in a single lock of the global string cache.
    pub fn append_values<'a, I>(&mut self, i: I)
    where
        I: IntoIterator<Item = Option<&'a str>>,
    {
        if use_string_cache() {
            // Lock once for the whole batch instead of per value.
            let mut mapping = crate::STRING_CACHE.lock_map();
            for opt_s in i {
                match opt_s {
                    Some(s) => {
                        // Look up or assign the next category index.
                        let idx = match mapping.get(s) {
                            Some(idx) => *idx,
                            None => {
                                let idx = mapping.len() as u32;
                                mapping.insert(s.to_string(), idx);
                                idx
                            }
                        };
                        self.reverse_mapping.insert(idx, s.to_string());
                        self.array_builder.append_value(idx);
                    }
                    None => {
                        self.array_builder.append_null();
                    }
                }
            }
        } else {
            // Same logic against the builder-local mapping.
            for opt_s in i {
                match opt_s {
                    Some(s) => {
                        let idx = match self.mapping.get(s) {
                            Some(idx) => *idx,
                            None => {
                                let idx = self.mapping.len() as u32;
                                self.mapping.insert(s.to_string(), idx);
                                idx
                            }
                        };
                        self.reverse_mapping.insert(idx, s.to_string());
                        self.array_builder.append_value(idx);
                    }
                    None => {
                        self.array_builder.append_null();
                    }
                }
            }
        }
    }
}
impl ChunkedBuilder<&str, CategoricalType> for CategoricalChunkedBuilder {
    /// Appends a single string value, interning it in either the global
    /// string cache (locked per call) or the builder-local mapping.
    fn append_value(&mut self, val: &str) {
        let idx = if use_string_cache() {
            let mut mapping = crate::STRING_CACHE.lock_map();
            match mapping.get(val) {
                Some(idx) => *idx,
                None => {
                    let idx = mapping.len() as u32;
                    mapping.insert(val.to_string(), idx);
                    idx
                }
            }
        } else {
            match self.mapping.get(val) {
                Some(idx) => *idx,
                None => {
                    let idx = self.mapping.len() as u32;
                    self.mapping.insert(val.to_string(), idx);
                    idx
                }
            }
        };
        self.reverse_mapping.insert(idx, val.to_string());
        self.array_builder.append_value(idx);
    }

    fn append_null(&mut self) {
        self.array_builder.append_null()
    }

    /// Finalizes into a single-chunk categorical ChunkedArray carrying the
    /// index -> string reverse mapping.
    fn finish(mut self) -> ChunkedArray<CategoricalType> {
        // Category indices are u32, so more categories cannot be encoded.
        if self.mapping.len() > u32::MAX as usize {
            // panic! takes format arguments directly; wrapping them in
            // format!() is redundant (and a hard error in Rust 2021).
            panic!("not more than {} categories supported", u32::MAX)
        };
        let arr = Arc::new(self.array_builder.finish());
        let len = arr.len();
        self.reverse_mapping.shrink_to_fit();
        ChunkedArray {
            field: Arc::new(self.field),
            chunks: vec![arr],
            chunk_id: vec![len],
            phantom: PhantomData,
            categorical_map: Some(Arc::new(self.reverse_mapping)),
        }
    }
}
/// Builder for a utf8 (string) `ChunkedArray`.
pub struct Utf8ChunkedBuilder {
    pub builder: LargeStringBuilder,
    /// Number of string elements reserved at construction time.
    pub capacity: usize,
    field: Field,
}
impl Utf8ChunkedBuilder {
    /// Create a new UtfChunkedBuilder
    ///
    /// # Arguments
    ///
    /// * `capacity` - Number of string elements in the final array.
    /// * `bytes_capacity` - Number of bytes needed to store the string values.
    pub fn new(name: &str, capacity: usize, bytes_capacity: usize) -> Self {
        Utf8ChunkedBuilder {
            builder: LargeStringBuilder::with_capacity(bytes_capacity, capacity),
            capacity,
            field: Field::new(name, DataType::Utf8),
        }
    }
    /// Appends a value of type `T` into the builder
    pub fn append_value<S: AsRef<str>>(&mut self, v: S) {
        self.builder.append_value(v.as_ref());
    }
    /// Appends a null slot into the builder
    pub fn append_null(&mut self) {
        self.builder.append_null();
    }
    /// Appends the value when `Some`, a null slot when `None`.
    pub fn append_option<S: AsRef<str>>(&mut self, opt: Option<S>) {
        match opt {
            Some(s) => self.append_value(s.as_ref()),
            None => self.append_null(),
        }
    }
    /// Finalizes into a single-chunk ChunkedArray.
    pub fn finish(mut self) -> Utf8Chunked {
        let arr = Arc::new(self.builder.finish());
        let len = arr.len();
        ChunkedArray {
            field: Arc::new(self.field),
            chunks: vec![arr],
            chunk_id: vec![len],
            phantom: PhantomData,
            categorical_map: None,
        }
    }
}
/// Thin wrapper around `Utf8ChunkedBuilder` that accepts `Cow<'_, str>` values.
pub struct Utf8ChunkedBuilderCow {
    builder: Utf8ChunkedBuilder,
}
impl Utf8ChunkedBuilderCow {
    /// Create a new builder; `capacity` is reused as the byte-capacity
    /// estimate of the inner utf8 builder.
    pub fn new(name: &str, capacity: usize) -> Self {
        let builder = Utf8ChunkedBuilder::new(name, capacity, capacity);
        Utf8ChunkedBuilderCow { builder }
    }
}
impl ChunkedBuilder<Cow<'_, str>, Utf8Type> for Utf8ChunkedBuilderCow {
    /// Append a value, borrowing from the `Cow` whether it is owned or borrowed.
    fn append_value(&mut self, val: Cow<'_, str>) {
        self.builder.append_value(&*val)
    }

    fn append_null(&mut self) {
        self.builder.append_null()
    }

    fn finish(self) -> ChunkedArray<Utf8Type> {
        self.builder.finish()
    }
}
/// Build a primitive `ChunkedArray` named `name` from a slice of optional values.
pub fn build_primitive_ca_with_opt<T>(s: &[Option<T::Native>], name: &str) -> ChunkedArray<T>
where
    T: PolarsPrimitiveType,
    T::Native: Copy,
{
    let mut builder = PrimitiveChunkedBuilder::new(name, s.len());
    s.iter().for_each(|opt| builder.append_option(*opt));
    builder.finish()
}
/// Attach an (optional) validity bitmap to an `ArrayDataBuilder`.
///
/// When `null_count` is known and positive, the bitmap is required and attached.
/// When `null_count` is unknown but a bitmap is supplied, the combination is
/// not implemented yet (it would need to honour offset and length).
pub(crate) fn set_null_bits(
    mut builder: ArrayDataBuilder,
    null_bit_buffer: Option<Buffer>,
    null_count: Option<usize>,
) -> ArrayDataBuilder {
    if let Some(null_count) = null_count {
        if null_count > 0 {
            let buf = null_bit_buffer
                .expect("implementation error. Should not be None if null_count > 0");
            builder = builder.null_bit_buffer(buf);
        }
        builder
    } else if null_bit_buffer.is_none() {
        builder
    } else {
        // this should take account into offset and length
        unimplemented!()
    }
}
/// Take an existing slice and a null bitmap and construct an arrow array.
pub fn build_with_existing_null_bitmap_and_slice<T>(
    null_bit_buffer: Option<Buffer>,
    null_count: usize,
    values: &[T::Native],
) -> PrimitiveArray<T>
where
    T: PolarsPrimitiveType,
{
    // See:
    // https://docs.rs/arrow/0.16.0/src/arrow/array/builder.rs.html#314
    let data_builder = set_null_bits(
        ArrayData::builder(T::DATA_TYPE)
            .len(values.len())
            .add_buffer(Buffer::from(values.to_byte_slice())),
        null_bit_buffer,
        Some(null_count),
    );
    PrimitiveArray::<T>::from(data_builder.build())
}
/// Get the null count and the null bitmap of the arrow array
pub fn get_bitmap<T: Array + ?Sized>(arr: &T) -> (usize, Option<Buffer>) {
    let data = arr.data();
    // Clone the validity buffer (cheap; arrow buffers are reference counted).
    let validity = data.null_bitmap().as_ref().map(|bitmap| bitmap.buffer_ref().clone());
    (data.null_count(), validity)
}
// Used in polars/src/chunked_array/apply.rs:24 to collect from aligned vecs and null bitmaps
impl<T> FromIterator<(AlignedVec<T::Native>, Option<Buffer>)> for ChunkedArray<T>
where
    T: PolarsNumericType,
{
    fn from_iter<I: IntoIterator<Item = (AlignedVec<T::Native>, Option<Buffer>)>>(iter: I) -> Self {
        // Turn every (values, validity) pair into one arrow chunk.
        let chunks: Vec<ArrayRef> = iter
            .into_iter()
            .map(|(values, opt_buffer)| {
                Arc::new(values.into_primitive_array::<T>(opt_buffer)) as ArrayRef
            })
            .collect();
        ChunkedArray::new_from_chunks("from_iter", chunks)
    }
}
/// Returns the nearest number that is `>=` than `num` and is a multiple of 64
#[inline]
pub fn round_upto_multiple_of_64(num: usize) -> usize {
    // 64 is a power of two, so rounding up is a single mask operation.
    (num + 63) & !63
}
/// Returns the nearest multiple of `factor` that is `>=` than `num`. Here `factor` must
/// be a power of 2.
fn round_upto_power_of_2(num: usize, factor: usize) -> usize {
    debug_assert!(factor > 0 && (factor & (factor - 1)) == 0);
    // Adding `factor - 1` and masking the low bits rounds up to the next multiple.
    let mask = factor - 1;
    (num + mask) & !mask
}
/// Take an owned Vec that is 64 byte aligned and create a zero copy PrimitiveArray
/// Can also take a null bit buffer into account.
pub fn aligned_vec_to_primitive_array<T: PolarsPrimitiveType>(
    values: AlignedVec<T::Native>,
    null_bit_buffer: Option<Buffer>,
    null_count: Option<usize>,
) -> PrimitiveArray<T> {
    // Record the length before the vec is moved into the arrow buffer.
    let len = values.len();
    let data_builder = ArrayData::builder(T::DATA_TYPE)
        .len(len)
        .add_buffer(values.into_arrow_buffer());
    let data_builder = set_null_bits(data_builder, null_bit_buffer, null_count);
    PrimitiveArray::<T>::from(data_builder.build())
}
/// Constructors for `ChunkedArray`s from slices and iterators of `N` values.
pub trait NewChunkedArray<T, N> {
    /// Create a new ChunkedArray from a slice of values.
    fn new_from_slice(name: &str, v: &[N]) -> Self;
    /// Create a new ChunkedArray from a slice of optional values.
    fn new_from_opt_slice(name: &str, opt_v: &[Option<N>]) -> Self;
    /// Create a new ChunkedArray from an iterator.
    fn new_from_opt_iter(name: &str, it: impl Iterator<Item = Option<N>>) -> Self;
    /// Create a new ChunkedArray from an iterator.
    fn new_from_iter(name: &str, it: impl Iterator<Item = N>) -> Self;
}
impl<T> NewChunkedArray<T, T::Native> for ChunkedArray<T>
where
    T: PolarsPrimitiveType,
{
    fn new_from_slice(name: &str, v: &[T::Native]) -> Self {
        Self::new_from_iter(name, v.iter().copied())
    }

    fn new_from_opt_slice(name: &str, opt_v: &[Option<T::Native>]) -> Self {
        Self::new_from_opt_iter(name, opt_v.iter().copied())
    }

    fn new_from_opt_iter(
        name: &str,
        it: impl Iterator<Item = Option<T::Native>>,
    ) -> ChunkedArray<T> {
        let mut builder = PrimitiveChunkedBuilder::new(name, get_iter_capacity(&it));
        for opt in it {
            builder.append_option(opt);
        }
        builder.finish()
    }

    /// Create a new ChunkedArray from an iterator.
    fn new_from_iter(name: &str, it: impl Iterator<Item = T::Native>) -> ChunkedArray<T> {
        // Collecting through `NoNull` skips null-validity bookkeeping entirely.
        let no_null: NoNull<ChunkedArray<_>> = it.collect();
        let mut ca = no_null.into_inner();
        ca.rename(name);
        ca
    }
}
impl NewChunkedArray<BooleanType, bool> for BooleanChunked {
    fn new_from_slice(name: &str, v: &[bool]) -> Self {
        Self::new_from_iter(name, v.iter().copied())
    }

    fn new_from_opt_slice(name: &str, opt_v: &[Option<bool>]) -> Self {
        Self::new_from_opt_iter(name, opt_v.iter().copied())
    }

    fn new_from_opt_iter(
        name: &str,
        it: impl Iterator<Item = Option<bool>>,
    ) -> ChunkedArray<BooleanType> {
        let mut builder = BooleanChunkedBuilder::new(name, get_iter_capacity(&it));
        for opt in it {
            builder.append_option(opt);
        }
        builder.finish()
    }

    /// Create a new ChunkedArray from an iterator.
    fn new_from_iter(name: &str, it: impl Iterator<Item = bool>) -> ChunkedArray<BooleanType> {
        let mut ca: ChunkedArray<_> = it.collect();
        ca.rename(name);
        ca
    }
}
impl<S> NewChunkedArray<Utf8Type, S> for Utf8Chunked
where
    S: AsRef<str>,
{
    fn new_from_slice(name: &str, v: &[S]) -> Self {
        // Size the byte buffer from the total string length up front.
        let values_size: usize = v.iter().map(|s| s.as_ref().len()).sum();
        let mut builder = LargeStringBuilder::with_capacity(values_size, v.len());
        for val in v {
            builder.append_value(val.as_ref());
        }
        ChunkedArray {
            field: Arc::new(Field::new(name, DataType::Utf8)),
            chunks: vec![Arc::new(builder.finish())],
            chunk_id: vec![v.len()],
            phantom: PhantomData,
            categorical_map: None,
        }
    }

    fn new_from_opt_slice(name: &str, opt_v: &[Option<S>]) -> Self {
        let values_size: usize = opt_v
            .iter()
            .map(|s| s.as_ref().map_or(0, |s| s.as_ref().len()))
            .sum();
        // FIX: the arguments were transposed. `Utf8ChunkedBuilder::new` takes
        // `(name, capacity, bytes_capacity)`; passing `values_size` as the
        // element capacity and `opt_v.len()` as the byte capacity made both
        // preallocation hints wrong (compare `new_from_slice` above).
        let mut builder = Utf8ChunkedBuilder::new(name, opt_v.len(), values_size);
        for opt in opt_v {
            match opt {
                Some(v) => builder.append_value(v.as_ref()),
                None => builder.append_null(),
            }
        }
        builder.finish()
    }

    fn new_from_opt_iter(name: &str, it: impl Iterator<Item = Option<S>>) -> Self {
        let cap = get_iter_capacity(&it);
        // Rough heuristic: assume ~5 bytes per string for the byte capacity.
        let mut builder = Utf8ChunkedBuilder::new(name, cap, cap * 5);
        for opt in it {
            builder.append_option(opt);
        }
        builder.finish()
    }

    /// Create a new ChunkedArray from an iterator.
    fn new_from_iter(name: &str, it: impl Iterator<Item = S>) -> Self {
        let cap = get_iter_capacity(&it);
        let mut builder = Utf8ChunkedBuilder::new(name, cap, cap * 5);
        for v in it {
            builder.append_value(v);
        }
        builder.finish()
    }
}
/// Common interface of the builders that produce `ListChunked` arrays.
pub trait ListBuilderTrait {
    /// Append an optional series; `None` becomes a null list slot.
    fn append_opt_series(&mut self, opt_s: Option<&Series>);
    /// Append all values of `s` as one list slot.
    fn append_series(&mut self, s: &Series);
    /// Finish building and return the `ListChunked`.
    fn finish(&mut self) -> ListChunked;
}
/// Builder for `ListChunked` whose inner values have primitive type `T`.
pub struct ListPrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
{
    // Arrow list builder wrapping a primitive values builder.
    pub builder: LargeListBuilder<PrimitiveArrayBuilder<T>>,
    // Schema field (name + List dtype) of the resulting array.
    field: Field,
}
// Shared epilogue of the list builders: finish the underlying arrow builder
// and wrap the single resulting chunk in a `ListChunked`.
macro_rules! finish_list_builder {
    ($self:ident) => {{
        let arr = Arc::new($self.builder.finish());
        let len = arr.len();
        ListChunked {
            field: Arc::new($self.field.clone()),
            chunks: vec![arr],
            chunk_id: vec![len],
            phantom: PhantomData,
            categorical_map: None,
        }
    }};
}
impl<T> ListPrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
{
    /// Create a new list builder around an existing primitive values builder.
    pub fn new(name: &str, values_builder: PrimitiveArrayBuilder<T>, capacity: usize) -> Self {
        ListPrimitiveChunkedBuilder {
            builder: LargeListBuilder::with_capacity(values_builder, capacity),
            field: Field::new(name, DataType::List(T::get_dtype().to_arrow())),
        }
    }

    /// Append a slice as one list slot; `None` appends a null slot.
    pub fn append_slice(&mut self, opt_v: Option<&[T::Native]>) {
        if let Some(v) = opt_v {
            self.builder.values().append_slice(v);
            self.builder.append(true).expect("should not fail");
        } else {
            self.builder.append(false).expect("should not fail");
        }
    }

    /// Append a null list slot.
    pub fn append_null(&mut self) {
        self.builder.append(false).expect("should not fail");
    }
}
impl<T> ListBuilderTrait for ListPrimitiveChunkedBuilder<T>
where
    T: PolarsPrimitiveType,
    T::Native: Num,
{
    fn append_opt_series(&mut self, opt_s: Option<&Series>) {
        if let Some(s) = opt_s {
            self.append_series(s);
        } else {
            self.builder.append(false).unwrap();
        }
    }

    fn append_series(&mut self, s: &Series) {
        let builder = self.builder.values();
        for a in s.chunks() {
            let values = a.get_values::<T>();
            if a.null_count() == 0 {
                // Fast path: no per-element validity checks needed.
                builder.append_slice(values);
            } else {
                for (idx, v) in values.iter().enumerate() {
                    if a.is_valid(idx) {
                        builder.append_value(*v);
                    } else {
                        builder.append_null();
                    }
                }
            }
        }
        self.builder.append(true).unwrap();
    }

    fn finish(&mut self) -> ListChunked {
        finish_list_builder!(self)
    }
}
/// Builder for `ListChunked` whose inner values are utf8 strings.
pub struct ListUtf8ChunkedBuilder {
    builder: LargeListBuilder<LargeStringBuilder>,
    field: Field,
}
impl ListUtf8ChunkedBuilder {
    /// Create a new list builder around an existing utf8 values builder.
    pub fn new(name: &str, values_builder: LargeStringBuilder, capacity: usize) -> Self {
        ListUtf8ChunkedBuilder {
            builder: LargeListBuilder::with_capacity(values_builder, capacity),
            field: Field::new(name, DataType::List(ArrowDataType::LargeUtf8)),
        }
    }
}
impl ListBuilderTrait for ListUtf8ChunkedBuilder {
    fn append_opt_series(&mut self, opt_s: Option<&Series>) {
        if let Some(s) = opt_s {
            self.append_series(s);
        } else {
            self.builder.append(false).unwrap();
        }
    }

    fn append_series(&mut self, s: &Series) {
        // Panics if `s` is not a utf8 series; callers must dispatch on dtype.
        let ca = s.utf8().unwrap();
        let value_builder = self.builder.values();
        for opt_s in ca {
            if let Some(s) = opt_s {
                value_builder.append_value(s);
            } else {
                value_builder.append_null();
            }
        }
        self.builder.append(true).unwrap();
    }

    fn finish(&mut self) -> ListChunked {
        finish_list_builder!(self)
    }
}
/// Builder for `ListChunked` whose inner values are booleans.
pub struct ListBooleanChunkedBuilder {
    builder: LargeListBuilder<BooleanArrayBuilder>,
    field: Field,
}
impl ListBooleanChunkedBuilder {
    /// Create a new list builder around an existing boolean values builder.
    pub fn new(name: &str, values_builder: BooleanArrayBuilder, capacity: usize) -> Self {
        Self {
            builder: LargeListBuilder::with_capacity(values_builder, capacity),
            field: Field::new(name, DataType::List(ArrowDataType::Boolean)),
        }
    }
}
impl ListBuilderTrait for ListBooleanChunkedBuilder {
    fn append_opt_series(&mut self, opt_s: Option<&Series>) {
        if let Some(s) = opt_s {
            self.append_series(s);
        } else {
            self.builder.append(false).unwrap();
        }
    }

    fn append_series(&mut self, s: &Series) {
        // Panics if `s` is not a boolean series; callers must dispatch on dtype.
        let ca = s.bool().unwrap();
        let value_builder = self.builder.values();
        for opt_b in ca {
            if let Some(b) = opt_b {
                value_builder.append_value(b);
            } else {
                value_builder.append_null();
            }
        }
        self.builder.append(true).unwrap();
    }

    fn finish(&mut self) -> ListChunked {
        finish_list_builder!(self)
    }
}
/// Return a boxed list builder matching the inner dtype `dt`.
///
/// * `value_capacity` - expected number of inner values across all lists.
/// * `list_capacity` - expected number of list slots.
pub fn get_list_builder(
    dt: &DataType,
    value_capacity: usize,
    list_capacity: usize,
    name: &str,
) -> Box<dyn ListBuilderTrait> {
    macro_rules! get_primitive_builder {
        ($type:ty) => {{
            let values_builder = PrimitiveArrayBuilder::<$type>::new(value_capacity);
            let builder = ListPrimitiveChunkedBuilder::new(&name, values_builder, list_capacity);
            Box::new(builder)
        }};
    }
    macro_rules! get_bool_builder {
        () => {{
            let values_builder = BooleanArrayBuilder::new(value_capacity);
            let builder = ListBooleanChunkedBuilder::new(&name, values_builder, list_capacity);
            Box::new(builder)
        }};
    }
    macro_rules! get_utf8_builder {
        () => {{
            // Heuristic: reserve ~5 bytes per expected string value.
            let values_builder =
                LargeStringBuilder::with_capacity(value_capacity * 5, value_capacity);
            let builder = ListUtf8ChunkedBuilder::new(&name, values_builder, list_capacity);
            Box::new(builder)
        }};
    }
    // Dispatch on the dtype, instantiating the matching builder macro.
    match_arrow_data_type_apply_macro!(
        dt,
        get_primitive_builder,
        get_utf8_builder,
        get_bool_builder
    )
}
#[cfg(test)]
mod test {
    use super::*;
    use arrow::array::PrimitiveBuilder;

    #[test]
    fn test_primitive_builder() {
        let mut builder = PrimitiveChunkedBuilder::<UInt32Type>::new("foo", 6);
        let values = &[Some(1), None, Some(2), Some(3), None, Some(4)];
        for val in values {
            builder.append_option(*val);
        }
        let ca = builder.finish();
        assert_eq!(Vec::from(&ca), values);
    }

    #[test]
    fn test_existing_null_bitmap() {
        let mut builder = PrimitiveBuilder::<UInt32Type>::new(3);
        for val in &[Some(1), None, Some(2)] {
            builder.append_option(*val).unwrap();
        }
        let arr = builder.finish();
        let (null_count, buf) = get_bitmap(&arr);

        // Reuse the bitmap of `arr` with a fresh values slice.
        let new_arr =
            build_with_existing_null_bitmap_and_slice::<UInt32Type>(buf, null_count, &[7, 8, 9]);
        assert!(new_arr.is_valid(0));
        assert!(new_arr.is_null(1));
        assert!(new_arr.is_valid(2));
    }

    #[test]
    fn test_aligned_vec_allocations() {
        // Can only have a zero copy to arrow memory if address of first byte % 64 == 0
        // check if we can increase above initial capacity and keep the Arrow alignment
        let mut v = AlignedVec::with_capacity_aligned(2);
        v.push(1);
        v.push(2);
        v.push(3);
        v.push(4);
        let ptr = v.as_ptr();
        assert_eq!((ptr as usize) % memory::ALIGNMENT, 0);

        // check if we can shrink to fit
        let mut v = AlignedVec::with_capacity_aligned(10);
        v.push(1);
        v.push(2);
        v.shrink_to_fit();
        assert_eq!(v.len(), 2);
        assert_eq!(v.capacity(), 2);
        let ptr = v.as_ptr();
        assert_eq!((ptr as usize) % memory::ALIGNMENT, 0);

        let a = aligned_vec_to_primitive_array::<Int32Type>(v, None, Some(0));
        assert_eq!(&a.values()[..2], &[1, 2])
    }

    #[test]
    fn test_list_builder() {
        let values_builder = PrimitiveArrayBuilder::<Int32Type>::new(10);
        let mut builder = ListPrimitiveChunkedBuilder::new("a", values_builder, 10);

        // create a series containing two chunks
        let mut s1 = Int32Chunked::new_from_slice("a", &[1, 2, 3]).into_series();
        let s2 = Int32Chunked::new_from_slice("b", &[4, 5, 6]).into_series();
        s1.append(&s2).unwrap();

        builder.append_series(&s1);
        builder.append_series(&s2);
        let ls = builder.finish();
        if let AnyValue::List(s) = ls.get_any_value(0) {
            // many chunks are aggregated to one in the ListArray
            assert_eq!(s.len(), 6)
        } else {
            // `assert!(false)` trips clippy::assertions_on_constants; fail with
            // an explicit message instead.
            panic!("expected AnyValue::List")
        }
        if let AnyValue::List(s) = ls.get_any_value(1) {
            assert_eq!(s.len(), 3)
        } else {
            panic!("expected AnyValue::List")
        }

        // test list collect
        let out = [&s1, &s2]
            .iter()
            .map(|s| s.clone())
            .collect::<ListChunked>();
        assert_eq!(out.get(0).unwrap().len(), 6);
        assert_eq!(out.get(1).unwrap().len(), 3);
    }

    #[test]
    fn test_categorical_builder() {
        let mut builder = CategoricalChunkedBuilder::new("foo", 10);
        builder.append_value("hello");
        builder.append_null();
        builder.append_value("world");
        let ca = builder.finish();
        let v = AnyValue::Utf8("hello");
        assert_eq!(ca.get_any_value(0), v);
        let v = AnyValue::Null;
        assert_eq!(ca.get_any_value(1), v);
    }
}
| 30.392246 | 100 | 0.567307 |
2268c9b3854779d94ec34bfbc2d4c0efe359e866 | 48,949 | use crate::base::{DummyResult, ExpansionData, ExtCtxt, MacResult, TTMacroExpander};
use crate::base::{SyntaxExtension, SyntaxExtensionKind};
use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstFragmentKind};
use crate::mbe;
use crate::mbe::macro_check;
use crate::mbe::macro_parser::parse_tt;
use crate::mbe::macro_parser::{Error, Failure, Success};
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq};
use crate::mbe::transcribe::transcribe;
use rustc_ast::ast;
use rustc_ast::token::{self, NtTT, Token, TokenKind::*};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, DiagnosticBuilder, FatalError};
use rustc_feature::Features;
use rustc_parse::parser::Parser;
use rustc_parse::Directory;
use rustc_session::parse::ParseSess;
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
use rustc_span::symbol::{kw, sym, MacroRulesNormalizedIdent, Symbol};
use rustc_span::Span;
use log::debug;
use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{mem, slice};
/// Help text listing every fragment specifier accepted in `$name:frag`.
/// (The `\` line continuations strip the following indentation, so the
/// rendered message is a single line.)
const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
    `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
    `literal`, `path`, `meta`, `tt`, `item` and `vis`";
/// The result of expanding a `macro_rules!` arm: a parser positioned at the
/// start of the expanded token stream, plus the spans needed for diagnostics.
crate struct ParserAnyMacro<'a> {
    // Parser over the transcribed right-hand side of the matched arm.
    parser: Parser<'a>,
    /// Span of the expansion site of the macro this parser is for
    site_span: Span,
    /// The ident of the macro we're parsing
    macro_ident: ast::Ident,
    // Span of the arm that produced this expansion (used in error labels).
    arm_span: Span,
}
/// Add a fragment-kind-specific label to a macro expansion error.
/// Only type and pattern positions get an extra label; other kinds are
/// left untouched.
crate fn annotate_err_with_kind(
    err: &mut DiagnosticBuilder<'_>,
    kind: AstFragmentKind,
    span: Span,
) {
    let label = match kind {
        AstFragmentKind::Ty => Some("this macro call doesn't expand to a type"),
        AstFragmentKind::Pat => Some("this macro call doesn't expand to a pattern"),
        _ => None,
    };
    if let Some(label) = label {
        err.span_label(span, label);
    }
}
/// Instead of e.g. `vec![a, b, c]` in a pattern context, suggest `[a, b, c]`.
fn suggest_slice_pat(e: &mut DiagnosticBuilder<'_>, site_span: Span, parser: &Parser<'_>) {
    // Everything after the `!` of the macro call already reads as a slice
    // pattern, so reuse the source snippet when we can obtain one.
    let suggestion = parser
        .sess
        .source_map()
        .span_to_snippet(site_span)
        .ok()
        .and_then(|code| code.find('!').map(|bang| code[bang + 1..].to_string()));
    match suggestion {
        Some(suggestion) => {
            e.span_suggestion(
                site_span,
                "use a slice pattern here instead",
                suggestion,
                Applicability::MachineApplicable,
            );
        }
        None => {
            e.span_label(site_span, "use a slice pattern here instead");
        }
    }
    e.help(
        "for more information, see https://doc.rust-lang.org/edition-guide/\
         rust-2018/slice-patterns.html",
    );
}
impl<'a> ParserAnyMacro<'a> {
    /// Parse the macro's expansion into an AST fragment of the requested
    /// `kind`, rewriting parse errors so they point at the macro arm and/or
    /// the invocation site instead of a dummy span.
    crate fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
        let fragment = panictry!(parse_ast_fragment(parser, kind).map_err(|mut e| {
            // Reword "found `<eof>`" errors: the expansion simply ended early.
            if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
                if !e.span.is_dummy() {
                    // early end of macro arm (#52866)
                    e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
                }
                let msg = &e.message[0];
                e.message[0] = (
                    format!(
                        "macro expansion ends with an incomplete expression: {}",
                        msg.0.replace(", found `<eof>`", ""),
                    ),
                    msg.1,
                );
            }
            if e.span.is_dummy() {
                // Get around lack of span in error (#30128)
                e.replace_span_with(site_span);
                if !parser.sess.source_map().is_imported(arm_span) {
                    e.span_label(arm_span, "in this macro arm");
                }
            } else if parser.sess.source_map().is_imported(parser.token.span) {
                e.span_label(site_span, "in this macro invocation");
            }
            match kind {
                // Special case: `vec!` used in pattern position gets a
                // machine-applicable slice-pattern suggestion.
                AstFragmentKind::Pat if macro_ident.name == sym::vec => {
                    suggest_slice_pat(&mut e, site_span, parser);
                }
                _ => annotate_err_with_kind(&mut e, kind, site_span),
            };
            e
        }));
        // We allow semicolons at the end of expressions -- e.g., the semicolon in
        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
        // but `m!()` is allowed in expression positions (cf. issue #34706).
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            parser.bump();
        }
        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }
}
/// Expander carrying the compiled rules of one `macro_rules!` definition.
struct MacroRulesMacroExpander {
    // Name of the macro (for tracing and diagnostics).
    name: ast::Ident,
    // Span of the macro definition.
    span: Span,
    transparency: Transparency,
    // Left-hand-side matchers, one per rule.
    lhses: Vec<mbe::TokenTree>,
    // Right-hand-side templates, parallel to `lhses`.
    rhses: Vec<mbe::TokenTree>,
    // False when the definition failed validity checks; expansion then yields
    // a dummy result instead of attempting to match.
    valid: bool,
}
impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> Box<dyn MacResult + 'cx> {
        // A definition that failed its validity checks expands to a dummy
        // result; the errors were already reported at definition time.
        if self.valid {
            generic_extension(
                cx,
                sp,
                self.span,
                self.name,
                self.transparency,
                input,
                &self.lhses,
                &self.rhses,
            )
        } else {
            DummyResult::any(sp)
        }
    }
}
/// Record a `trace_macros` note, attributed to the outermost macro call site
/// when a macro backtrace exists.
fn trace_macros_note(cx_expansions: &mut FxHashMap<Span, Vec<String>>, sp: Span, message: String) {
    let sp = match sp.macro_backtrace().last() {
        Some(trace) => trace.call_site,
        None => sp,
    };
    cx_expansions.entry(sp).or_default().push(message);
}
/// Given `lhses` and `rhses`, this is the new macro we create
///
/// Tries each arm's matcher in order against `arg`; on the first success,
/// transcribes the corresponding rhs and returns a parser over the result.
/// If no arm matches, reports the failure of the arm that progressed
/// furthest into the input.
fn generic_extension<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    name: ast::Ident,
    transparency: Transparency,
    arg: TokenStream,
    lhses: &[mbe::TokenTree],
    rhses: &[mbe::TokenTree],
) -> Box<dyn MacResult + 'cx> {
    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(arg.clone()));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }
    // Which arm's failure should we report? (the one furthest along)
    let mut best_failure: Option<(Token, &str)> = None;
    // We create a base parser that can be used for the "black box" parts.
    // Every iteration needs a fresh copy of that parser. However, the parser
    // is not mutated on many of the iterations, particularly when dealing with
    // macros like this:
    //
    // macro_rules! foo {
    //     ("a") => (A);
    //     ("b") => (B);
    //     ("c") => (C);
    //     // ... etc. (maybe hundreds more)
    // }
    //
    // as seen in the `html5ever` benchmark. We use a `Cow` so that the base
    // parser is only cloned when necessary (upon mutation). Furthermore, we
    // reinitialize the `Cow` with the base parser at the start of every
    // iteration, so that any mutated parsers are not reused. This is all quite
    // hacky, but speeds up the `html5ever` benchmark significantly. (Issue
    // 68836 suggests a more comprehensive but more complex change to deal with
    // this situation.)
    let parser = parser_from_cx(&cx.current_expansion, &cx.parse_sess, arg.clone());
    for (i, lhs) in lhses.iter().enumerate() {
        // try each arm's matchers
        let lhs_tt = match *lhs {
            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
            _ => cx.span_bug(sp, "malformed macro lhs"),
        };
        // Take a snapshot of the state of pre-expansion gating at this point.
        // This is used so that if a matcher is not `Success(..)`ful,
        // then the spans which became gated when parsing the unsuccessful matcher
        // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
        let mut gated_spans_snapshot =
            mem::take(&mut *cx.parse_sess.gated_spans.spans.borrow_mut());
        match parse_tt(&mut Cow::Borrowed(&parser), lhs_tt) {
            Success(named_matches) => {
                // The matcher was `Success(..)`ful.
                // Merge the gated spans from parsing the matcher with the pre-existing ones.
                cx.parse_sess.gated_spans.merge(gated_spans_snapshot);
                let rhs = match rhses[i] {
                    // ignore delimiters
                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                    _ => cx.span_bug(sp, "malformed macro rhs"),
                };
                let arm_span = rhses[i].span();
                let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
                // rhs has holes ( `$id` and `$(...)` that need filled)
                let mut tts = transcribe(cx, &named_matches, rhs, transparency);
                // Replace all the tokens for the corresponding positions in the macro, to maintain
                // proper positions in error reporting, while maintaining the macro_backtrace.
                if rhs_spans.len() == tts.len() {
                    tts = tts.map_enumerated(|i, mut tt| {
                        let mut sp = rhs_spans[i];
                        sp = sp.with_ctxt(tt.span().ctxt());
                        tt.set_span(sp);
                        tt
                    });
                }
                if cx.trace_macros() {
                    let msg = format!("to `{}`", pprust::tts_to_string(tts.clone()));
                    trace_macros_note(&mut cx.expansions, sp, msg);
                }
                let directory = Directory {
                    path: cx.current_expansion.module.directory.clone(),
                    ownership: cx.current_expansion.directory_ownership,
                };
                let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
                p.root_module_name =
                    cx.current_expansion.module.mod_path.last().map(|id| id.to_string());
                p.last_type_ascription = cx.current_expansion.prior_type_ascription;
                // Let the context choose how to interpret the result.
                // Weird, but useful for X-macros.
                return Box::new(ParserAnyMacro {
                    parser: p,
                    // Pass along the original expansion site and the name of the macro
                    // so we can print a useful error message if the parse of the expanded
                    // macro leaves unparsed tokens.
                    site_span: sp,
                    macro_ident: name,
                    arm_span,
                });
            }
            Failure(token, msg) => match best_failure {
                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
                _ => best_failure = Some((token, msg)),
            },
            Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
        }
        // The matcher was not `Success(..)`ful.
        // Restore to the state before snapshotting and maybe try again.
        mem::swap(&mut gated_spans_snapshot, &mut cx.parse_sess.gated_spans.spans.borrow_mut());
    }
    drop(parser);
    // No arm matched: report the failure that progressed furthest.
    let (token, label) = best_failure.expect("ran no matchers");
    let span = token.span.substitute_dummy(sp);
    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
    err.span_label(span, label);
    if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
        err.span_label(cx.source_map().def_span(def_span), "when calling this macro");
    }
    // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
    if let Some((arg, comma_span)) = arg.add_comma() {
        for lhs in lhses {
            // try each arm's matchers
            let lhs_tt = match *lhs {
                mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
                _ => continue,
            };
            let parser = parser_from_cx(&cx.current_expansion, &cx.parse_sess, arg.clone());
            match parse_tt(&mut Cow::Borrowed(&parser), lhs_tt) {
                Success(_) => {
                    if comma_span.is_dummy() {
                        err.note("you might be missing a comma");
                    } else {
                        err.span_suggestion_short(
                            comma_span,
                            "missing comma here",
                            ", ".to_string(),
                            Applicability::MachineApplicable,
                        );
                    }
                }
                _ => {}
            }
        }
    }
    err.emit();
    cx.trace_macros_diag();
    DummyResult::any(sp)
}
// Note that macro-by-example's input is also matched against a token tree:
// $( $lhs:tt => $rhs:tt );+
//
// Holy self-referential!

/// Converts a macro item into a syntax extension.
///
/// Parses the `macro_rules!` body with the self-describing matcher above,
/// validates every rule (delimited lhs, FOLLOW sets, no empty repetitions,
/// meta-variable usage) and packages the rules into a `SyntaxExtension`
/// whose expander is a `MacroRulesMacroExpander`.
pub fn compile_declarative_macro(
    sess: &ParseSess,
    features: &Features,
    def: &ast::Item,
    edition: Edition,
) -> SyntaxExtension {
    let diag = &sess.span_diagnostic;
    let lhs_nm = ast::Ident::new(sym::lhs, def.span);
    let rhs_nm = ast::Ident::new(sym::rhs, def.span);
    let tt_spec = ast::Ident::new(sym::tt, def.span);

    // Parse the macro_rules! invocation
    let (macro_rules, body) = match &def.kind {
        ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.inner_tokens()),
        _ => unreachable!(),
    };

    // The pattern that macro_rules matches.
    // The grammar for macro_rules! is:
    // $( $lhs:tt => $rhs:tt );+
    // ...quasiquoting this would be nice.
    // These spans won't matter, anyways
    let argument_gram = vec![
        mbe::TokenTree::Sequence(
            DelimSpan::dummy(),
            Lrc::new(mbe::SequenceRepetition {
                tts: vec![
                    mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
                    mbe::TokenTree::token(token::FatArrow, def.span),
                    mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
                ],
                separator: Some(Token::new(
                    if macro_rules { token::Semi } else { token::Comma },
                    def.span,
                )),
                kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
                num_captures: 2,
            }),
        ),
        // to phase into semicolon-termination instead of semicolon-separation
        mbe::TokenTree::Sequence(
            DelimSpan::dummy(),
            Lrc::new(mbe::SequenceRepetition {
                tts: vec![mbe::TokenTree::token(
                    if macro_rules { token::Semi } else { token::Comma },
                    def.span,
                )],
                separator: None,
                kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
                num_captures: 0,
            }),
        ),
    ];

    let parser = Parser::new(sess, body, None, true, true, rustc_parse::MACRO_ARGUMENTS);
    let argument_map = match parse_tt(&mut Cow::Borrowed(&parser), &argument_gram) {
        Success(m) => m,
        Failure(token, msg) => {
            let s = parse_failure_msg(&token);
            let sp = token.span.substitute_dummy(def.span);
            let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
            err.span_label(sp, msg);
            err.emit();
            FatalError.raise();
        }
        Error(sp, s) => {
            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
        }
    };

    let mut valid = true;

    // Extract the arguments:
    let lhses = match argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
        MatchedSeq(ref s) => s
            .iter()
            .map(|m| {
                if let MatchedNonterminal(ref nt) = *m {
                    if let NtTT(ref tt) = **nt {
                        let tt = mbe::quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
                        valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
                        return tt;
                    }
                }
                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
            })
            .collect::<Vec<mbe::TokenTree>>(),
        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
    };

    let rhses = match argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
        MatchedSeq(ref s) => s
            .iter()
            .map(|m| {
                if let MatchedNonterminal(ref nt) = *m {
                    if let NtTT(ref tt) = **nt {
                        return mbe::quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
                    }
                }
                // FIX: this internal-error message previously said
                // "wrong-structured lhs" even though we are extracting the rhs.
                sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
            })
            .collect::<Vec<mbe::TokenTree>>(),
        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
    };

    for rhs in &rhses {
        valid &= check_rhs(sess, rhs);
    }

    // don't abort iteration early, so that errors for multiple lhses can be reported
    for lhs in &lhses {
        valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
    }

    // We use CRATE_NODE_ID instead of `def.id` otherwise we may emit buffered lints for a node id
    // that is not lint-checked and trigger the "failed to process buffered lint here" bug.
    valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);

    let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
    match transparency_error {
        Some(TransparencyError::UnknownTransparency(value, span)) => {
            diag.span_err(span, &format!("unknown macro transparency: `{}`", value))
        }
        Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) => {
            diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes")
        }
        None => {}
    }

    let expander: Box<_> = Box::new(MacroRulesMacroExpander {
        name: def.ident,
        span: def.span,
        transparency,
        lhses,
        rhses,
        valid,
    });

    SyntaxExtension::new(
        sess,
        SyntaxExtensionKind::LegacyBang(expander),
        def.span,
        Vec::new(),
        edition,
        def.ident.name,
        &def.attrs,
    )
}
/// Check a rule's lhs: it must be a single delimited token tree, and its
/// matcher must satisfy the FOLLOW-set rules.
fn check_lhs_nt_follows(
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    lhs: &mbe::TokenTree,
) -> bool {
    // lhs is going to be like TokenTree::Delimited(...), where the
    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
    //
    // We don't abort on rejection; the driver reports all accumulated errors
    // after parsing/expansion, so every error in every macro gets reported.
    match *lhs {
        mbe::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, attrs, &tts.tts),
        _ => {
            let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
            sess.span_diagnostic.span_err(lhs.span(), msg);
            false
        }
    }
}
/// Checks that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely.
fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
    use mbe::TokenTree;
    for tt in tts {
        match *tt {
            // Leaves can never be empty repetitions.
            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {}
            TokenTree::Delimited(_, ref del) => {
                if !check_lhs_no_empty_seq(sess, &del.tts) {
                    return false;
                }
            }
            TokenTree::Sequence(span, ref seq) => {
                // An element that can itself match nothing: a `$x:vis`
                // fragment, or a nested `*`/`?` repetition.
                let may_match_empty = |seq_tt: &TokenTree| match *seq_tt {
                    TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
                    TokenTree::Sequence(_, ref sub_seq) => {
                        sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                    }
                    _ => false,
                };
                // Without a separator, a repetition whose every element may
                // be empty could loop forever during matching.
                if seq.separator.is_none() && seq.tts.iter().all(may_match_empty) {
                    sess.span_diagnostic
                        .span_err(span.entire(), "repetition matches empty token tree");
                    return false;
                }
                if !check_lhs_no_empty_seq(sess, &seq.tts) {
                    return false;
                }
            }
        }
    }
    true
}
/// A rule's right-hand side must be a single delimited token tree.
/// Emits a diagnostic and returns `false` otherwise.
fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
    if let mbe::TokenTree::Delimited(..) = *rhs {
        return true;
    }
    sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited");
    false
}
/// Entry point of the FOLLOW-set analysis for a single matcher.
/// Success is detected by comparing the diagnostic error count before and
/// after running the core check: no new errors means the matcher is valid.
fn check_matcher(
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    matcher: &[mbe::TokenTree],
) -> bool {
    // Precompute FIRST sets for every repetition sequence inside the matcher.
    let first_sets = FirstSets::new(matcher);
    let errors_before = sess.span_diagnostic.err_count();
    // The matcher as a whole has no trailing context, so the suffix is empty.
    check_matcher_core(sess, features, attrs, &first_sets, matcher, &TokenSet::empty());
    sess.span_diagnostic.err_count() == errors_before
}
// `The FirstSets` for a matcher is a mapping from subsequences in the
// matcher to the FIRST set for that subsequence.
//
// This mapping is partially precomputed via a backwards scan over the
// token trees of the matcher, which provides a mapping from each
// repetition sequence to its *first* set.
//
// (Hypothetically, sequences should be uniquely identifiable via their
// spans, though perhaps that is false, e.g., for macro-generated macros
// that do not try to inject artificial span information. My plan is
// to try to catch such cases ahead of time and not include them in
// the precomputed mapping.)
struct FirstSets {
    // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
    // span in the original matcher to the First set for the inner sequence `tt ...`.
    //
    // If two sequences have the same span in a matcher, then map that
    // span to None (invalidating the mapping here and forcing the code to
    // use a slow path).
    //
    // Consumers: `FirstSets::first` reads this map; a `None` entry makes it
    // recompute the set on the fly instead of trusting the cache.
    first: FxHashMap<Span, Option<TokenSet>>,
}
impl FirstSets {
    // Precomputes the FIRST set of every repetition sequence in `tts` via a
    // single backwards pass (see `build_recur`), returning the populated map.
    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // walks backward over `tts`, returning the FIRST for `tts`
        // and updating `sets` at the same time for all sequence
        // substructure we find within `tts`.
        //
        // The reverse scan means that by the time we process an element, the
        // running `first` already describes everything to its *right*; an
        // element that cannot be empty therefore replaces `first` outright,
        // while a possibly-empty element merges into it.
        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match *tt {
                    TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                        first.replace_with(tt.clone());
                    }
                    TokenTree::Delimited(span, ref delimited) => {
                        // Recurse for the side effect of caching inner sequences;
                        // the delimited group's own FIRST is just its open delimiter.
                        build_recur(sets, &delimited.tts[..]);
                        first.replace_with(delimited.open_tt(span));
                    }
                    TokenTree::Sequence(sp, ref seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts[..]);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // if there is already an entry, then a span must have collided.
                                // This should not happen with typical macro_rules macros,
                                // but syntax extensions need not maintain distinct spans,
                                // so distinct syntax trees can be assigned the same span.
                                // In such a case, the map cannot be trusted; so mark this
                                // entry as unusable.
                                occ.insert(None);
                            }
                        }

                        // If the sequence contents can be empty, then the first
                        // token could be the separator token itself.

                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TokenTree::Token(sep.clone()));
                        }

                        // Reverse scan: Sequence comes before `first`.
                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // If sequence is potentially empty, then
                            // union them (preserving first emptiness).
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Otherwise, sequence guaranteed
                            // non-empty; replace first.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    // walks forward over `tts` until all potential FIRST tokens are
    // identified.
    //
    // Uses the precomputed map for nested sequences when the cached entry is
    // trustworthy (`Some`), otherwise recomputes the sub-FIRST on the fly.
    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            assert!(first.maybe_empty);
            match *tt {
                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                    first.add_one(tt.clone());
                    return first;
                }
                TokenTree::Delimited(span, ref delimited) => {
                    first.add_one(delimited.open_tt(span));
                    return first;
                }
                TokenTree::Sequence(sp, ref seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(&Some(ref subfirst)) => subfirst,
                        Some(&None) => {
                            // Span collision invalidated the cache; recompute.
                            subfirst_owned = self.first(&seq_rep.tts[..]);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // If the sequence contents can be empty, then the first
                    // token could be the separator token itself.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TokenTree::Token(sep.clone()));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Continue scanning for more first
                        // tokens, but also make sure we
                        // restore empty-tracking state.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // we only exit the loop if `tts` was empty or if every
        // element of `tts` matches the empty sequence.
        assert!(first.maybe_empty);
        first
    }
}
// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
// (for macro-by-example syntactic variables). It also carries the
// `maybe_empty` flag; that is true if and only if the matcher can
// match an empty token sequence.
//
// The First set is computed on submatchers like `$($a:expr b),* $(c)* d`,
// which has corresponding FIRST = {$a:expr, c, d}.
// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
//
// (Notably, we must allow for *-op to occur zero times.)
#[derive(Clone, Debug)]
struct TokenSet {
    // Deduplicated members of the set; order is insertion order.
    tokens: Vec<mbe::TokenTree>,
    // True iff the sequence this set describes can match zero tokens.
    maybe_empty: bool,
}
impl TokenSet {
// Returns a set for the empty sequence.
fn empty() -> Self {
TokenSet { tokens: Vec::new(), maybe_empty: true }
}
// Returns the set `{ tok }` for the single-token (and thus
// non-empty) sequence [tok].
fn singleton(tok: mbe::TokenTree) -> Self {
TokenSet { tokens: vec![tok], maybe_empty: false }
}
// Changes self to be the set `{ tok }`.
// Since `tok` is always present, marks self as non-empty.
fn replace_with(&mut self, tok: mbe::TokenTree) {
self.tokens.clear();
self.tokens.push(tok);
self.maybe_empty = false;
}
// Changes self to be the empty set `{}`; meant for use when
// the particular token does not matter, but we want to
// record that it occurs.
fn replace_with_irrelevant(&mut self) {
self.tokens.clear();
self.maybe_empty = false;
}
// Adds `tok` to the set for `self`, marking sequence as non-empy.
fn add_one(&mut self, tok: mbe::TokenTree) {
if !self.tokens.contains(&tok) {
self.tokens.push(tok);
}
self.maybe_empty = false;
}
// Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
if !self.tokens.contains(&tok) {
self.tokens.push(tok);
}
}
// Adds all elements of `other` to this.
//
// (Since this is a set, we filter out duplicates.)
//
// If `other` is potentially empty, then preserves the previous
// setting of the empty flag of `self`. If `other` is guaranteed
// non-empty, then `self` is marked non-empty.
fn add_all(&mut self, other: &Self) {
for tok in &other.tokens {
if !self.tokens.contains(tok) {
self.tokens.push(tok.clone());
}
}
if !other.maybe_empty {
self.maybe_empty = false;
}
}
}
// Checks that `matcher` is internally consistent and that it
// can legally be followed by a token `N`, for all `N` in `follow`.
// (If `follow` is empty, then it imposes no constraint on
// the `matcher`.)
//
// Returns the set of NT tokens that could possibly come last in
// `matcher`. (If `matcher` matches the empty sequence, then
// `maybe_empty` will be set to true.)
//
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
fn check_matcher_core(
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    first_sets: &FirstSets,
    matcher: &[mbe::TokenTree],
    follow: &TokenSet,
) -> TokenSet {
    use mbe::TokenTree;

    // `last` accumulates the NT tokens that could end the prefix of the
    // matcher processed so far.
    let mut last = TokenSet::empty();

    // 2. For each token and suffix  [T, SUFFIX] in M:
    // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty,
    // then ensure T can also be followed by any element of FOLLOW.
    'each_token: for i in 0..matcher.len() {
        let token = &matcher[i];
        let suffix = &matcher[i + 1..];

        let build_suffix_first = || {
            let mut s = first_sets.first(suffix);
            if s.maybe_empty {
                s.add_all(follow);
            }
            s
        };

        // (we build `suffix_first` on demand below; you can tell
        // which cases are supposed to fall through by looking for the
        // initialization of this variable.)
        let suffix_first;

        // First, update `last` so that it corresponds to the set
        // of NT tokens that might end the sequence `... token`.
        match *token {
            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                let can_be_followed_by_any;
                if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
                    let msg = format!("invalid fragment specifier `{}`", bad_frag);
                    sess.span_diagnostic
                        .struct_span_err(token.span(), &msg)
                        .help(VALID_FRAGMENT_NAMES_MSG)
                        .emit();
                    // (This eliminates false positives and duplicates
                    // from error messages.)
                    can_be_followed_by_any = true;
                } else {
                    can_be_followed_by_any = token_can_be_followed_by_any(token);
                }

                if can_be_followed_by_any {
                    // don't need to track tokens that work with any,
                    last.replace_with_irrelevant();
                    // ... and don't need to check tokens that can be
                    // followed by anything against SUFFIX.
                    continue 'each_token;
                } else {
                    last.replace_with(token.clone());
                    suffix_first = build_suffix_first();
                }
            }
            TokenTree::Delimited(span, ref d) => {
                // Inside the delimiters, the only legal "follow" token is
                // the closing delimiter itself.
                let my_suffix = TokenSet::singleton(d.close_tt(span));
                check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
                // don't track non NT tokens
                last.replace_with_irrelevant();

                // also, we don't need to check delimited sequences
                // against SUFFIX
                continue 'each_token;
            }
            TokenTree::Sequence(_, ref seq_rep) => {
                suffix_first = build_suffix_first();
                // The trick here: when we check the interior, we want
                // to include the separator (if any) as a potential
                // (but not guaranteed) element of FOLLOW. So in that
                // case, we make a temp copy of suffix and stuff
                // delimiter in there.
                //
                // FIXME: Should I first scan suffix_first to see if
                // delimiter is already in it before I go through the
                // work of cloning it? But then again, this way I may
                // get a "tighter" span?
                let mut new;
                let my_suffix = if let Some(sep) = &seq_rep.separator {
                    new = suffix_first.clone();
                    new.add_one_maybe(TokenTree::Token(sep.clone()));
                    &new
                } else {
                    &suffix_first
                };

                // At this point, `suffix_first` is built, and
                // `my_suffix` is some TokenSet that we can use
                // for checking the interior of `seq_rep`.
                let next =
                    check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
                if next.maybe_empty {
                    // A possibly-empty repetition: tokens before it may
                    // still end the sequence, so union rather than replace.
                    last.add_all(&next);
                } else {
                    last = next;
                }

                // the recursive call to check_matcher_core already ran the 'each_last
                // check below, so we can just keep going forward here.
                continue 'each_token;
            }
        }

        // (`suffix_first` guaranteed initialized once reaching here.)

        // Now `last` holds the complete set of NT tokens that could
        // end the sequence before SUFFIX. Check that every one works with `suffix`.
        'each_last: for token in &last.tokens {
            if let TokenTree::MetaVarDecl(_, name, frag_spec) = *token {
                for next_token in &suffix_first.tokens {
                    match is_in_follow(next_token, frag_spec.name) {
                        IsInFollow::Invalid(msg, help) => {
                            sess.span_diagnostic
                                .struct_span_err(next_token.span(), &msg)
                                .help(help)
                                .emit();
                            // don't bother reporting every source of
                            // conflict for a particular element of `last`.
                            continue 'each_last;
                        }
                        IsInFollow::Yes => {}
                        IsInFollow::No(possible) => {
                            // "is" when both sets are singletons (the follow
                            // token is certain); "may be" otherwise.
                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                            {
                                "is"
                            } else {
                                "may be"
                            };

                            let sp = next_token.span();
                            let mut err = sess.span_diagnostic.struct_span_err(
                                sp,
                                &format!(
                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
                                     is not allowed for `{frag}` fragments",
                                    name = name,
                                    frag = frag_spec,
                                    next = quoted_tt_to_string(next_token),
                                    may_be = may_be
                                ),
                            );
                            err.span_label(
                                sp,
                                format!("not allowed after `{}` fragments", frag_spec),
                            );
                            let msg = "allowed there are: ";
                            match possible {
                                &[] => {}
                                &[t] => {
                                    err.note(&format!(
                                        "only {} is allowed after `{}` fragments",
                                        t, frag_spec,
                                    ));
                                }
                                ts => {
                                    err.note(&format!(
                                        "{}{} or {}",
                                        msg,
                                        ts[..ts.len() - 1]
                                            .iter()
                                            .copied()
                                            .collect::<Vec<_>>()
                                            .join(", "),
                                        ts[ts.len() - 1],
                                    ));
                                }
                            }
                            err.emit();
                        }
                    }
                }
            }
        }
    }
    last
}
/// Whether `tok` imposes no constraint on the token that follows it.
/// Only metavariable declarations constrain their successor; for those we
/// defer to the fragment specifier.
fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
    match *tok {
        mbe::TokenTree::MetaVarDecl(_, _, frag_spec) => frag_can_be_followed_by_any(frag_spec.name),
        // (Non NT's can always be followed by anything in matchers.)
        _ => true,
    }
}
/// Returns `true` if a fragment of type `frag` can be followed by any sort of
/// token. We use this (among other things) as a useful approximation
/// for when `frag` can be followed by a repetition like `$(...)*` or
/// `$(...)+`. In general, these can be a bit tricky to reason about,
/// so we adopt a conservative position that says that any fragment
/// specifier which consumes at most one token tree can be followed by
/// a fragment specifier (indeed, these fragments can be followed by
/// ANYTHING without fear of future compatibility hazards).
fn frag_can_be_followed_by_any(frag: Symbol) -> bool {
    // `item` is always terminated by `}` or `;`; each of the others spans
    // exactly one token tree, so nothing after it can change how it parses.
    match frag {
        sym::item | sym::block | sym::ident | sym::literal | sym::meta | sym::lifetime
        | sym::tt => true,
        _ => false,
    }
}
// Outcome of a FOLLOW-set query for a (fragment, next-token) pair.
enum IsInFollow {
    // The token is allowed after the fragment.
    Yes,
    // The token is not allowed; payload lists the tokens that *are*
    // (human-readable strings, used verbatim in the diagnostic note).
    No(&'static [&'static str]),
    // The fragment specifier itself is invalid: (error message, help text).
    Invalid(String, &'static str),
}
/// Determines whether `frag` can legally be followed by the token `tok`,
/// reporting the result as an [`IsInFollow`]. For
/// fragments that can consume an unbounded number of tokens, `tok`
/// must be within a well-defined follow set. This is intended to
/// guarantee future compatibility: for example, without this rule, if
/// we expanded `expr` to include a new binary operator, we might
/// break macros that were relying on that binary operator as a
/// separator.
// when changing this do not forget to update doc/book/macros.md!
fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
    use mbe::TokenTree;

    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
        // closing a token tree can never be matched by any fragment;
        // iow, we always require that `(` and `)` match, etc.
        IsInFollow::Yes
    } else {
        match frag {
            sym::item => {
                // since items *must* be followed by either a `;` or a `}`, we can
                // accept anything after them
                IsInFollow::Yes
            }
            sym::block => {
                // anything can follow block, the braces provide an easy boundary to
                // maintain
                IsInFollow::Yes
            }
            sym::stmt | sym::expr => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            sym::pat => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            sym::path | sym::ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        OpenDelim(token::DelimToken::Brace)
                        | OpenDelim(token::DelimToken::Bracket)
                        | Comma
                        | FatArrow
                        | Colon
                        | Eq
                        | Gt
                        | BinOp(token::Shr)
                        | Semi
                        | BinOp(token::Or) => IsInFollow::Yes,
                        Ident(name, false) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    // A `$b:block` metavariable may follow a path/type.
                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => {
                        IsInFollow::Yes
                    }
                    _ => IsInFollow::No(TOKENS),
                }
            }
            sym::ident | sym::lifetime => {
                // being a single token, idents and lifetimes are harmless
                IsInFollow::Yes
            }
            sym::literal => {
                // literals may be of a single token, or two tokens (negative numbers)
                IsInFollow::Yes
            }
            sym::meta | sym::tt => {
                // being either a single token or a delimited sequence, tt is
                // harmless
                IsInFollow::Yes
            }
            sym::vis => {
                // Explicitly disallow `priv`, on the off chance it comes back.
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    // Metavariables that expand to an ident, type, or path
                    // are fine after a visibility.
                    TokenTree::MetaVarDecl(_, _, frag)
                        if frag.name == sym::ident
                            || frag.name == sym::ty
                            || frag.name == sym::path =>
                    {
                        IsInFollow::Yes
                    }
                    _ => IsInFollow::No(TOKENS),
                }
            }
            kw::Invalid => IsInFollow::Yes,
            _ => IsInFollow::Invalid(
                format!("invalid fragment specifier `{}`", frag),
                VALID_FRAGMENT_NAMES_MSG,
            ),
        }
    }
}
/// Verifies the fragment specifier of `tok` (when it is a metavariable
/// declaration). `Ok(())` for non-metavariables or legal specifiers;
/// `Err` carries the offending specifier's text for the diagnostic.
fn has_legal_fragment_specifier(
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    tok: &mbe::TokenTree,
) -> Result<(), String> {
    debug!("has_legal_fragment_specifier({:?})", tok);
    match *tok {
        mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec)
            if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, tok.span()) =>
        {
            Err(frag_spec.to_string())
        }
        _ => Ok(()),
    }
}
/// Whether `frag_name` is one of the known fragment specifiers.
fn is_legal_fragment_specifier(
    _sess: &ParseSess,
    _features: &Features,
    _attrs: &[ast::Attribute],
    frag_name: Symbol,
    _frag_span: Span,
) -> bool {
    /*
     * If new fragment specifiers are invented in nightly, `_sess`,
     * `_features`, `_attrs`, and `_frag_span` will be useful here
     * for checking against feature gates. See past versions of
     * this function.
     */
    match frag_name {
        sym::block
        | sym::expr
        | sym::ident
        | sym::item
        | sym::lifetime
        | sym::literal
        | sym::meta
        | sym::pat
        | sym::path
        | sym::stmt
        | sym::tt
        | sym::ty
        | sym::vis
        | kw::Invalid => true,
        _ => false,
    }
}
/// Renders a matcher token for use inside a diagnostic message.
/// Only leaf trees are expected here; sequences and delimited groups are
/// never members of a FOLLOW set.
fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
    match tt {
        mbe::TokenTree::Token(token) => pprust::token_to_string(token),
        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
        _ => panic!(
            "unexpected mbe::TokenTree::{{Sequence or Delimited}} \
             in follow set checker"
        ),
    }
}
/// Builds a parser over `tts` that inherits the current expansion's module
/// directory, so nested `mod` items resolve relative to the macro use site.
fn parser_from_cx<'cx>(
    current_expansion: &'cx ExpansionData,
    sess: &'cx ParseSess,
    tts: TokenStream,
) -> Parser<'cx> {
    Parser::new(
        sess,
        tts,
        Some(Directory {
            path: current_expansion.module.directory.clone(),
            ownership: current_expansion.directory_ownership,
        }),
        true,
        true,
        rustc_parse::MACRO_ARGUMENTS,
    )
}
/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
fn parse_failure_msg(tok: &Token) -> String {
    if let token::Eof = tok.kind {
        "unexpected end of macro invocation".to_string()
    } else {
        format!("no rules expected the token `{}`", pprust::token_to_string(tok))
    }
}
| 39.731331 | 100 | 0.511144 |
b98c89e36a7f450ebf079f5d14eb5f1ea3403a15 | 9,887 | use {
crate::{
API_BASE_URL,
models::{
manifest::{
Manifest,
GetDestinyManifestResponse,
},
activity_mode::ActivityMode,
response::GeneralAPIResponse,
public_milestone::PublicMilestone,
user::{
GeneralUser,
HardLinkedUserMembership,
UserMembershipData,
},
profile::DestinyProfileResponse,
groupsv2::UserInfoCard,
membership::MembershipType,
locale::Locale,
historical_stats::HistoricalStatsByPeriod,
},
traits::{
id::{
BNGMembershipID,
DestinyMembershipID,
PlatformType,
CharacterID
},
component::ComponentID,
}
},
std::{
string::ToString,
path::PathBuf,
collections::HashMap,
sync::Mutex,
fs::{
File,
DirEntry,
read_dir,
read_to_string,
},
io::{
self,
Write,
Read,
},
},
anyhow::{
Result,
anyhow,
},
serde::{
Deserialize,
Serialize,
de::DeserializeOwned,
},
serde_json,
reqwest,
};
/// Thin client for the Bungie.net Destiny 2 HTTP API.
pub struct DestinyAPI {
    /// API key sent as the `X-Api-Key` header on every request.
    api_key: String,
    /// Reused reqwest client, shared across all calls.
    client: reqwest::Client,
}
/// On-disk index (serialized to `manifestinfo.json`) recording, per locale,
/// which manifest version was downloaded and where its database lives.
#[derive(Serialize, Deserialize)]
struct ManifestDownloadVersion {
    locales: HashMap<Locale, DownloadedDatabase>,
}
/// One downloaded manifest database: the version string reported by the API
/// at download time, and the local path of the sqlite file.
#[derive(Serialize, Deserialize)]
struct DownloadedDatabase {
    version: String,
    path: PathBuf,
}
impl DestinyAPI {
    /// Creates a client using `key` as the Bungie.net API key.
    pub fn new(key: &str) -> DestinyAPI {
        DestinyAPI {
            api_key: key.to_string(),
            client: reqwest::Client::new(),
        }
    }

    /// Looks up a user by their Bungie.net membership id.
    pub async fn get_user_by_bungie_net_id<T: BNGMembershipID>(&self, id: &T) -> Result<GeneralUser> {
        Ok(self.get_request(&format!("User/GetBungieNetUserById/{}/", id.bng_membership_id())).await?.response)
    }

    /// Searches users by display name.
    // NOTE(review): the URL puts a trailing `/` *after* the `q=` query value
    // (`...?q={}/`) — looks suspicious; confirm against the Bungie API docs.
    pub async fn get_user_by_name(&self, username: String) -> Result<Vec<GeneralUser>> {
        Ok(self.get_request::<Vec<GeneralUser>>(&format!("User/SearchUsers?q={}/", username)).await?.response)
    }

    /// Resolves a SteamID64 to full membership data via the hard-linked
    /// credential endpoint, then fetches the memberships for that id.
    pub async fn get_user_by_steamid64<T: ToString>(&self, steamid: &T) -> Result<UserMembershipData> {
        let hardlinked = self.get_request::<HardLinkedUserMembership>(&format!("User/GetMembershipFromHardLinkedCredential/SteamID/{}/", steamid.to_string())).await?.response;
        Ok(self.get_user_membership_data_by_membershipid_destiny(&hardlinked, &hardlinked).await?)
    }

    /// Fetches membership data keyed by a *Destiny* membership id.
    pub async fn get_user_membership_data_by_membershipid_destiny<U: DestinyMembershipID, P: PlatformType>(&self, user: &U, platform: &P) -> Result<UserMembershipData> {
        Ok(self.get_request(&format!("User/GetMembershipsById/{}/{}/", user.destiny_membership_id(), platform.platform_type().into_i32())).await?.response)
    }

    /// Fetches membership data keyed by a *Bungie.net* membership id.
    pub async fn get_user_membership_data_by_membershipid_bng<U: BNGMembershipID, P: PlatformType>(&self, user: &U, platform: &P) -> Result<UserMembershipData> {
        Ok(self.get_request(&format!("User/GetMembershipsById/{}/{}/", user.bng_membership_id(), platform.platform_type().into_i32())).await?.response)
    }

    /// platform must not be [`BungieNet`](crate::models::membership::MembershipType::BungieNet)
    pub async fn get_destiny_player_by_name<T: ToString>(&self, username: &T, platform: MembershipType) -> Result<Vec<UserInfoCard>> {
        Ok(self.get_request(&format!("Destiny2/SearchDestinyPlayer/{}/{}/", platform.into_i32(), username.to_string())).await?.response)
    }

    /// Fetches the requested profile components for a player.
    ///
    /// platform must not be [`BungieNet`](crate::models::membership::MembershipType::BungieNet)
    pub async fn get_components<U: DestinyMembershipID, P: PlatformType, C: ComponentID>(&self, user: &U, platform: &P, components: Vec<C>) -> Result<DestinyProfileResponse> {
        let mut url = format!("Destiny2/{}/Profile/{}/?components=", platform.platform_type().into_i32(), user.destiny_membership_id());
        // Comma-separated component list; a trailing comma is left in place
        // (the API apparently tolerates it — TODO confirm).
        for component in components {
            url.push_str(&format!("{},", component.component_id()));
        }
        Ok(self.get_request(&url).await?.response)
    }

    /// Fetches the currently active public milestones, keyed by milestone hash.
    pub async fn get_public_milestones(&self) -> Result<HashMap<String, PublicMilestone>> {
        Ok(self.get_request("Destiny2/Milestones/").await?.response)
    }

    /// Fetches historical stats for one character, optionally filtered to the
    /// given activity modes.
    pub async fn get_historical_stats<P: PlatformType, U: DestinyMembershipID, C: CharacterID>(&self, platform: &P, user: &U, character: &C, gamemodes: Option<Vec<ActivityMode>>) -> Result<HashMap<String, HistoricalStatsByPeriod>> {
        let mut url = format!("Destiny2/{}/Account/{}/Character/{}/Stats/", platform.platform_type().into_i32(), user.destiny_membership_id(), character.character_id());
        if let Some(x) = gamemodes {
            url.push_str("?modes=");
            for i in x {
                url.push_str(&format!("{},", i.to_int32()));
            }
        }
        Ok(self.get_request(&url).await?.response)
    }

    /// Performs a GET against `{API_BASE_URL}/{url}` with the API-key header
    /// and deserializes the body into the generic response wrapper.
    pub async fn get_request<T: DeserializeOwned>(&self, url: &str) -> Result<GeneralAPIResponse<T>> {
        // Trace every call in debug builds only.
        if cfg!(debug_assertions) {
            println!("API_CALL: {}/{}", API_BASE_URL, url.to_string());
        }
        let raw_response = self.client.get(&format!("{}/{}", API_BASE_URL, url.to_string()))
            .header("X-Api-Key", self.api_key.clone())
            .send()
            .await?
            .text()
            .await?;
        Ok(serde_json::from_str(&raw_response)?)
    }

    /// Opens the manifest database for `loc` from directory `p`, reusing a
    /// previously downloaded copy when `manifestinfo.json` records the same
    /// version the API currently reports; otherwise downloads it fresh.
    pub async fn manifest(&self, p: PathBuf, loc: Locale) -> Result<Manifest> {
        let files: Vec<io::Result<DirEntry>> = read_dir(&p)?.collect();
        let res = self.call_get_manifest().await?.response;
        for f in files {
            if f?.file_name() == "manifestinfo.json" {
                let mut manifest_info_path = p.clone();
                manifest_info_path.push("manifestinfo.json");
                let data: ManifestDownloadVersion = serde_json::from_str(&read_to_string(manifest_info_path)?)?;
                if let Some(x) = data.locales.get(&loc) {
                    // Cache hit: same version — open the existing sqlite file.
                    if x.version == res.version {
                        return Ok(Manifest {
                            database: Mutex::new(sqlite::Connection::open(&x.path)?),
                            version: x.version.to_owned()
                        });
                    }
                }
            }
        }
        // No usable cached copy: download the manifest database.
        Ok(self.download_manifest(res, p, loc).await?)
    }

    /// Returns whether the cached manifest for `loc` in directory `p` matches
    /// the version currently reported by the API. Errors if no cached entry
    /// for that locale exists at all.
    pub async fn manifest_up_to_date(&self, p: PathBuf, loc: Locale) -> Result<bool> {
        let files: Vec<io::Result<DirEntry>> = read_dir(&p)?.collect();
        let res = self.call_get_manifest().await?.response;
        for f in files {
            if f?.file_name() == "manifestinfo.json" {
                let mut manifest_info_path = p.clone();
                manifest_info_path.push("manifestinfo.json");
                let data: ManifestDownloadVersion = serde_json::from_str(&read_to_string(manifest_info_path)?)?;
                if let Some(x) = data.locales.get(&loc) {
                    if x.version == res.version {
                        return Ok(true);
                    } else {
                        return Ok(false)
                    }
                }
            }
        }
        return Err(anyhow!("Manifest not found"))
    }

    /// Opens a manifest database directly from `p` without consulting the
    /// version index; the returned `Manifest` has an empty version string.
    pub async fn manifest_unchecked(&self, p: PathBuf) -> Result<Manifest> {
        Ok(Manifest {
            database: Mutex::new(sqlite::Connection::open(&p)?),
            version: "".to_string()
        })
    }

    /// Raw call to the `Destiny2/Manifest/` endpoint.
    async fn call_get_manifest(&self) -> Result<GeneralAPIResponse<GetDestinyManifestResponse>> {
        self.get_request::<GetDestinyManifestResponse>("Destiny2/Manifest/").await
    }

    /// Downloads the MobileWorldContent sqlite database for `loc` into
    /// directory `p` and records it in `manifestinfo.json` (merging with any
    /// existing entries for other locales).
    pub(crate) async fn download_manifest(&self, response: GetDestinyManifestResponse, p: PathBuf, loc: Locale) -> Result<Manifest> {
        // we are only interested in the MobileWorldContent database, since the other two don't contain relevant data
        // MobileGearAssets contains data to render 3d models (my guess is that the mobile app uses this one to render the 3d models)
        // MobileAssets is empty
        // MobileWorldContent contains the defenitions of hashes <- what we are interested in
        let mut mobile_world_content_file_path = p.clone();
        mobile_world_content_file_path.push(format!("{}_{}.sqlite", response.version, loc));
        let mobile_world_content_url = match response.mobile_world_content_paths.get(&format!("{}", loc)) {
            Some(x) => x,
            None => return Err(anyhow!("Locale not found in mobileWorldContentPaths")),
        };
        let mobile_world = Manifest::download_database(&self.client, &mobile_world_content_url, &mobile_world_content_file_path).await?;
        // Load the existing version index if present so other locales'
        // entries are preserved; otherwise start a fresh one.
        let mut version_file_path = p.clone();
        version_file_path.push("manifestinfo.json");
        let mut version_file_data = {
            if let Ok(mut x) = File::open(&version_file_path) {
                let mut content = String::new();
                x.read_to_string(&mut content)?;
                serde_json::from_str(&content)?
            } else {
                ManifestDownloadVersion { locales: HashMap::new() }
            }
        };
        version_file_data.locales.insert(loc, DownloadedDatabase { version: response.version.clone(), path: mobile_world_content_file_path });
        let mut version_file = File::create(&version_file_path)?;
        version_file.write_all(&serde_json::to_string(&version_file_data)?.as_bytes())?;
        Ok(Manifest {
            database: Mutex::new(mobile_world),
            version: response.version.to_owned()
        })
    }
}
876c596669c4b9e3761e2d8df80114b75fb657cd | 1,660 |
/// Yew component rendering the Material "subtitles off" SVG icon.
/// Appearance (size, colors, stroke) is driven entirely by `props`.
pub struct IconSubtitlesOff {
    props: crate::Props,
}
impl yew::Component for IconSubtitlesOff {
    type Properties = crate::Props;
    // Stateless icon: it never sends itself messages.
    type Message = ();

    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }

    // NOTE(review): returning `true` requests a re-render on every message,
    // but with `Message = ()` no messages should ever arrive — confirm this
    // matches the generator's intent.
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }

    // NOTE(review): returning `false` means prop changes are ignored and the
    // icon never re-renders with new props — verify this is intentional.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }

    // Renders the SVG, falling back to defaults for any unset props
    // (24px, no fill, currentColor stroke, width 2, round caps/joins).
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
            <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><rect fill="none" height="24" width="24"/></g><g><g><path d="M20,4H6.83l8,8H20v2h-3.17l4.93,4.93C21.91,18.65,22,18.34,22,18V6C22,4.9,21.1,4,20,4z"/><path d="M1.04,3.87l1.2,1.2C2.09,5.35,2,5.66,2,6v12c0,1.1,0.9,2,2,2h13.17l2.96,2.96l1.41-1.41L2.45,2.45L1.04,3.87z M8,12v2H4 v-2H8z M14,16.83V18H4v-2h9.17L14,16.83z"/></g></g></svg>
            </svg>
        }
    }
}
75ef6c6c4a6173a9c5f5852f41d6d6f1e1f8d0ef | 535 | use necsim_core_bond::ClosedUnitF64;
use crate::{
cogs::{Habitat, MathsCore},
landscape::Location,
};
#[allow(clippy::inline_always, clippy::inline_fn_without_body)]
#[contract_trait]
pub trait SpeciationProbability<M: MathsCore, H: Habitat<M>>:
crate::cogs::Backup + core::fmt::Debug
{
#[must_use]
#[debug_requires(habitat.contains(location), "location is inside habitat")]
fn get_speciation_probability_at_location(
&self,
location: &Location,
habitat: &H,
) -> ClosedUnitF64;
}
| 25.47619 | 79 | 0.685981 |
e50aaf86183f91acf24b49172eda1cce1de5acd9 | 3,582 | //ordinal events example
/// using app::handle_main can result in more ergonomic code, allowing any widget to handle the event, keeping ```main``` more organized.
/// However, custom event values are hard coded.
/// enum_ordanlize let us define our custom event values relative to the number of total enum values. Thus we can
/// insert or remove custom events without worrying about disjoint values.
use fltk::{app, app::*, frame::*, window::*, button::*, prelude::*, group::*};
use enum_ordinalize::Ordinalize;
use std::rc::Rc;
use std::cell::RefCell;
//specify i32 for our custom event is in i32, not i8 (the default)
/// Application-defined events dispatched through `app::handle_main`.
/// `Ordinalize` exposes each variant's discriminant via `.ordinal()`, so the
/// event values stay contiguous even when variants are added or removed.
#[derive(Debug, PartialEq, Eq, Ordinalize)]
#[repr(i32)]
enum CustomEvents{
    /// Discriminant 41 — presumably chosen to stay clear of fltk's built-in
    /// event values; TODO confirm against the fltk event range.
    AddOne = 41,
    /// Implicitly 42 (previous discriminant + 1).
    AddTwo,
    /// Implicitly 43.
    AddThree,
}
/// Widget wrapping a horizontal `Pack` of buttons that fire custom events.
struct Adder{
    // Kept alive so the pack (and its buttons) stay in the widget tree.
    pack: Pack
}
impl Adder{
    /// Builds a horizontal pack of three buttons; each button dispatches one
    /// of the `CustomEvents` through `app::handle_main` when clicked.
    pub fn new() -> Self {
        let mut container = Pack::new(80, 50, 200, 20, "Adder Widget");
        // One button per custom event. The ordinal is precomputed (an `i32`,
        // which is `Copy`) so each callback closure captures a plain integer.
        for &(label, event_value) in &[
            ("add1", CustomEvents::AddOne.ordinal()),
            ("add2", CustomEvents::AddTwo.ordinal()),
            ("add3", CustomEvents::AddThree.ordinal()),
        ] {
            let mut button = Button::new(0, 0, 50, 20, label);
            // The widget argument is unused — the callback only dispatches
            // the event — so bind it as `_` to avoid unused-variable warnings.
            button.set_callback(move |_| {
                let _ = app::handle_main(event_value);
            });
        }
        container.end();
        container.set_type(PackType::Horizontal);
        Adder { pack: container }
    }
}
/// Marker type whose constructor builds and shows the demo window; all
/// widgets and handlers are owned by the fltk runtime after `new` returns.
struct MyWindow{
}
impl MyWindow {
    /// Builds the demo window: a frame displaying a shared counter, the
    /// `Adder` button row, and two event handlers — the frame handler
    /// refreshes the label, the window handler increments the counter.
    pub fn new() -> Self {
        // Counter shared between the two handlers via Rc<RefCell<_>>.
        let counter = Rc::new(RefCell::new(0));
        let mut win = Window::new(200, 200, 300, 200, "Ordinal Events");
        let mut disp_frame = Frame::new(200, 0, 200, 200, "0").center_of_parent();
        let _adder = Adder::new();

        let counter_cl = counter.clone();
        // Refresh the frame's label with the counter value for any of the
        // three custom events. Uses `borrow()` (not `borrow_mut()`): this is
        // a read-only access, and an immutable borrow cannot collide with
        // another simultaneous read.
        disp_frame.handle(move |widg, ev| {
            let bits = ev.bits();
            if bits == CustomEvents::AddOne.ordinal()
                || bits == CustomEvents::AddTwo.ordinal()
                || bits == CustomEvents::AddThree.ordinal()
            {
                widg.set_label(&counter_cl.borrow().to_string());
                dbg!("also handled event here");
                true
            } else {
                false
            }
        });
        win.end();
        win.show();

        // Increment the counter by the amount encoded in the event; any
        // other event is left unhandled (`false`).
        win.handle(move |_, ev| {
            let increment = if ev.bits() == CustomEvents::AddOne.ordinal() {
                1
            } else if ev.bits() == CustomEvents::AddTwo.ordinal() {
                2
            } else if ev.bits() == CustomEvents::AddThree.ordinal() {
                3
            } else {
                return false;
            };
            *counter.borrow_mut() += increment;
            true
        });
        MyWindow {}
    }
}
fn main() {
    let app = App::default();
    // Window (and its handlers) live on inside the fltk runtime; the
    // returned marker value itself is not needed.
    let _ = MyWindow::new();
    // Print the effective event values so disjointness is visible at startup.
    dbg!(CustomEvents::AddOne.ordinal());
    dbg!(CustomEvents::AddTwo.ordinal());
    dbg!(CustomEvents::AddThree.ordinal());
    // [src\main.rs:126] CustomEvents::AddOne.ordinal() = 41
    // [src\main.rs:127] CustomEvents::AddTwo.ordinal() = 42
    // [src\main.rs:128] CustomEvents::AddThree.ordinal() = 43
    app.run().unwrap();
}
5066669a4304335e2d0a8320e6f2d60114d9675e | 4,762 | use {
macroquad::{
camera::{set_camera, Camera as _, Camera3D},
color::{
Color, BLUE, DARKGRAY, GOLD, GREEN, LIME, MAGENTA, MAROON, ORANGE, PINK, RED, WHITE,
YELLOW,
},
input::{is_key_pressed, is_mouse_button_down, mouse_position, KeyCode, MouseButton},
models::{draw_line_3d, draw_sphere},
time::get_frame_time,
window::{clear_background, next_frame, screen_height, screen_width},
},
na::{Isometry3, Point, Point3, Vector, Vector3},
skelly::{ik::rotor::RotorSolver, Posture, Skelly},
};
#[macroquad::main("ik-test")]
async fn main() {
    // Build the demo skeleton: a three-bone chain with two three-bone
    // "arms" attached to its top joint.
    let mut skelly = Skelly::<f32, Color>::new();
    let mut index = skelly.add_root_with(Point::origin(), GOLD);
    index = skelly.attach_with(Vector3::z().into(), index, MAROON);
    index = skelly.attach_with(Vector3::z().into(), index, PINK);

    let mut left = skelly.attach_with(Vector3::z().into(), index, ORANGE);
    left = skelly.attach_with((-Vector3::x()).into(), left, MAGENTA);
    left = skelly.attach_with((-Vector3::x()).into(), left, BLUE);

    let mut right = skelly.attach_with(Vector3::z().into(), index, LIME);
    right = skelly.attach_with(Vector3::x().into(), right, YELLOW);
    right = skelly.attach_with(Vector3::x().into(), right, WHITE);

    // Scratch buffer reused by draw_skelly for the joints' global transforms.
    let mut globals = vec![Isometry3::identity(); skelly.len()];

    let mut solver = RotorSolver::<f32>::new(0.0001);
    // solver.set_position_goal(index, Point::origin());
    solver.set_position_goal(left, Point::origin());
    solver.set_position_goal(right, Point::origin());

    let mut camera = Camera3D::default();
    let mut left_target = Point::origin();
    let mut right_target = Point::origin();
    let mut posture = skelly.make_posture();

    camera.position.y += 5.0;

    // Shared ray-pick helper (previously duplicated, with inconsistent
    // spellings, in the left/right mouse branches): casts the mouse cursor
    // through the inverse view-projection matrix and intersects the ray
    // with the ground plane (y == 0).
    let mouse_on_ground = |camera_matrix_inv| {
        let (x, y) = mouse_position();
        // Window coordinates -> normalized device coordinates.
        let (x, y) = (
            x * 2.0 / screen_width() - 1.0,
            1.0 - y * 2.0 / screen_height(),
        );
        let origin = camera_matrix_inv.transform_point3(macroquad::math::Vec3::new(0.0, 0.0, 0.0));
        let far = camera_matrix_inv.transform_point3(macroquad::math::Vec3::new(x, y, 0.999));
        let dir = far - origin;
        // Scale the ray so it reaches y == 0.
        let hit = dir * (-origin.y / dir.y) + origin;
        Point3::from(Vector::from([hit.x, hit.y, hit.z]))
    };

    // Initial delay before the solver starts stepping (seconds).
    let mut solver_wait_for = 1.0;

    loop {
        if is_key_pressed(KeyCode::Escape) {
            break;
        }

        let camera_matrix = camera.matrix();

        // Left mouse button drags the left-arm IK goal along the ground.
        if is_mouse_button_down(MouseButton::Left) {
            left_target = mouse_on_ground(camera_matrix.inverse());
            solver.set_position_goal(left, left_target);
        }

        // Right mouse button drags the right-arm IK goal.
        if is_mouse_button_down(MouseButton::Right) {
            right_target = mouse_on_ground(camera_matrix.inverse());
            solver.set_position_goal(right, right_target);
        }

        // Fixed-timestep solver: run one 1 ms step per millisecond of
        // wall-clock time elapsed since the previous frame.
        solver_wait_for -= get_frame_time();
        while solver_wait_for < 0.0 {
            let _solved = solver.solve_step(&skelly, &mut posture);
            solver_wait_for += 0.001;
        }

        set_camera(camera);

        next_frame().await;
        clear_background(DARKGRAY);

        // Goal markers: red = left target, green = right target.
        draw_sphere(
            macroquad::math::Vec3::new(left_target.x, left_target.y, left_target.z),
            0.1,
            None,
            RED,
        );
        draw_sphere(
            macroquad::math::Vec3::new(right_target.x, right_target.y, right_target.z),
            0.1,
            None,
            GREEN,
        );

        draw_skelly(&skelly, &posture, &mut globals);
    }
}
/// Renders every bone of `skelly` as a colored 3D line segment for the
/// given `posture`.
///
/// `globals` is scratch space: it is refilled with each joint's global
/// transform before drawing, so its previous contents are irrelevant.
fn draw_skelly(
    skelly: &Skelly<f32, Color>,
    posture: &Posture<f32>,
    globals: &mut Vec<Isometry3<f32>>,
) {
    skelly.write_globals_for_posture(posture, globals);

    for joint in 0..skelly.len() {
        // Root joints have no parent and therefore no bone to draw.
        let parent = match skelly.get_parent(joint) {
            Some(parent) => parent,
            None => continue,
        };

        let from = &globals[parent].translation.vector;
        let to = &globals[joint].translation.vector;
        draw_line_3d(
            macroquad::math::Vec3::new(from.x, from.y, from.z),
            macroquad::math::Vec3::new(to.x, to.y, to.z),
            *skelly.get_userdata(joint),
        );
    }
}
| 34.258993 | 98 | 0.562999 |
f8845f1e95d1922423f941fee6fb1cd5da637123 | 1,597 | use crate::{database::DatabaseGuard, ConduitResult, Error, Ruma};
use ruma::api::client::{
error::ErrorKind,
r0::filter::{create_filter, get_filter},
};
#[cfg(feature = "conduit_bin")]
use rocket::{get, post};
/// # `GET /_matrix/client/r0/user/{userId}/filter/{filterId}`
///
/// Loads a filter that was previously created.
///
/// - A user can only access their own filters
#[cfg_attr(
feature = "conduit_bin",
get("/_matrix/client/r0/user/<_>/filter/<_>", data = "<body>")
)]
#[tracing::instrument(skip(db, body))]
pub async fn get_filter_route(
db: DatabaseGuard,
body: Ruma<get_filter::Request<'_>>,
) -> ConduitResult<get_filter::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
let filter = match db.users.get_filter(sender_user, &body.filter_id)? {
Some(filter) => filter,
None => return Err(Error::BadRequest(ErrorKind::NotFound, "Filter not found.")),
};
Ok(get_filter::Response::new(filter).into())
}
/// # `PUT /_matrix/client/r0/user/{userId}/filter`
///
/// Creates a new filter to be used by other endpoints.
#[cfg_attr(
    feature = "conduit_bin",
    post("/_matrix/client/r0/user/<_>/filter", data = "<body>")
)]
#[tracing::instrument(skip(db, body))]
pub async fn create_filter_route(
    db: DatabaseGuard,
    body: Ruma<create_filter::Request<'_>>,
) -> ConduitResult<create_filter::Response> {
    let sender_user = body.sender_user.as_ref().expect("user is authenticated");

    // Persist the filter and hand the freshly generated filter id back to
    // the client.
    let filter_id = db.users.create_filter(sender_user, &body.filter)?;
    Ok(create_filter::Response::new(filter_id).into())
}
| 33.270833 | 95 | 0.673763 |
89179a412e12cfed40d5665116705b49bfb8538b | 2,539 | mod parse;
mod eval;
pub use parse::*;
pub use eval::*;
/// A node of the expression tree.
#[derive(Debug, Clone, PartialEq)]
pub enum Operation {
    /// Sum of all contained terms.
    Add(Vec<Operation>),
    /// Product of all contained factors.
    Mul(Vec<Operation>),
    /// Arithmetic negation of the inner operation.
    Neg(Box<Operation>),
    /// Square root of the inner operation.
    Sqrt(Box<Operation>),
    /// Natural logarithm of the inner operation.
    Ln(Box<Operation>),
    /// Exponential (base e) of the inner operation.
    Exp(Box<Operation>),
    /// Quotient: first element divided by the second.
    Div(Box<(Operation, Operation)>),
    /// Numeric constant.
    Literal(f32),
    /// Reference to a named variable.
    Variable(Variable)
}
/// A variable name backed by `tinystr::TinyStrAuto` (ASCII-only storage).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct Variable(tinystr::TinyStrAuto);
impl Variable {
    /// Creates a variable from an identifier string.
    ///
    /// # Panics
    /// Panics if `s` cannot be stored in the backing `TinyStrAuto`
    /// (e.g. it is not plain ASCII). The `expect` message states the
    /// invariant instead of the original bare `unwrap()`.
    pub fn new(s: &str) -> Self {
        Self(s.parse().expect("variable name must be ASCII"))
    }

    /// Returns the variable's name as a string slice.
    pub fn as_str(&self) -> &str {
        &self.0
    }
}
/// A parsed expression tree together with the ordered list of variables
/// it references.
#[derive(Debug, Clone, PartialEq)]
pub struct Expression {
    /// Root node of the expression tree.
    pub operation: Operation,
    /// The expression's variables; the order can be rearranged via
    /// `set_variable_order`.
    pub variables: Vec<Variable>,
}
/// Reasons `Expression::set_variable_order` can reject a proposed order.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum SetVarOrderError {
    /// The proposed order has a different number of variables.
    LengthsDoNotMatch,
    /// Same length, but not every current variable appears in the proposal.
    VariablesDoNotMatch,
}
impl Expression {
    /// Replaces `self.variables` with `new_order`.
    ///
    /// # Errors
    /// Returns `LengthsDoNotMatch` if the two lists differ in length, or
    /// `VariablesDoNotMatch` if some current variable is missing from the
    /// proposal. On error, `self` is left unchanged.
    pub fn set_variable_order(&mut self, new_order: Vec<Variable>) -> Result<(), SetVarOrderError> {
        if new_order.len() != self.variables.len() {
            return Err(SetVarOrderError::LengthsDoNotMatch);
        }
        // Idiomatic membership test (was `find(..).is_none()`, flagged by
        // clippy::search_is_some). Together with the equal-length check this
        // verifies the proposal covers every current variable.
        if self.variables.iter().any(|v| !new_order.contains(v)) {
            return Err(SetVarOrderError::VariablesDoNotMatch);
        }
        self.variables = new_order;
        Ok(())
    }
}
impl std::str::FromStr for Expression {
    type Err = ParseError;

    /// Parses an expression by delegating to [`parse_expression`], so
    /// `"...".parse::<Expression>()` works anywhere `FromStr` is accepted.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        parse_expression(s)
    }
}
impl std::fmt::Display for Operation {
    /// Renders the node as parenthesized infix text: n-ary sums/products
    /// via `format_op_list`, unary functions as `√(..)`, `ln(..)`,
    /// `exp(..)`, and division as `(a) / (b)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        use Operation::*;
        match self {
            Add(terms) => format_op_list(&terms, f, '+'),
            Mul(terms) => format_op_list(&terms, f, '*'),
            Neg(op) => write!(f, "-{}", op),
            Sqrt(op) => write!(f, "√({})", op),
            Ln(op) => write!(f, "ln({})", op),
            Exp(op) => write!(f, "exp({})", op),
            Div(op) => write!(f, "({}) / ({})", op.0, op.1),
            Literal(n) => write!(f, "{}", n),
            // Writes the inner TinyStrAuto directly.
            Variable(v) => write!(f, "{}", v.0),
        }
    }
}
/// Writes `ops` to `f` as a parenthesized, `sep`-separated list, e.g.
/// `(a + b + c)`; an empty slice renders as `()`.
fn format_op_list(
    ops: &[Operation],
    f: &mut std::fmt::Formatter<'_>,
    sep: char,
) -> Result<(), std::fmt::Error> {
    write!(f, "(")?;
    // `split_last` replaces the original `len() != 0` check plus manual
    // `ops[..len-1]` / `ops[len-1]` index arithmetic: every element except
    // the last gets a trailing " {sep} ".
    if let Some((last, rest)) = ops.split_last() {
        for op in rest {
            write!(f, "{} {} ", op, sep)?;
        }
        write!(f, "{}", last)?;
    }
    write!(f, ")")
}
| 25.646465 | 100 | 0.523434 |
015fb8f2bf88e68196a0880254a578a44290c577 | 96,074 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_file1_beta1::{api, Error};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
/// Failure modes of a single CLI sub-command execution.
enum DoitError {
    /// Could not write output to the chosen destination (destination, cause).
    IoError(String, io::Error),
    /// The underlying API call failed.
    ApiError(Error),
}
/// Dispatches parsed command-line arguments to the generated API hub.
struct Engine<'n> {
    /// Parsed top-level CLI argument matches.
    opt: ArgMatches<'n>,
    /// Cloud Filestore API hub all calls are issued through.
    hub: api::CloudFilestore,
    /// Names of the globally accepted query parameters.
    gp: Vec<&'static str>,
    /// Mapping from CLI parameter spelling to API parameter name.
    gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
/// `backups create`: assembles an `api::Backup` request body from `--kv`
/// arguments and issues `projects.locations.backups.create`.
async fn _projects_locations_backups_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut field_cursor = FieldCursor::default();
    let mut object = json::value::Value::Object(Default::default());

    // Interpret each --kv argument as a (possibly nested) request field.
    for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let last_errc = err.issues.len();
        let (key, value) = parse_kv_arg(&*kvarg, err, false);
        let mut temp_cursor = field_cursor.clone();
        if let Err(field_err) = temp_cursor.set(&*key) {
            err.issues.push(field_err);
        }
        if value.is_none() {
            field_cursor = temp_cursor.clone();
            if err.issues.len() > last_errc {
                err.issues.remove(last_errc);
            }
            continue;
        }

        let type_info: Option<(&'static str, JsonTypeInfo)> =
            match &temp_cursor.to_string()[..] {
                "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "download-bytes" => Some(("downloadBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-file-share" => Some(("sourceFileShare", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-instance" => Some(("sourceInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-instance-tier" => Some(("sourceInstanceTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                _ => {
                    let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "download-bytes", "labels", "name", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]);
                    err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                    None
                }
            };
        if let Some((field_cursor_str, type_info)) = type_info {
            FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
        }
    }
    let mut request: api::Backup = json::value::from_value(object).unwrap();
    let mut call = self.hub.projects().locations_backups_create(request, opt.value_of("parent").unwrap_or(""));
    // --v arguments become method-specific or global query parameters.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            "backup-id" => {
                call = call.backup_id(value.unwrap_or(""));
            },
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v.extend(["backup-id"].iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `backups delete`: issues `projects.locations.backups.delete` for the
/// backup named on the command line.
async fn _projects_locations_backups_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_backups_delete(opt.value_of("name").unwrap_or(""));
    // --v arguments may only set global query parameters for this method.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `backups get`: issues `projects.locations.backups.get` for the backup
/// named on the command line.
async fn _projects_locations_backups_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_backups_get(opt.value_of("name").unwrap_or(""));
    // --v arguments may only set global query parameters for this method.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `backups list`: issues `projects.locations.backups.list` under the
/// given parent, with optional paging/filter/order parameters via --v.
async fn _projects_locations_backups_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_backups_list(opt.value_of("parent").unwrap_or(""));
    // --v arguments become method-specific or global query parameters.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            "page-token" => {
                call = call.page_token(value.unwrap_or(""));
            },
            "page-size" => {
                call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
            },
            "order-by" => {
                call = call.order_by(value.unwrap_or(""));
            },
            "filter" => {
                call = call.filter(value.unwrap_or(""));
            },
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `backups patch`: assembles an `api::Backup` request body from `--kv`
/// arguments and issues `projects.locations.backups.patch`, optionally
/// restricted by an `update-mask`.
async fn _projects_locations_backups_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut field_cursor = FieldCursor::default();
    let mut object = json::value::Value::Object(Default::default());

    // Interpret each --kv argument as a (possibly nested) request field.
    for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let last_errc = err.issues.len();
        let (key, value) = parse_kv_arg(&*kvarg, err, false);
        let mut temp_cursor = field_cursor.clone();
        if let Err(field_err) = temp_cursor.set(&*key) {
            err.issues.push(field_err);
        }
        if value.is_none() {
            field_cursor = temp_cursor.clone();
            if err.issues.len() > last_errc {
                err.issues.remove(last_errc);
            }
            continue;
        }

        let type_info: Option<(&'static str, JsonTypeInfo)> =
            match &temp_cursor.to_string()[..] {
                "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "download-bytes" => Some(("downloadBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-file-share" => Some(("sourceFileShare", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-instance" => Some(("sourceInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "source-instance-tier" => Some(("sourceInstanceTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                _ => {
                    let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "download-bytes", "labels", "name", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]);
                    err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                    None
                }
            };
        if let Some((field_cursor_str, type_info)) = type_info {
            FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
        }
    }
    let mut request: api::Backup = json::value::from_value(object).unwrap();
    let mut call = self.hub.projects().locations_backups_patch(request, opt.value_of("name").unwrap_or(""));
    // --v arguments become method-specific or global query parameters.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            "update-mask" => {
                call = call.update_mask(value.unwrap_or(""));
            },
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v.extend(["update-mask"].iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `locations get`: issues `projects.locations.get` for the location
/// named on the command line.
async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or(""));
    // --v arguments may only set global query parameters for this method.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `instances create`: assembles an `api::Instance` request body from
/// `--kv` arguments and issues `projects.locations.instances.create`.
async fn _projects_locations_instances_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut field_cursor = FieldCursor::default();
    let mut object = json::value::Value::Object(Default::default());

    // Interpret each --kv argument as a (possibly nested) request field.
    for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let last_errc = err.issues.len();
        let (key, value) = parse_kv_arg(&*kvarg, err, false);
        let mut temp_cursor = field_cursor.clone();
        if let Err(field_err) = temp_cursor.set(&*key) {
            err.issues.push(field_err);
        }
        if value.is_none() {
            field_cursor = temp_cursor.clone();
            if err.issues.len() > last_errc {
                err.issues.remove(last_errc);
            }
            continue;
        }

        let type_info: Option<(&'static str, JsonTypeInfo)> =
            match &temp_cursor.to_string()[..] {
                "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                _ => {
                    let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "state", "status-message", "tier"]);
                    err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                    None
                }
            };
        if let Some((field_cursor_str, type_info)) = type_info {
            FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
        }
    }
    let mut request: api::Instance = json::value::from_value(object).unwrap();
    let mut call = self.hub.projects().locations_instances_create(request, opt.value_of("parent").unwrap_or(""));
    // --v arguments become method-specific or global query parameters.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            "instance-id" => {
                call = call.instance_id(value.unwrap_or(""));
            },
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v.extend(["instance-id"].iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `instances delete`: issues `projects.locations.instances.delete` for
/// the instance named on the command line.
async fn _projects_locations_instances_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_instances_delete(opt.value_of("name").unwrap_or(""));
    // --v arguments may only set global query parameters for this method.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `instances get`: issues `projects.locations.instances.get` for the
/// instance named on the command line.
async fn _projects_locations_instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_instances_get(opt.value_of("name").unwrap_or(""));
    // --v arguments may only set global query parameters for this method.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
/// `instances list`: issues `projects.locations.instances.list` under the
/// given parent, with optional paging/filter/order parameters via --v.
async fn _projects_locations_instances_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                            -> Result<(), DoitError> {
    let mut call = self.hub.projects().locations_instances_list(opt.value_of("parent").unwrap_or(""));
    // --v arguments become method-specific or global query parameters.
    for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
        let (key, value) = parse_kv_arg(&*parg, err, false);
        match key {
            "page-token" => {
                call = call.page_token(value.unwrap_or(""));
            },
            "page-size" => {
                call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
            },
            "order-by" => {
                call = call.order_by(value.unwrap_or(""));
            },
            "filter" => {
                call = call.filter(value.unwrap_or(""));
            },
            _ => {
                let mut found = false;
                for param in &self.gp {
                    if key == *param {
                        found = true;
                        call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                        break;
                    }
                }
                if !found {
                    err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                               {let mut v = Vec::new();
                                                                v.extend(self.gp.iter().map(|v|*v));
                                                                v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v));
                                                                v } ));
                }
            }
        }
    }
    // Execute the call (unless dry-run) and pretty-print the JSON response.
    let protocol = CallType::Standard;
    if dry_run {
        Ok(())
    } else {
        assert!(err.issues.len() == 0);
        for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            call = call.add_scope(scope);
        }
        let mut ostream = match writer_from_opts(opt.value_of("out")) {
            Ok(mut f) => f,
            Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
        };
        match match protocol {
            CallType::Standard => call.doit().await,
            _ => unreachable!()
        } {
            Err(api_err) => Err(DoitError::ApiError(api_err)),
            Ok((mut response, output_schema)) => {
                let mut value = json::value::to_value(&output_schema).expect("serde to work");
                remove_json_null_values(&mut value);
                json::to_writer_pretty(&mut ostream, &value).unwrap();
                ostream.flush().unwrap();
                Ok(())
            }
        }
    }
}
    /// Handles the `locations-instances-patch` subcommand: updates settings
    /// of the instance named by the `name` positional argument.
    ///
    /// `-r key=value` pairs are assembled via `FieldCursor` into a JSON
    /// object, deserialized into an `api::Instance` request body. The only
    /// method-specific `-p` option is "update-mask"; other keys fall back to
    /// the global parameter table. With `dry_run` set, no request is issued.
    async fn _projects_locations_instances_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key moves the cursor instead of setting a value;
                // drop the error the speculative set may have produced.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path onto the JSON field name and its type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                    "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "state", "status-message", "tier"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        let mut request: api::Instance = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().locations_instances_patch(request, opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                "update-mask" => {
                    call = call.update_mask(value.unwrap_or(""));
                },
                _ => {
                    // Fall back to globally valid parameters (alt, fields, key, ...).
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v.extend(["update-mask"].iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-instances-restore` subcommand: restores an
    /// instance's file share from a backup or snapshot.
    ///
    /// `-r key=value` pairs ("file-share", "source-backup",
    /// "source-snapshot") are collected into an
    /// `api::RestoreInstanceRequest`; there are no method-specific `-p`
    /// options, so every `-p` key must match the global parameter table.
    /// With `dry_run` set, no request is issued.
    async fn _projects_locations_instances_restore(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        let mut object = json::value::Value::Object(Default::default()); 
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key moves the cursor instead of setting a value;
                // drop the error the speculative set may have produced.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path onto the JSON field name and its type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "file-share" => Some(("fileShare", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "source-backup" => Some(("sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "source-snapshot" => Some(("sourceSnapshot", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["file-share", "source-backup", "source-snapshot"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        let mut request: api::RestoreInstanceRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().locations_instances_restore(request, opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    // No method-specific options: only global parameters apply.
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-list` subcommand: lists supported locations
    /// under the resource named by the `name` positional argument.
    ///
    /// Recognized `-p key=value` options are "page-token", "page-size",
    /// "include-unrevealed-locations" and "filter"; other keys fall back to
    /// the global parameter table. With `dry_run` set, no request is issued.
    async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                "page-token" => {
                    call = call.page_token(value.unwrap_or(""));
                },
                "page-size" => {
                    // "-0" sentinel: parses to 0 when the user supplied no value.
                    call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
                },
                "include-unrevealed-locations" => {
                    call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean"));
                },
                "filter" => {
                    call = call.filter(value.unwrap_or(""));
                },
                _ => {
                    // Fall back to globally valid parameters (alt, fields, key, ...).
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v.extend(["filter", "include-unrevealed-locations", "page-size", "page-token"].iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-operations-cancel` subcommand: requests
    /// best-effort cancellation of the long-running operation named by the
    /// `name` positional argument.
    ///
    /// `CancelOperationRequest` has no fields, so any `-r key=value` pair is
    /// reported as an unknown field; `-p` keys must match the global
    /// parameter table. With `dry_run` set, no request is issued.
    async fn _projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key moves the cursor instead of setting a value;
                // drop the error the speculative set may have produced.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // The request schema is empty: every field path is unknown.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    _ => {
                        let suggestion = FieldCursor::did_you_mean(key, &vec![]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        let mut request: api::CancelOperationRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().locations_operations_cancel(request, opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    // No method-specific options: only global parameters apply.
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-operations-delete` subcommand: deletes the
    /// long-running operation named by the `name` positional argument
    /// (server-side bookkeeping only; it does not cancel the operation).
    ///
    /// There are no method-specific `-p` options, so every `-p` key must
    /// match the global parameter table. With `dry_run` set, no request is
    /// issued.
    async fn _projects_locations_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().locations_operations_delete(opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    // No method-specific options: only global parameters apply.
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-operations-get` subcommand: fetches the latest
    /// state of the long-running operation named by the `name` positional
    /// argument.
    ///
    /// There are no method-specific `-p` options, so every `-p` key must
    /// match the global parameter table. With `dry_run` set, no request is
    /// issued.
    async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    // No method-specific options: only global parameters apply.
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Handles the `locations-operations-list` subcommand: lists operations
    /// under the parent resource named by the `name` positional argument.
    ///
    /// Recognized `-p key=value` options are "page-token", "page-size" and
    /// "filter"; other keys fall back to the global parameter table. With
    /// `dry_run` set, no request is issued.
    async fn _projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().locations_operations_list(opt.value_of("name").unwrap_or(""));
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                "page-token" => {
                    call = call.page_token(value.unwrap_or(""));
                },
                "page-size" => {
                    // "-0" sentinel: parses to 0 when the user supplied no value.
                    call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
                },
                "filter" => {
                    call = call.filter(value.unwrap_or(""));
                },
                _ => {
                    // Fall back to globally valid parameters (alt, fields, key, ...).
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            // A real run is only attempted after a clean dry run (see `new`).
            assert!(err.issues.len() == 0);
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON, nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// Dispatches the parsed (sub)command to the matching `_projects_*`
    /// handler.
    ///
    /// With `dry_run` true this returns `Err(Some(err))` when argument
    /// validation found issues and `Err(None)` when everything parsed
    /// cleanly (the `Ok` branch is reserved for real runs); with `dry_run`
    /// false it returns `Ok(call_result)` from the executed handler.
    /// Missing or unknown subcommands are recorded in `err` and usage is
    /// printed to stderr.
    async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
        let mut err = InvalidOptionsError::new();
        let mut call_result: Result<(), DoitError> = Ok(());
        let mut err_opt: Option<InvalidOptionsError> = None;
        match self.opt.subcommand() {
            ("projects", Some(opt)) => {
                match opt.subcommand() {
                    ("locations-backups-create", Some(opt)) => {
                        call_result = self._projects_locations_backups_create(opt, dry_run, &mut err).await;
                    },
                    ("locations-backups-delete", Some(opt)) => {
                        call_result = self._projects_locations_backups_delete(opt, dry_run, &mut err).await;
                    },
                    ("locations-backups-get", Some(opt)) => {
                        call_result = self._projects_locations_backups_get(opt, dry_run, &mut err).await;
                    },
                    ("locations-backups-list", Some(opt)) => {
                        call_result = self._projects_locations_backups_list(opt, dry_run, &mut err).await;
                    },
                    ("locations-backups-patch", Some(opt)) => {
                        call_result = self._projects_locations_backups_patch(opt, dry_run, &mut err).await;
                    },
                    ("locations-get", Some(opt)) => {
                        call_result = self._projects_locations_get(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-create", Some(opt)) => {
                        call_result = self._projects_locations_instances_create(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-delete", Some(opt)) => {
                        call_result = self._projects_locations_instances_delete(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-get", Some(opt)) => {
                        call_result = self._projects_locations_instances_get(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-list", Some(opt)) => {
                        call_result = self._projects_locations_instances_list(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-patch", Some(opt)) => {
                        call_result = self._projects_locations_instances_patch(opt, dry_run, &mut err).await;
                    },
                    ("locations-instances-restore", Some(opt)) => {
                        call_result = self._projects_locations_instances_restore(opt, dry_run, &mut err).await;
                    },
                    ("locations-list", Some(opt)) => {
                        call_result = self._projects_locations_list(opt, dry_run, &mut err).await;
                    },
                    ("locations-operations-cancel", Some(opt)) => {
                        call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await;
                    },
                    ("locations-operations-delete", Some(opt)) => {
                        call_result = self._projects_locations_operations_delete(opt, dry_run, &mut err).await;
                    },
                    ("locations-operations-get", Some(opt)) => {
                        call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await;
                    },
                    ("locations-operations-list", Some(opt)) => {
                        call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await;
                    },
                    _ => {
                        // Known resource, unknown method: report and show usage.
                        err.issues.push(CLIError::MissingMethodError("projects".to_string()));
                        writeln!(io::stderr(), "{}\n", opt.usage()).ok();
                    }
                }
            },
            _ => {
                // No resource subcommand at all.
                err.issues.push(CLIError::MissingCommandError);
                writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
            }
        }
        if dry_run {
            if err.issues.len() > 0 {
                err_opt = Some(err);
            }
            Err(err_opt)
        } else {
            Ok(call_result)
        }
    }
    /// Builds an `Engine` from parsed command-line matches.
    ///
    /// Resolves the config directory, loads the application secret (writing
    /// the bundled default if absent), builds an installed-flow OAuth2
    /// authenticator that persists tokens to disk, and constructs the
    /// CloudFilestore hub. A dry run of `_doit` then validates all
    /// arguments up front — this call fails with `InvalidOptionsError` if
    /// any part of `opt` can't be handled.
    async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
        let (config_dir, secret) = {
            let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
                Err(e) => return Err(InvalidOptionsError::single(e, 3)),
                Ok(p) => p,
            };
            match client::application_secret_from_directory(&config_dir, "file1-beta1-secret.json",
                                                        "{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
                Ok(secret) => (config_dir, secret),
                Err(e) => return Err(InvalidOptionsError::single(e, 4))
            }
        };
        let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
            secret,
            yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
        ).persist_tokens_to_disk(format!("{}/file1-beta1", config_dir)).build().await.unwrap();
        let client = hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots());
        let engine = Engine {
            opt: opt,
            hub: api::CloudFilestore::new(client, auth),
            // Globally valid parameter names (CLI spelling).
            gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
            // CLI spelling -> wire spelling for the globals above.
            gpm: vec![
                    ("$-xgafv", "$.xgafv"),
                    ("access-token", "access_token"),
                    ("oauth-token", "oauth_token"),
                    ("pretty-print", "prettyPrint"),
                    ("quota-user", "quotaUser"),
                    ("upload-type", "uploadType"),
                    ("upload-protocol", "upload_protocol"),
                ]
        };
        // Dry run: Err(None) means "arguments are fine", so return the engine.
        match engine._doit(true).await {
            Err(Some(err)) => Err(err),
            Err(None) => Ok(engine),
            Ok(_) => unreachable!(),
        }
    }
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let arg_data = [
("projects", "methods: 'locations-backups-create', 'locations-backups-delete', 'locations-backups-get', 'locations-backups-list', 'locations-backups-patch', 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-restore', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![
("locations-backups-create",
Some(r##"Creates a backup."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-create",
vec![
(Some(r##"parent"##),
None,
Some(r##"Required. The backup's project and location, in the format projects/{project_id}/locations/{location}. In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-backups-delete",
Some(r##"Deletes a backup."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. The backup resource name, in the format projects/{project_id}/locations/{location}/backups/{backup_id}"##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-backups-get",
Some(r##"Gets the details of a specific backup."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-get",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. The backup resource name, in the format projects/{project_id}/locations/{location}/backups/{backup_id}."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-backups-list",
Some(r##"Lists all backups in a project for either a specified location or for all locations."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-list",
vec![
(Some(r##"parent"##),
None,
Some(r##"Required. The project and location for which to retrieve backup information, in the format projects/{project_id}/locations/{location}. In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**. To retrieve backup information for all locations, use "-" for the {location} value."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-backups-patch",
Some(r##"Updates the settings of a specific backup."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-patch",
vec![
(Some(r##"name"##),
None,
Some(r##"Output only. The resource name of the backup, in the format projects/{project_id}/locations/{location_id}/backups/{backup_id}."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-get",
Some(r##"Gets information about a location."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-get",
vec![
(Some(r##"name"##),
None,
Some(r##"Resource name for the location."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-create",
Some(r##"Creates an instance. When creating from a backup, the capacity of the new instance needs to be equal to or larger than the capacity of the backup (and also equal to or larger than the minimum capacity of the tier)."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-create",
vec![
(Some(r##"parent"##),
None,
Some(r##"Required. The instance's project and location, in the format projects/{project_id}/locations/{location}. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-delete",
Some(r##"Deletes an instance."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. The instance resource name, in the format projects/{project_id}/locations/{location}/instances/{instance_id}"##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-get",
Some(r##"Gets the details of a specific instance."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-get",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. The instance resource name, in the format projects/{project_id}/locations/{location}/instances/{instance_id}."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-list",
Some(r##"Lists all instances in a project for either a specified location or for all locations."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-list",
vec![
(Some(r##"parent"##),
None,
Some(r##"Required. The project and location for which to retrieve instance information, in the format projects/{project_id}/locations/{location}. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**. To retrieve instance information for all locations, use "-" for the {location} value."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-patch",
Some(r##"Updates the settings of a specific instance."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-patch",
vec![
(Some(r##"name"##),
None,
Some(r##"Output only. The resource name of the instance, in the format projects/{project_id}/locations/{location_id}/instances/{instance_id}."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-instances-restore",
Some(r##"Restores an existing instance's file share from a backup. The capacity of the instance needs to be equal to or larger than the capacity of the backup (and also equal to or larger than the minimum capacity of the tier)."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-restore",
vec![
(Some(r##"name"##),
None,
Some(r##"Required. The resource name of the instance, in the format projects/{project_id}/locations/{location_id}/instances/{instance_id}."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-list",
Some(r##"Lists information about the supported locations for this service."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-list",
vec![
(Some(r##"name"##),
None,
Some(r##"The resource that owns the locations collection, if applicable."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-cancel",
Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-operations-cancel",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be cancelled."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-delete",
Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-operations-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be deleted."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-get",
Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-operations-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("locations-operations-list",
Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##),
"Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-operations-list",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation's parent resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
let mut app = App::new("file1-beta1")
.author("Sebastian Thiel <[email protected]>")
.version("2.0.8+20210304")
.about("The Cloud Filestore API is used for creating and managing cloud file servers.")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_file1_beta1_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Debug print all errors")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
| 51.459025 | 639 | 0.449945 |
03b01b5790e82dda4521ea0ba7de0fc7bedadd8a | 94 | pub mod ignore;
/// Convenience wrapper that constructs an [`ignore::Iter`] over the given
/// byte slice.
pub fn ignore(bytes: &[u8]) -> ignore::Iter<'_> {
    ignore::Iter::new(bytes)
}
| 15.666667 | 47 | 0.585106 |
fe209d19fa4d0d0b2899942636e5f7b058fa7b5d | 6,659 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::Result;
use consensus_types::common::Round;
use libra_config::config::PersistableConfig;
use serde::{Deserialize, Serialize};
use std::{cmp::Ordering, path::PathBuf};
#[cfg(test)]
use tempfile::NamedTempFile;
/// SafetyRules needs an abstract storage interface to act as a common utility for storing
/// persistent data to local disk, cloud, secrets managers, or even memory (for tests)
/// Any set function is expected to sync to the remote system before returning.
/// @TODO add access to private key from persistent store
/// @TODO add retrieval of private key based upon public key to persistent store
pub trait PersistentStorage: Send + Sync {
    /// Returns the stored epoch.
    fn epoch(&self) -> u64;
    /// Stores a new epoch; per the module contract, implementations sync to
    /// the backing store before returning.
    fn set_epoch(&mut self, epoch: u64) -> Result<()>;
    /// Returns the stored last voted round.
    fn last_voted_round(&self) -> Round;
    /// Stores the last voted round; synced before returning.
    fn set_last_voted_round(&mut self, last_voted_round: Round) -> Result<()>;
    /// Returns the stored preferred round.
    fn preferred_round(&self) -> Round;
    /// Stores the preferred round; synced before returning.
    /// (Parameter renamed from the misleading `last_voted_round` to match the
    /// implementations below.)
    fn set_preferred_round(&mut self, preferred_round: Round) -> Result<()>;
}
#[derive(Debug, Deserialize, Eq, PartialEq, PartialOrd, Serialize)]
pub struct InMemoryStorage {
    /// Current epoch.
    epoch: u64,
    /// Highest round this node has voted in.
    last_voted_round: Round,
    /// The preferred round.
    preferred_round: Round,
}
impl InMemoryStorage {
pub fn new(epoch: u64, last_voted_round: Round, preferred_round: Round) -> Self {
Self {
epoch,
last_voted_round,
preferred_round,
}
}
pub fn default_storage() -> Box<dyn PersistentStorage> {
Box::new(Self::default())
}
}
impl Default for InMemoryStorage {
fn default() -> Self {
Self {
epoch: 1,
last_voted_round: 0,
preferred_round: 0,
}
}
}
impl Ord for InMemoryStorage {
    /// Lexicographic ordering: epoch first, then last voted round, then
    /// preferred round — the same field order the derived `PartialOrd` uses.
    fn cmp(&self, other: &InMemoryStorage) -> Ordering {
        self.epoch
            .cmp(&other.epoch)
            .then_with(|| self.last_voted_round.cmp(&other.last_voted_round))
            .then_with(|| self.preferred_round.cmp(&other.preferred_round))
    }
}
// In-memory backend: setters mutate the struct directly and always succeed
// (there is no remote store to sync with).
impl PersistentStorage for InMemoryStorage {
    fn epoch(&self) -> u64 {
        self.epoch
    }
    fn set_epoch(&mut self, epoch: u64) -> Result<()> {
        self.epoch = epoch;
        Ok(())
    }
    fn preferred_round(&self) -> Round {
        self.preferred_round
    }
    fn set_preferred_round(&mut self, preferred_round: Round) -> Result<()> {
        self.preferred_round = preferred_round;
        Ok(())
    }
    fn last_voted_round(&self) -> Round {
        self.last_voted_round
    }
    fn set_last_voted_round(&mut self, last_voted_round: Round) -> Result<()> {
        self.last_voted_round = last_voted_round;
        Ok(())
    }
}
#[test]
fn test_in_memory_storage() {
    // Default state must be (epoch = 1, rounds = 0) ...
    let mut storage: Box<dyn PersistentStorage> = InMemoryStorage::default_storage();
    assert_eq!(storage.epoch(), 1);
    assert_eq!(storage.last_voted_round(), 0);
    assert_eq!(storage.preferred_round(), 0);
    // ... and each setter must be observable through its getter.
    storage.set_epoch(9).unwrap();
    storage.set_last_voted_round(8).unwrap();
    storage.set_preferred_round(1).unwrap();
    assert_eq!(storage.epoch(), 9);
    assert_eq!(storage.last_voted_round(), 8);
    assert_eq!(storage.preferred_round(), 1);
}
// Double-buffered on-disk backend: state is alternately written to two files
// (`<path>` and `<path>.alt`); on load the newer of the two wins.
pub struct OnDiskStorage {
    /// File the next save will write to.
    file_path: PathBuf,
    /// The other file of the pair; swapped with `file_path` after each save.
    file_path_alt: PathBuf,
    /// Cached copy of the persisted state.
    internal_data: InMemoryStorage,
}
impl OnDiskStorage {
    /// Opens storage at `file_path`, failing if no usable state file exists.
    pub fn new_storage(file_path: PathBuf) -> Result<Box<dyn PersistentStorage>> {
        Self::new_internal(file_path, false)
    }

    /// Opens storage at `file_path`, falling back to the default state when
    /// no state file exists yet.
    pub fn default_storage(file_path: PathBuf) -> Result<Box<dyn PersistentStorage>> {
        Self::new_internal(file_path, true)
    }

    /// Shared constructor. `default` controls whether missing/unreadable
    /// state files are an error or silently replaced by the default state.
    fn new_internal(mut file_path: PathBuf, default: bool) -> Result<Box<dyn PersistentStorage>> {
        // Derive the alternate file name by appending ".alt". Built via
        // OsString so non-UTF-8 paths no longer panic (the previous
        // `to_str().unwrap()` did); for UTF-8 paths the result is identical.
        let mut alt_os = file_path.clone().into_os_string();
        alt_os.push(".alt");
        let mut file_path_alt = PathBuf::from(alt_os);

        let internal_data = InMemoryStorage::load_config(&file_path);
        let internal_data_alt = InMemoryStorage::load_config(&file_path_alt);
        // Unless the caller asked for default state, refuse to start when
        // neither copy could be loaded; surface the primary file's error.
        if !default && internal_data.is_err() && internal_data_alt.is_err() {
            if let Err(err) = internal_data {
                return Err(err);
            }
        }
        let mut internal_data = internal_data.unwrap_or_default();
        let internal_data_alt = internal_data_alt.unwrap_or_default();
        // Adopt the more advanced copy (`Ord` compares epoch, then rounds)
        // and keep `file_path` naming the file that copy came from.
        if internal_data < internal_data_alt {
            internal_data = internal_data_alt;
            std::mem::swap(&mut file_path, &mut file_path_alt);
        }
        Ok(Box::new(Self {
            file_path,
            file_path_alt,
            internal_data,
        }))
    }

    /// Writes the current state to `file_path`, then swaps the two paths so
    /// successive saves alternate between the pair of files.
    fn save_and_swap(&mut self) -> Result<()> {
        self.internal_data.save_config(&self.file_path)?;
        std::mem::swap(&mut self.file_path, &mut self.file_path_alt);
        Ok(())
    }
}
// On-disk backend: getters read the cached state; every setter updates the
// cache and then persists it via `save_and_swap` before returning.
impl PersistentStorage for OnDiskStorage {
    fn epoch(&self) -> u64 {
        self.internal_data.epoch()
    }
    fn set_epoch(&mut self, epoch: u64) -> Result<()> {
        self.internal_data.set_epoch(epoch)?;
        self.save_and_swap()?;
        Ok(())
    }
    fn preferred_round(&self) -> Round {
        self.internal_data.preferred_round()
    }
    fn set_preferred_round(&mut self, preferred_round: Round) -> Result<()> {
        self.internal_data.set_preferred_round(preferred_round)?;
        self.save_and_swap()?;
        Ok(())
    }
    fn last_voted_round(&self) -> Round {
        self.internal_data.last_voted_round()
    }
    fn set_last_voted_round(&mut self, last_voted_round: Round) -> Result<()> {
        self.internal_data.set_last_voted_round(last_voted_round)?;
        self.save_and_swap()?;
        Ok(())
    }
}
#[test]
fn test_on_disk_storage() {
    // NOTE(review): `into_temp_path()` is dropped immediately, which deletes
    // the temp file — `default_storage` then starts from default state.
    let file_path = NamedTempFile::new().unwrap().into_temp_path().to_path_buf();
    let mut storage: Box<dyn PersistentStorage> =
        OnDiskStorage::default_storage(file_path.clone()).unwrap();
    assert_eq!(storage.epoch(), 1);
    assert_eq!(storage.last_voted_round(), 0);
    assert_eq!(storage.preferred_round(), 0);
    storage.set_epoch(9).unwrap();
    storage.set_last_voted_round(8).unwrap();
    storage.set_preferred_round(1).unwrap();
    assert_eq!(storage.epoch(), 9);
    assert_eq!(storage.last_voted_round(), 8);
    assert_eq!(storage.preferred_round(), 1);
    // Re-open from the same path: the mutated state must have been persisted.
    let storage: Box<dyn PersistentStorage> = OnDiskStorage::default_storage(file_path).unwrap();
    assert_eq!(storage.epoch(), 9);
    assert_eq!(storage.last_voted_round(), 8);
    assert_eq!(storage.preferred_round(), 1);
}
| 30.828704 | 98 | 0.646193 |
ac8ce0a6b11ffb3c6e657c4e19d18dd244f8ab3b | 2,606 | extern crate amqp;
extern crate env_logger;
use amqp::{Basic, Session, Channel, Table, protocol};
use std::default::Default;
use std::thread;
//table types:
//use table::{FieldTable, Table, Bool, ShortShortInt, ShortShortUint, ShortInt, ShortUint, LongInt, LongUint, LongLongInt, LongLongUint, Float, Double, DecimalValue, LongString, FieldArray, Timestamp};
/// Callback invoked by the AMQP client for each delivery: dumps the delivery
/// metadata, content headers and raw body, then acknowledges the message.
fn consumer_function(channel: &mut Channel, deliver: protocol::basic::Deliver, headers: protocol::basic::BasicProperties, body: Vec<u8>) {
    println!("Got a delivery:");
    println!("Deliver info: {:?}", deliver);
    println!("Content headers: {:?}", headers);
    println!("Content body: {:?}", body);
    // Ack only this delivery (multiple = false).
    channel.basic_ack(deliver.delivery_tag, false);
}
// Demo flow: connect to a local broker, declare a durable queue, drain any
// pending messages with basic_get, then run a consumer on a background
// thread and publish one message to it.
fn main() {
    env_logger::init().unwrap();
    let amqp_url = "amqp://guest:[email protected]//";
    let mut session = match Session::open_url(amqp_url) {
        Ok(session) => session,
        Err(error) => panic!("Can't create session: {:?}", error)
    };
    let mut channel = session.open_channel(1).ok().expect("Can't open channel");
    println!("Openned channel: {}", channel.id);
    let queue_name = "test_queue";
    //queue: &str, passive: bool, durable: bool, exclusive: bool, auto_delete: bool, nowait: bool, arguments: Table
    let queue_declare = channel.queue_declare(queue_name, false, true, false, false, false, Table::new());
    println!("Queue declare: {:?}", queue_declare);
    // Synchronously pull and ack whatever is already sitting in the queue.
    for get_result in channel.basic_get(queue_name, false) {
        println!("Headers: {:?}", get_result.headers);
        println!("Reply: {:?}", get_result.reply);
        println!("Body: {:?}", String::from_utf8_lossy(&get_result.body));
        get_result.ack();
    }
    //queue: &str, consumer_tag: &str, no_local: bool, no_ack: bool, exclusive: bool, nowait: bool, arguments: Table
    println!("Declaring consumer...");
    let consumer_name = channel.basic_consume(consumer_function, queue_name, "", false, false, false, false, Table::new());
    println!("Starting consumer {:?}", consumer_name);
    // The consume loop blocks, so it runs on its own thread; the channel is
    // moved in and handed back when the loop ends.
    let consumers_thread = thread::spawn(move || {
        channel.start_consuming();
        channel
    });
    // There is currently no way to stop the consumers, so we infinitely join thread.
    let mut channel = consumers_thread.join().ok().expect("Can't get channel from consumer thread");
    // NOTE(review): this publish only runs after the consumer loop exits.
    channel.basic_publish("", queue_name, true, false,
        protocol::basic::BasicProperties{ content_type: Some("text".to_string()), ..Default::default()},
        (b"Hello from rust!").to_vec());
    channel.close(200, "Bye");
    session.close(200, "Good Bye");
}
| 44.169492 | 201 | 0.665388 |
1c803e513243e7b8bd246e81d72d541043be2632 | 9,582 | //! Interface for reading object files.
use alloc::vec::Vec;
use core::{cmp, fmt, result};
use crate::common::{
FileFlags, RelocationEncoding, RelocationKind, SectionFlags, SectionKind, SymbolFlags,
SymbolKind, SymbolScope,
};
use crate::pod::Bytes;
mod util;
pub use util::StringTable;
mod any;
pub use any::*;
#[cfg(feature = "coff")]
pub mod coff;
#[cfg(feature = "elf")]
pub mod elf;
#[cfg(feature = "macho")]
pub mod macho;
#[cfg(feature = "pe")]
pub mod pe;
mod traits;
pub use traits::*;
#[cfg(feature = "wasm")]
pub mod wasm;
mod private {
    // Sealed-trait pattern: a public trait inside a private module can be
    // named as a supertrait bound by this crate, but cannot be implemented
    // by downstream crates.
    pub trait Sealed {}
}
/// The error type used within the read module.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Error(&'static str); // static message, printed verbatim by `Display`
impl fmt::Display for Error {
    /// Prints the wrapped static message verbatim.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
// `std::error::Error` has no required methods; the `Debug`/`Display` impls
// above suffice. Gated on "std" since the trait lives in `std`.
#[cfg(feature = "std")]
impl std::error::Error for Error {}
/// The result type used within the read module.
pub type Result<T> = result::Result<T, Error>;
/// Internal helper for attaching a static message to failed reads,
/// converting `Err(())` / `None` into this module's [`Error`].
trait ReadError<T> {
    fn read_error(self, error: &'static str) -> Result<T>;
}
impl<T> ReadError<T> for result::Result<T, ()> {
fn read_error(self, error: &'static str) -> Result<T> {
self.map_err(|()| Error(error))
}
}
impl<T> ReadError<T> for Option<T> {
fn read_error(self, error: &'static str) -> Result<T> {
self.ok_or(Error(error))
}
}
/// The native executable file for the target platform.
#[cfg(all(target_os = "linux", target_pointer_width = "32", feature = "elf"))]
pub type NativeFile<'data> = elf::ElfFile32<'data>;
/// The native executable file for the target platform.
#[cfg(all(target_os = "linux", target_pointer_width = "64", feature = "elf"))]
pub type NativeFile<'data> = elf::ElfFile64<'data>;
/// The native executable file for the target platform.
#[cfg(all(target_os = "macos", target_pointer_width = "32", feature = "macho"))]
pub type NativeFile<'data> = macho::MachOFile32<'data>;
/// The native executable file for the target platform.
#[cfg(all(target_os = "macos", target_pointer_width = "64", feature = "macho"))]
pub type NativeFile<'data> = macho::MachOFile64<'data>;
/// The native executable file for the target platform.
#[cfg(all(target_os = "windows", target_pointer_width = "32", feature = "pe"))]
pub type NativeFile<'data> = pe::PeFile32<'data>;
/// The native executable file for the target platform.
#[cfg(all(target_os = "windows", target_pointer_width = "64", feature = "pe"))]
pub type NativeFile<'data> = pe::PeFile64<'data>;
/// The native executable file for the target platform.
#[cfg(all(feature = "wasm", target_arch = "wasm32", feature = "wasm"))]
pub type NativeFile<'data> = wasm::WasmFile<'data>;
/// The index used to identify a section of a file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SectionIndex(pub usize); // newtype over the raw section index
/// The index used to identify a symbol of a file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SymbolIndex(pub usize); // newtype over the raw symbol index
/// The section where a symbol is defined.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SymbolSection {
    /// The section is unknown.
    // NOTE(review): presumably "could not be determined", as opposed to
    // `None` below ("not applicable") — confirm against the format readers.
    Unknown,
    /// The section is not applicable for this symbol (such as file symbols).
    None,
    /// The symbol is undefined.
    Undefined,
    /// The symbol has an absolute value.
    Absolute,
    /// The symbol is a zero-initialized symbol that will be combined with duplicate definitions.
    Common,
    /// The symbol is defined in the given section.
    Section(SectionIndex),
}
impl SymbolSection {
    /// Returns the section index for the section where the symbol is defined.
    ///
    /// May return `None` if the symbol is not defined in a section.
    #[inline]
    pub fn index(self) -> Option<SectionIndex> {
        match self {
            SymbolSection::Section(index) => Some(index),
            _ => None,
        }
    }
}
/// A symbol table entry.
#[derive(Clone, Debug)]
pub struct Symbol<'data> {
    /// Symbol name, if any, borrowed from the file data.
    name: Option<&'data str>,
    /// Address; may be zero if unknown.
    address: u64,
    /// Size; may be zero if unknown.
    size: u64,
    kind: SymbolKind,
    /// Where the symbol is defined (section, absolute, common, ...).
    section: SymbolSection,
    /// True for weak symbols.
    weak: bool,
    scope: SymbolScope,
    /// Format-specific flags.
    flags: SymbolFlags<SectionIndex>,
}
impl<'data> Symbol<'data> {
    /// Return the kind of this symbol.
    #[inline]
    pub fn kind(&self) -> SymbolKind {
        self.kind
    }
    /// Returns the section where the symbol is defined.
    #[inline]
    pub fn section(&self) -> SymbolSection {
        self.section
    }
    /// Returns the section index for the section containing this symbol.
    ///
    /// May return `None` if the symbol is not defined in a section.
    #[inline]
    pub fn section_index(&self) -> Option<SectionIndex> {
        self.section.index()
    }
    /// Return true if the symbol is undefined.
    #[inline]
    pub fn is_undefined(&self) -> bool {
        self.section == SymbolSection::Undefined
    }
    /// Return true if the symbol is common data.
    ///
    /// Note: does not check for `SymbolSection::Section` with `SectionKind::Common`.
    // Private: used by `SymbolMap::filter` below.
    #[inline]
    fn is_common(&self) -> bool {
        self.section == SymbolSection::Common
    }
    /// Return true if the symbol is weak.
    #[inline]
    pub fn is_weak(&self) -> bool {
        self.weak
    }
    /// Return true if the symbol visible outside of the compilation unit.
    ///
    /// This treats `SymbolScope::Unknown` as global.
    #[inline]
    pub fn is_global(&self) -> bool {
        !self.is_local()
    }
    /// Return true if the symbol is only visible within the compilation unit.
    #[inline]
    pub fn is_local(&self) -> bool {
        self.scope == SymbolScope::Compilation
    }
    /// Returns the symbol scope.
    #[inline]
    pub fn scope(&self) -> SymbolScope {
        self.scope
    }
    /// Symbol flags that are specific to each file format.
    #[inline]
    pub fn flags(&self) -> SymbolFlags<SectionIndex> {
        self.flags
    }
    /// The name of the symbol.
    #[inline]
    pub fn name(&self) -> Option<&'data str> {
        self.name
    }
    /// The address of the symbol. May be zero if the address is unknown.
    #[inline]
    pub fn address(&self) -> u64 {
        self.address
    }
    /// The size of the symbol. May be zero if the size is unknown.
    #[inline]
    pub fn size(&self) -> u64 {
        self.size
    }
}
/// A map from addresses to symbols.
#[derive(Debug)]
pub struct SymbolMap<'data> {
    // Must be sorted by address: `get` performs a binary search.
    symbols: Vec<Symbol<'data>>,
}
impl<'data> SymbolMap<'data> {
    /// Get the symbol containing the given address.
    ///
    /// Binary-searches `symbols`, which must be sorted by address.
    pub fn get(&self, address: u64) -> Option<&Symbol<'data>> {
        self.symbols
            .binary_search_by(|symbol| {
                if address < symbol.address {
                    cmp::Ordering::Greater
                } else if address - symbol.address < symbol.size {
                    // `address >= symbol.address` here, so the subtraction
                    // cannot underflow. Comparing the offset against the size
                    // avoids the u64 overflow that `symbol.address +
                    // symbol.size` could hit with adversarial symbol tables.
                    cmp::Ordering::Equal
                } else {
                    cmp::Ordering::Less
                }
            })
            .ok()
            .and_then(|index| self.symbols.get(index))
    }

    /// Get all symbols in the map.
    #[inline]
    pub fn symbols(&self) -> &[Symbol<'data>] {
        &self.symbols
    }

    /// Return true for symbols that should be included in the map.
    ///
    /// Keeps defined, non-common, non-zero-sized symbols of unknown, text or
    /// data kind; drops markers (null/section/file/label/TLS).
    fn filter(symbol: &Symbol<'_>) -> bool {
        match symbol.kind() {
            SymbolKind::Unknown | SymbolKind::Text | SymbolKind::Data => {}
            SymbolKind::Null
            | SymbolKind::Section
            | SymbolKind::File
            | SymbolKind::Label
            | SymbolKind::Tls => {
                return false;
            }
        }
        !symbol.is_undefined() && !symbol.is_common() && symbol.size() > 0
    }
}
/// The target referenced by a relocation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum RelocationTarget {
    /// The target is a symbol.
    // See [`SymbolIndex`] for what the index refers to.
    Symbol(SymbolIndex),
    /// The target is a section.
    // See [`SectionIndex`] for what the index refers to.
    Section(SectionIndex),
}
/// A relocation entry.
#[derive(Debug)]
pub struct Relocation {
    /// Operation used to calculate the relocation result.
    kind: RelocationKind,
    /// How the result is encoded in the place.
    encoding: RelocationEncoding,
    /// Size in bits of the place; 0 means determined by `kind`.
    size: u8,
    /// Symbol or section the relocation refers to.
    target: RelocationTarget,
    /// Addend used in the calculation (mutable via `set_addend`).
    addend: i64,
    /// True if an implicit addend is stored in the data at the offset.
    implicit_addend: bool,
}
// Accessors for `Relocation`; `set_addend` is the only mutator in this block.
impl Relocation {
    /// The operation used to calculate the result of the relocation.
    #[inline]
    pub fn kind(&self) -> RelocationKind {
        self.kind
    }
    /// Information about how the result of the relocation operation is encoded in the place.
    #[inline]
    pub fn encoding(&self) -> RelocationEncoding {
        self.encoding
    }
    /// The size in bits of the place of the relocation.
    ///
    /// If 0, then the size is determined by the relocation kind.
    #[inline]
    pub fn size(&self) -> u8 {
        self.size
    }
    /// The target of the relocation.
    #[inline]
    pub fn target(&self) -> RelocationTarget {
        self.target
    }
    /// The addend to use in the relocation calculation.
    #[inline]
    pub fn addend(&self) -> i64 {
        self.addend
    }
    /// Set the addend to use in the relocation calculation.
    #[inline]
    pub fn set_addend(&mut self, addend: i64) {
        self.addend = addend
    }
    /// Returns true if there is an implicit addend stored in the data at the offset
    /// to be relocated.
    #[inline]
    pub fn has_implicit_addend(&self) -> bool {
        self.implicit_addend
    }
}
/// Returns the bytes at `[range_address, range_address + size)` within
/// `data`, where `data` starts at `data_address`. `None` if out of bounds.
fn data_range(data: Bytes, data_address: u64, range_address: u64, size: u64) -> Option<&[u8]> {
    // Ranges starting before the data begin are out of bounds.
    let offset = range_address.checked_sub(data_address)?;
    // Reject values that do not fit in `usize` (possible on 32-bit targets);
    // the previous bare `as usize` casts silently truncated them, which could
    // select the wrong byte range.
    if offset > usize::max_value() as u64 || size > usize::max_value() as u64 {
        return None;
    }
    let data = data.read_bytes_at(offset as usize, size as usize).ok()?;
    Some(data.0)
}
| 26.91573 | 97 | 0.615216 |
d770a1248658bac0bdc4de1ce1a9801ed22063df | 21,885 | use super::message::Message;
use super::peer;
use crate::network::server::Handle as ServerHandle;
use crossbeam::channel;
use log::{debug, warn};
use std::sync::{Arc, Mutex};
use crate::blockchain::{Blockchain, State};
use crate::block::{Block, Header, Content};
use crate::crypto::hash::{H256, Hashable, H160};
use std::thread;
use std::time;
use serde::{Serialize,Deserialize};
use std::collections::{HashMap, VecDeque};
use crate::transaction::{Transaction, SignedTransaction};
use crate::transaction::verify;
use log::{info};
// Buffer for blocks whose parent has not arrived yet.
pub struct Orphan {
    // Keyed by the hash of the *missing parent* (see the insert in
    // `worker_loop`), so an orphan can be released when its parent shows up.
    orphan_blocks: HashMap<H256, Block>,
}
// Per-worker handle bundling the message channel with shared node state.
// `Clone` is derived so each worker thread gets its own copy of the handles.
#[derive(Clone)]
pub struct Context {
    /// Incoming (serialized message, sending peer) pairs.
    msg_chan: channel::Receiver<(Vec<u8>, peer::Handle)>,
    /// Number of worker threads to spawn.
    num_worker: usize,
    server: ServerHandle,
    blockchain : Arc<Mutex<Blockchain>>,
    /// Pending transactions keyed by their hash.
    mempool : Arc<Mutex<HashMap<H256, SignedTransaction>>>,
    /// Global account state (nonce, balance per address).
    states : Arc<Mutex<State>>,
    txs : Arc<Mutex<VecDeque<SignedTransaction>>>,
}
/// Builds a worker [`Context`] that shares the node's state via `Arc` clones.
pub fn new(
    num_worker: usize,
    msg_src: channel::Receiver<(Vec<u8>, peer::Handle)>,
    server: &ServerHandle,
    blockchain: &Arc<Mutex<Blockchain>>,
    mempool: &Arc<Mutex<HashMap<H256, SignedTransaction>>>,
    states: &Arc<Mutex<State>>,
    txs: &Arc<Mutex<VecDeque<SignedTransaction>>>,
) -> Context {
    // Take cheap handle/Arc clones up front, then assemble with field
    // shorthand.
    let server = server.clone();
    let blockchain = Arc::clone(blockchain);
    let mempool = Arc::clone(mempool);
    let states = Arc::clone(states);
    let txs = Arc::clone(txs);
    Context {
        msg_chan: msg_src,
        num_worker,
        server,
        blockchain,
        mempool,
        states,
        txs,
    }
}
impl Context {
    // Spawns `num_worker` threads, each running its own copy of the loop.
    pub fn start(self) {
        let num_worker = self.num_worker;
        for i in 0..num_worker {
            let cloned = self.clone();
            thread::spawn(move || {
                cloned.worker_loop();
                warn!("Worker thread {} exited", i);
            });
        }
    }
    // Main message-dispatch loop: receives serialized peer messages and
    // handles block/transaction gossip.
    // NOTE(review): `recv().unwrap()` and `deserialize().unwrap()` panic the
    // worker when the channel closes or a peer sends malformed bytes.
    fn worker_loop(&self) {
        let mut orphan_blocks: HashMap<H256, Block> = HashMap::new();
        let mut orphan_buffer = Orphan {orphan_blocks: orphan_blocks};
        // Stats: `counter`/`sum` track network-delay samples; `mark`/`start`
        // record the first accepted block's arrival time.
        let mut counter = 0;
        let mut sum = 0;
        let mut mark = 0;
        let mut start = 0;
        loop {
            //println!("Total blockchain len: {}", self.blockchain.lock().unwrap().hash_blocks.len());
            //println!("Orphan size: {}", orphan.len());
            //println!("sum: {:?}", sum);
            //println!("counter: {:?}", counter);
            let msg = self.msg_chan.recv().unwrap();
            let (msg, peer) = msg;
            let msg: Message = bincode::deserialize(&msg).unwrap();
            match msg {
                // Liveness probe: echo the nonce back.
                Message::Ping(nonce) => {
                    debug!("Ping: {}", nonce);
                    peer.write(Message::Pong(nonce.to_string()));
                }
                Message::Pong(nonce) => {
                    debug!("Pong: {}", nonce);
                }
                // Gossip: if any advertised hash is unknown, ask the peer for
                // the whole advertised list.
                Message::NewBlockHashes(block_hashes) => {
                    debug!("NewBlockHashes");
                    let size = block_hashes.len();
                    for i in (0..size) {
                        let exist = self.blockchain.lock().unwrap().hash_blocks.contains_key(&block_hashes[i]);
                        if(!exist)
                        {
                            peer.write(Message::GetBlocks(block_hashes.clone()));
                            break;
                        }
                    }
                    let longest_chain = self.blockchain.lock().unwrap().all_blocks_in_longest_chain();
                    info!("{:?}", longest_chain);
                    //println!("{:?}", longest_chain);
                    println!("Total number of blocks in blockchain: {} blocks", self.blockchain.lock().unwrap().hash_blocks.len());
                    println!("The number of orphan blocks: {} blocks", orphan_buffer.orphan_blocks.len());
                }
                // Serve full blocks — but only when *every* requested hash is
                // known locally; otherwise an empty list is returned.
                Message::GetBlocks(getblocks) => {
                    debug!("GetBlocks");
                    let size = getblocks.len();
                    let mut exist = true;
                    for i in (0..size) {
                        if(!self.blockchain.lock().unwrap().hash_blocks.contains_key(&getblocks[i]))
                        {
                            exist = false;
                            break;
                        }
                    }
                    let mut exist_blocks : Vec<Block> = Vec::new();
                    if exist {
                        for i in (0..size) {
                            exist_blocks.push(self.blockchain.lock().unwrap().hash_blocks[&getblocks[i]].clone());
                        }
                    }
                    let longest_chain = self.blockchain.lock().unwrap().all_blocks_in_longest_chain();
                    //println!("{:?}", longest_chain);
                    info!("{:?}", longest_chain);
                    println!("Total number of blocks in blockchain: {} blocks", self.blockchain.lock().unwrap().hash_blocks.len());
                    println!("The number of orphan blocks: {} blocks", orphan_buffer.orphan_blocks.len());
                    peer.write(Message::Blocks(exist_blocks));
                }
                // Receive full blocks: validate signatures, apply contained
                // transactions to the state, handle orphans, check PoW and
                // insert into the chain, then re-broadcast the new hashes.
                Message::Blocks(blocks) => {
                    debug!("Blocks");
                    info!("Receiving blocks mined by the other...")
;
                    let size = blocks.len();
                    // NOTE(review): `orphan_size` is never read afterwards.
                    let mut orphan_size = orphan_buffer.orphan_blocks.len();
                    for i in (0..size) {
                        //check
                        // Step 1: every tx must have a valid signature and a
                        // sender address matching the hash of its public key.
                        let mut valid_block = true;
                        let signedTxs_size = blocks[i].content.data.len();
                        for j in (0..signedTxs_size) {
                            //Transaction signature check
                            let public_key_bytes : &[u8] = &blocks[i].content.data[j].public_key;
                            let hash_key : H256 = ring::digest::digest(&ring::digest::SHA256, public_key_bytes).into();
                            // Address = low 20 bytes of SHA-256(public key).
                            let hash_key_20bytes : H160 = hash_key.as_ref()[12..=31].into();
                            if(!verify(&blocks[i].content.data[j].Transaction, &blocks[i].content.data[j].public_key, &blocks[i].content.data[j].Signature)){
                                //we need to discard the block or not is not yet decided
                                valid_block = false;
                            }
                            else if(hash_key_20bytes != blocks[i].content.data[j].sender_addr){
                                valid_block = false;
                            }
                        }
                        if (!valid_block) {
                            continue;
                        }
                        //transactions in the blocks are valid, we should remove them from mempool and update the states
                        // Step 2: for txs we already hold in the mempool,
                        // check the account nonce, then move balances.
                        // NOTE(review): this inner loop shadows `i` and
                        // re-walks *all* received blocks for each valid one —
                        // confirm that is intended.
                        for i in (0..size) {
                            let signedTxs_size = blocks[i].content.data.len();
                            for j in (0..signedTxs_size) {
                                if (self.mempool.lock().unwrap().contains_key(&blocks[i].content.data[j].hash())) {
                                    let sender_addr = blocks[i].content.data[j].sender_addr;
                                    let recver_addr = blocks[i].content.data[j].Transaction.recipAddress;
                                    let trans_money = blocks[i].content.data[j].Transaction.val;
                                    let accountNonce = blocks[i].content.data[j].Transaction.accountNonce;
                                    //Transaction spending check
                                    //println!("{:?}", accountNonce);
                                    //println!("{:?}", self.states.lock().unwrap().accountMaping[&sender_addr].0);
                                    // Nonce must be exactly current + 1.
                                    if (accountNonce != self.states.lock().unwrap().accountMaping[&sender_addr].0 + 1) {
                                        continue;
                                    }
                                    self.mempool.lock().unwrap().remove(&blocks[i].content.data[j].hash());
                                    // Apply the transfer only if the sender
                                    // can cover it.
                                    if (self.states.lock().unwrap().accountMaping[&sender_addr].1 >= trans_money) {
                                        if let Some(x) = self.states.lock().unwrap().accountMaping.get_mut(&sender_addr) {
                                            x.0 += 1;
                                            x.1 -= trans_money;
                                        }
                                        if let Some(y) = self.states.lock().unwrap().accountMaping.get_mut(&recver_addr) {
                                            y.1 += trans_money;
                                        }
                                    }
                                    println!("{:?}", "new block txs");
                                    info!("{:?}", self.states.lock().unwrap().accountMaping);
                                }
                            }
                        }
                        // Step 3: chain insertion for blocks we do not have.
                        if (!self.blockchain.lock().unwrap().hash_blocks.contains_key(&blocks[i].hash())) {
                            // Parent unknown: buffer as orphan (keyed by the
                            // missing parent) and request the parent.
                            if (!self.blockchain.lock().unwrap().hash_blocks.contains_key(&blocks[i].header.parent)) {
                                orphan_buffer.orphan_blocks.insert(blocks[i].header.parent, blocks[i].clone());
                                println!("The number of orphan blocks is increased to {} blocks", orphan_buffer.orphan_blocks.len());
                                let mut parent_hash: Vec<H256> = Vec::new();
                                parent_hash.push(blocks[i].header.parent);
                                peer.write(Message::GetBlocks(parent_hash.clone()));
                            }
                            else {
                                // Proof-of-work check.
                                // NOTE(review): the difficulty used here is
                                // the current tip's, not the received block's
                                // parent's — confirm this is intended.
                                let par = self.blockchain.lock().unwrap().tip();
                                let diff = self.blockchain.lock().unwrap().hash_blocks[&par].header.difficulty;
                                if (blocks[i].hash() <= diff) {
                                    // get network delay
                                    let mut timestamp = blocks[i].header.timestamp;
                                    let mut cur_time;
                                    match time::SystemTime::now().duration_since(time::UNIX_EPOCH)
                                    {
                                        Ok(n) => cur_time = n.as_millis(),
                                        Err(_) => panic!("SystemTime before UNIX EPOCH!"),
                                    }
                                    let mut delay = cur_time - timestamp;
                                    println!("Network delay: {:?} ms", delay);
                                    // get the average delay
                                    sum += delay;
                                    counter += 1;
                                    let avg: f32 = (sum as f32)/(counter as f32);
                                    println!("Average network delay: {:?} ms", avg);
                                    // get block size
                                    let serialized: Vec<u8> = bincode::serialize(&blocks[i]).unwrap();
                                    let block_size = serialized.len();
                                    println!("Block size: {:?}", block_size);
                                    // get duration
                                    // setting start as starting time
                                    if mark == 0
                                    {
                                        start = cur_time;
                                        mark = 1;
                                    }
                                    let time_diff = cur_time - start;
                                    let dura = (time_diff as f32)/(1000 as f32);
                                    println!("Time elapsed: {:?} seconds", dura.clone());
                                    //To check the block is on the longset chain,
                                    //1). If it is on the longest_chain, update the global state
                                    //2). If is is the fork, use its parent block to calculate temp state and store in chainState
                                    //3). If the block's parent is not the tip and after the insertion the block is the tip, that means new longest chain
                                    let prev_tip = self.blockchain.lock().unwrap().tip();
                                    self.blockchain.lock().unwrap().insert(&blocks[i]);
                                    if (blocks[i].header.parent == prev_tip) { // condition 1.
                                        self.blockchain.lock().unwrap().chainState.insert(blocks[i].hash(), self.states.lock().unwrap().clone());
                                    }
                                    else { //condition 2 & 3
                                        // NOTE(review): only data[0] is
                                        // replayed when rebuilding the fork
                                        // state — later txs in the block are
                                        // ignored; confirm.
                                        let sender_addr = blocks[i].content.data[0].sender_addr;
                                        let recver_addr = blocks[i].content.data[0].Transaction.recipAddress;
                                        let trans_money = blocks[i].content.data[0].Transaction.val;
                                        let accountNonce = blocks[i].content.data[0].Transaction.accountNonce;
                                        if (self.blockchain.lock().unwrap().tip() != blocks[i].hash()) { // condition 2
                                            let mut parent_state = self.blockchain.lock().unwrap().chainState[&blocks[i].header.parent].clone();
                                            if (parent_state.accountMaping[&sender_addr].1 >= trans_money) {
                                                if let Some(x) = parent_state.accountMaping.get_mut(&sender_addr) {
                                                    x.0 += 1;
                                                    x.1 -= trans_money;
                                                }
                                                if let Some(y) = parent_state.accountMaping.get_mut(&recver_addr) {
                                                    y.1 += trans_money;
                                                }
                                            }
                                            self.blockchain.lock().unwrap().chainState.insert(blocks[i].hash(), parent_state.clone());
                                            // Fork did not win: keep the
                                            // previous tip's state global.
                                            self.states.lock().unwrap().accountMaping = self.blockchain.lock().unwrap().chainState[&prev_tip].accountMaping.clone();
                                        }
                                        else { // condition 3
                                            let mut parent_state = self.blockchain.lock().unwrap().chainState[&blocks[i].header.parent].clone();
                                            if (parent_state.accountMaping[&sender_addr].1 >= trans_money) {
                                                if let Some(x) = parent_state.accountMaping.get_mut(&sender_addr) {
                                                    x.0 += 1;
                                                    x.1 -= trans_money;
                                                }
                                                if let Some(y) = parent_state.accountMaping.get_mut(&recver_addr) {
                                                    y.1 += trans_money;
                                                }
                                            }
                                            self.blockchain.lock().unwrap().chainState.insert(blocks[i].hash(), parent_state.clone());
                                            // Fork became the new longest
                                            // chain: adopt its state.
                                            self.states.lock().unwrap().accountMaping = parent_state.accountMaping.clone();
                                        }
                                    }
                                    //insert new block to blockchain, so we need to remove SignedTransaction inside this block
                                    let size = blocks[i].content.data.len();
                                    for j in (0..size) {
                                        if (self.mempool.lock().unwrap().contains_key(&blocks[i].content.data[j].hash())) {
                                            self.mempool.lock().unwrap().remove(&blocks[i].content.data[j].hash());
                                        }
                                    }
                                    let mut new_blockHash: Vec<H256> = Vec::new();
                                    new_blockHash.push(blocks[i].hash());
                                    self.server.broadcast(Message::NewBlockHashes(new_blockHash));
                                }
                            }
                            // If the received block is the missing parent of a
                            // buffered orphan, release that orphan too.
                            // NOTE(review): only blocks[0] is checked here and
                            // the `break` stops processing the remaining
                            // received blocks — confirm.
                            if (orphan_buffer.orphan_blocks.contains_key(&blocks[0].hash())) {
                                self.blockchain.lock().unwrap().insert(&orphan_buffer.orphan_blocks[&blocks[0].hash()]);
                                //remove corresponding txs in the inserted block from mempool
                                let size = blocks[0].content.data.len();
                                for j in (0..size) {
                                    if (self.mempool.lock().unwrap().contains_key(&blocks[0].content.data[j].hash())) {
                                        self.mempool.lock().unwrap().remove(&blocks[0].content.data[j].hash());
                                    }
                                }
                                let mut new_blockHash_orphan: Vec<H256> = Vec::new();
                                new_blockHash_orphan.push(orphan_buffer.orphan_blocks[&blocks[0].hash()].hash());
                                self.server.broadcast(Message::NewBlockHashes(new_blockHash_orphan));
                                orphan_buffer.orphan_blocks.remove(&blocks[0].hash());
                                println!("The number of orphan blocks is decreased to {} blocks", orphan_buffer.orphan_blocks.len());
                                break;
                            }
                        }
                    }
                    let longest_chain = self.blockchain.lock().unwrap().all_blocks_in_longest_chain();
                    info!("{:?}", longest_chain);
                    println!("Total number of blocks in blockchain: {} blocks", self.blockchain.lock().unwrap().hash_blocks.len());
                    println!("The number of orphan blocks: {} blocks", orphan_buffer.orphan_blocks.len());
                }
                // Gossip: if any advertised tx hash is missing from the
                // mempool, request the advertised list.
                Message::NewTransactionHashes(trans_hashes) => {
                    debug!("NewTransactionHashes");
                    let size = trans_hashes.len();
                    for i in (0..size) {
                        let exist = self.mempool.lock().unwrap().contains_key(&trans_hashes[i]);
                        if(!exist)
                        {
                            peer.write(Message::GetTransactions(trans_hashes.clone()));
                            break;
                        }
                    }
                }
                // Serve transactions — only when every requested hash is in
                // the mempool; otherwise an empty list is returned.
                Message::GetTransactions(get_trans) => {
                    debug!("GetTransactions");
                    let size = get_trans.len();
                    let mut exist = true;
                    for i in (0..size) {
                        if(!self.mempool.lock().unwrap().contains_key(&get_trans[i]))
                        {
                            exist = false;
                            break;
                        }
                    }
                    let mut exist_trans : Vec<SignedTransaction> = Vec::new();
                    if exist {
                        for i in (0..size) {
                            exist_trans.push(self.mempool.lock().unwrap()[&get_trans[i]].clone());
                        }
                    }
                    peer.write(Message::Transactions(exist_trans));
                }
                // Receive transactions: verify each signature, admit valid
                // ones to the mempool; on any failure, re-request the whole
                // batch from the peer.
                Message::Transactions(trans) => {
                    debug!("Transactions");
                    let size = trans.len();
                    let mut new_transHash: Vec<H256> = Vec::new();
                    let mut verified = true;
                    for i in (0..size) {
                        if(verify(&trans[i].Transaction, &trans[i].public_key, &trans[i].Signature)){
                            //put into mempool
                            self.mempool.lock().unwrap().insert(trans[i].hash(), trans[i].clone());
                            //self.txs.lock().unwrap().push_back(trans[i].clone());
                            new_transHash.push(trans[i].hash());
                        }
                        else {
                            //verify fail, need to ask the original node to send transaction again
                            verified = false;
                            break;
                        }
                    }
                    //If there is unverified transaction, pack it in vector and re-ask for it
                    if (!verified) {
                        let mut ask_trans : Vec<H256> = Vec::new();
                        for j in (0..size) {
                            ask_trans.push(trans[j].hash());
                        }
                        peer.write(Message::GetTransactions(ask_trans));
                        continue;
                    }
                    println!("mempool size: {}", self.mempool.lock().unwrap().len());
                    self.server.broadcast(Message::NewTransactionHashes(new_transHash));
                }
            }
        }
    }
}
| 54.305211 | 164 | 0.416221 |
679ac11299c40b97653a911dde2274ee6619b2c9 | 2,334 | use crate::data::PgPoolContainer;
use chrono::Utc;
use serenity::{
client::Context,
framework::standard::{macros::command, Args, CommandResult},
model::{channel::Message, id::UserId},
};
#[command("Profile")]
#[description = "Show your own or someone else's profile."]
#[example = ""]
#[example = "@mention"]
#[example = "user_id"]
#[min_args(0)]
#[max_args(1)]
async fn profile(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
let user_id = match args.single::<UserId>() {
Ok(user_id) => user_id,
Err(_) => msg.author.id,
};
let user = match user_id.to_user(ctx).await {
Ok(user) => user,
Err(_) => {
msg.reply(ctx, "Could not find specified user.").await?;
return Ok(());
}
};
let data = ctx.data.read().await;
let pg_pool = match data.get::<PgPoolContainer>() {
Some(pg_pool) => pg_pool,
None => return Err("pg pool unavailable".into()),
};
match sqlx::query!(
"SELECT level, current_xp, xp_for_next_level, coins FROM users WHERE user_id = $1",
user_id.0 as i64
)
.fetch_one(pg_pool)
.await
{
Ok(result) => {
msg.channel_id
.send_message(ctx, |m| {
m.embed(|e| {
e.title("Profile");
e.description(format!("{}'s profile.", user));
e.field("Level:", result.level, false);
e.field(
"XP:",
format!("{}/{}", result.current_xp, result.xp_for_next_level),
false,
);
e.field("Coins:", result.coins, false);
e.footer(|f| {
f.text(format!("Requested by {}", msg.author.tag()));
f.icon_url(msg.author.face());
f
});
e.timestamp(&Utc::now());
e
});
m
})
.await?;
}
Err(_) => {
msg.reply(ctx, format!("{} does not have a profile.", user.tag()))
.await?;
}
}
Ok(())
}
| 29.175 | 91 | 0.441302 |
9ce6921a209025fa4fd60f0988aa29875e86bea0 | 13,156 | //! Board file for Nucleo-F446RE development board
//!
//! - <https://www.st.com/en/evaluation-tools/nucleo-f446re.html>
#![no_std]
// Disable this attribute when documenting, as a workaround for
// https://github.com/rust-lang/rust/issues/62184.
#![cfg_attr(not(doc), no_main)]
#![deny(missing_docs)]
use capsules::virtual_alarm::VirtualMuxAlarm;
use kernel::capabilities;
use kernel::common::dynamic_deferred_call::{DynamicDeferredCall, DynamicDeferredCallClientState};
use kernel::component::Component;
use kernel::hil::gpio::Configure;
use kernel::hil::led::LedHigh;
use kernel::Platform;
use kernel::{create_capability, debug, static_init};
use stm32f446re::interrupt_service::Stm32f446reDefaultPeripherals;
/// Support routines for debugging I/O.
pub mod io;
// Unit Tests for drivers.
#[allow(dead_code)]
mod multi_alarm_test;
#[allow(dead_code)]
mod virtual_uart_rx_test;
// Number of concurrent processes this platform supports.
const NUM_PROCS: usize = 4;
// Actual memory for holding the active process structures.
static mut PROCESSES: [Option<&'static dyn kernel::procs::ProcessType>; NUM_PROCS] =
[None, None, None, None];
// Static reference to chip for panic dumps.
static mut CHIP: Option<&'static stm32f446re::chip::Stm32f4xx<Stm32f446reDefaultPeripherals>> =
None;
// How should the kernel respond when a process faults.
const FAULT_RESPONSE: kernel::procs::FaultResponse = kernel::procs::FaultResponse::Panic;
/// Dummy buffer that causes the linker to reserve enough space for the stack.
#[no_mangle]
#[link_section = ".stack_buffer"]
pub static mut STACK_MEMORY: [u8; 0x2000] = [0; 0x2000];
/// A structure representing this platform that holds references to all
/// capsules for this platform.
struct NucleoF446RE {
console: &'static capsules::console::Console<'static>,
ipc: kernel::ipc::IPC,
led: &'static capsules::led::LedDriver<
'static,
LedHigh<'static, stm32f446re::gpio::Pin<'static>>,
>,
button: &'static capsules::button::Button<'static, stm32f446re::gpio::Pin<'static>>,
alarm: &'static capsules::alarm::AlarmDriver<
'static,
VirtualMuxAlarm<'static, stm32f446re::tim2::Tim2<'static>>,
>,
}
/// Mapping of integer syscalls to objects that implement syscalls.
impl Platform for NucleoF446RE {
fn with_driver<F, R>(&self, driver_num: usize, f: F) -> R
where
F: FnOnce(Option<&dyn kernel::Driver>) -> R,
{
match driver_num {
capsules::console::DRIVER_NUM => f(Some(self.console)),
capsules::led::DRIVER_NUM => f(Some(self.led)),
capsules::button::DRIVER_NUM => f(Some(self.button)),
capsules::alarm::DRIVER_NUM => f(Some(self.alarm)),
kernel::ipc::DRIVER_NUM => f(Some(&self.ipc)),
_ => f(None),
}
}
}
/// Helper function called during bring-up that configures DMA.
unsafe fn setup_dma(
dma: &stm32f446re::dma1::Dma1,
dma_streams: &'static [stm32f446re::dma1::Stream; 8],
usart2: &'static stm32f446re::usart::Usart,
) {
use stm32f446re::dma1::Dma1Peripheral;
use stm32f446re::usart;
dma.enable_clock();
let usart2_tx_stream = &dma_streams[Dma1Peripheral::USART2_TX.get_stream_idx()];
let usart2_rx_stream = &dma_streams[Dma1Peripheral::USART2_RX.get_stream_idx()];
usart2.set_dma(
usart::TxDMA(usart2_tx_stream),
usart::RxDMA(usart2_rx_stream),
);
usart2_tx_stream.set_client(usart2);
usart2_rx_stream.set_client(usart2);
usart2_tx_stream.setup(Dma1Peripheral::USART2_TX);
usart2_rx_stream.setup(Dma1Peripheral::USART2_RX);
cortexm4::nvic::Nvic::new(Dma1Peripheral::USART2_TX.get_stream_irqn()).enable();
cortexm4::nvic::Nvic::new(Dma1Peripheral::USART2_RX.get_stream_irqn()).enable();
}
/// Helper function called during bring-up that configures multiplexed I/O.
unsafe fn set_pin_primary_functions(
syscfg: &stm32f446re::syscfg::Syscfg,
exti: &stm32f446re::exti::Exti,
gpio_ports: &'static stm32f446re::gpio::GpioPorts<'static>,
) {
use stm32f446re::exti::LineId;
use stm32f446re::gpio::{AlternateFunction, Mode, PinId, PortId};
syscfg.enable_clock();
gpio_ports.get_port_from_port_id(PortId::A).enable_clock();
// User LD2 is connected to PA05. Configure PA05 as `debug_gpio!(0, ...)`
gpio_ports.get_pin(PinId::PA05).map(|pin| {
pin.make_output();
// Configure kernel debug gpios as early as possible
kernel::debug::assign_gpios(Some(pin), None, None);
});
// pa2 and pa3 (USART2) is connected to ST-LINK virtual COM port
gpio_ports.get_pin(PinId::PA02).map(|pin| {
pin.set_mode(Mode::AlternateFunctionMode);
// AF7 is USART2_TX
pin.set_alternate_function(AlternateFunction::AF7);
});
gpio_ports.get_pin(PinId::PA03).map(|pin| {
pin.set_mode(Mode::AlternateFunctionMode);
// AF7 is USART2_RX
pin.set_alternate_function(AlternateFunction::AF7);
});
gpio_ports.get_port_from_port_id(PortId::C).enable_clock();
// button is connected on pc13
gpio_ports.get_pin(PinId::PC13).map(|pin| {
// By default, upon reset, the pin is in input mode, with no internal
// pull-up, no internal pull-down (i.e., floating).
//
// Only set the mapping between EXTI line and the Pin and let capsule do
// the rest.
exti.associate_line_gpiopin(LineId::Exti13, pin);
});
// EXTI13 interrupts is delivered at IRQn 40 (EXTI15_10)
cortexm4::nvic::Nvic::new(stm32f446re::nvic::EXTI15_10).enable();
}
/// Helper function for miscellaneous peripheral functions
unsafe fn setup_peripherals(tim2: &stm32f446re::tim2::Tim2) {
// USART2 IRQn is 38
cortexm4::nvic::Nvic::new(stm32f446re::nvic::USART2).enable();
// TIM2 IRQn is 28
tim2.enable_clock();
tim2.start();
cortexm4::nvic::Nvic::new(stm32f446re::nvic::TIM2).enable();
}
/// Reset Handler.
///
/// This symbol is loaded into vector table by the STM32F446RE chip crate.
/// When the chip first powers on or later does a hard reset, after the core
/// initializes all the hardware, the address of this function is loaded and
/// execution begins here.
#[no_mangle]
pub unsafe fn reset_handler() {
stm32f446re::init();
// We use the default HSI 16Mhz clock
let rcc = static_init!(stm32f446re::rcc::Rcc, stm32f446re::rcc::Rcc::new());
let syscfg = static_init!(
stm32f446re::syscfg::Syscfg,
stm32f446re::syscfg::Syscfg::new(rcc)
);
let exti = static_init!(
stm32f446re::exti::Exti,
stm32f446re::exti::Exti::new(syscfg)
);
let dma1 = static_init!(stm32f446re::dma1::Dma1, stm32f446re::dma1::Dma1::new(rcc));
let peripherals = static_init!(
Stm32f446reDefaultPeripherals,
Stm32f446reDefaultPeripherals::new(rcc, exti, dma1)
);
peripherals.init();
let base_peripherals = &peripherals.stm32f4;
setup_peripherals(&base_peripherals.tim2);
set_pin_primary_functions(syscfg, &base_peripherals.exti, &base_peripherals.gpio_ports);
setup_dma(
dma1,
&base_peripherals.dma_streams,
&base_peripherals.usart2,
);
let board_kernel = static_init!(kernel::Kernel, kernel::Kernel::new(&PROCESSES));
let dynamic_deferred_call_clients =
static_init!([DynamicDeferredCallClientState; 2], Default::default());
let dynamic_deferred_caller = static_init!(
DynamicDeferredCall,
DynamicDeferredCall::new(dynamic_deferred_call_clients)
);
DynamicDeferredCall::set_global_instance(dynamic_deferred_caller);
let chip = static_init!(
stm32f446re::chip::Stm32f4xx<Stm32f446reDefaultPeripherals>,
stm32f446re::chip::Stm32f4xx::new(peripherals)
);
CHIP = Some(chip);
// UART
// Create a shared UART channel for kernel debug.
base_peripherals.usart2.enable_clock();
let uart_mux = components::console::UartMuxComponent::new(
&base_peripherals.usart2,
115200,
dynamic_deferred_caller,
)
.finalize(());
// `finalize()` configures the underlying USART, so we need to
// tell `send_byte()` not to configure the USART again.
io::WRITER.set_initialized();
// Create capabilities that the board needs to call certain protected kernel
// functions.
let memory_allocation_capability = create_capability!(capabilities::MemoryAllocationCapability);
let main_loop_capability = create_capability!(capabilities::MainLoopCapability);
let process_management_capability =
create_capability!(capabilities::ProcessManagementCapability);
// Setup the console.
let console = components::console::ConsoleComponent::new(board_kernel, uart_mux).finalize(());
// Create the debugger object that handles calls to `debug!()`.
components::debug_writer::DebugWriterComponent::new(uart_mux).finalize(());
// // Setup the process inspection console
// let process_console_uart = static_init!(UartDevice, UartDevice::new(mux_uart, true));
// process_console_uart.setup();
// pub struct ProcessConsoleCapability;
// unsafe impl capabilities::ProcessManagementCapability for ProcessConsoleCapability {}
// let process_console = static_init!(
// capsules::process_console::ProcessConsole<'static, ProcessConsoleCapability>,
// capsules::process_console::ProcessConsole::new(
// process_console_uart,
// &mut capsules::process_console::WRITE_BUF,
// &mut capsules::process_console::READ_BUF,
// &mut capsules::process_console::COMMAND_BUF,
// board_kernel,
// ProcessConsoleCapability,
// )
// );
// hil::uart::Transmit::set_transmit_client(process_console_uart, process_console);
// hil::uart::Receive::set_receive_client(process_console_uart, process_console);
// process_console.start();
// LEDs
let gpio_ports = &base_peripherals.gpio_ports;
// Clock to Port A is enabled in `set_pin_primary_functions()`
let led = components::led::LedsComponent::new(components::led_component_helper!(
LedHigh<'static, stm32f446re::gpio::Pin>,
LedHigh::new(gpio_ports.get_pin(stm32f446re::gpio::PinId::PA05).unwrap()),
))
.finalize(components::led_component_buf!(
LedHigh<'static, stm32f446re::gpio::Pin>
));
// BUTTONs
let button = components::button::ButtonComponent::new(
board_kernel,
components::button_component_helper!(
stm32f446re::gpio::Pin,
(
gpio_ports.get_pin(stm32f446re::gpio::PinId::PC13).unwrap(),
kernel::hil::gpio::ActivationMode::ActiveLow,
kernel::hil::gpio::FloatingState::PullNone
)
),
)
.finalize(components::button_component_buf!(stm32f446re::gpio::Pin));
// ALARM
let tim2 = &base_peripherals.tim2;
let mux_alarm = components::alarm::AlarmMuxComponent::new(tim2).finalize(
components::alarm_mux_component_helper!(stm32f446re::tim2::Tim2),
);
let alarm = components::alarm::AlarmDriverComponent::new(board_kernel, mux_alarm)
.finalize(components::alarm_component_helper!(stm32f446re::tim2::Tim2));
let nucleo_f446re = NucleoF446RE {
console: console,
ipc: kernel::ipc::IPC::new(board_kernel, &memory_allocation_capability),
led: led,
button: button,
alarm: alarm,
};
// // Optional kernel tests
// //
// // See comment in `boards/imix/src/main.rs`
// virtual_uart_rx_test::run_virtual_uart_receive(mux_uart);
debug!("Initialization complete. Entering main loop");
/// These symbols are defined in the linker script.
extern "C" {
/// Beginning of the ROM region containing app images.
static _sapps: u8;
/// End of the ROM region containing app images.
static _eapps: u8;
/// Beginning of the RAM region for app memory.
static mut _sappmem: u8;
/// End of the RAM region for app memory.
static _eappmem: u8;
}
kernel::procs::load_processes(
board_kernel,
chip,
core::slice::from_raw_parts(
&_sapps as *const u8,
&_eapps as *const u8 as usize - &_sapps as *const u8 as usize,
),
core::slice::from_raw_parts_mut(
&mut _sappmem as *mut u8,
&_eappmem as *const u8 as usize - &_sappmem as *const u8 as usize,
),
&mut PROCESSES,
FAULT_RESPONSE,
&process_management_capability,
)
.unwrap_or_else(|err| {
debug!("Error loading processes!");
debug!("{:?}", err);
});
let scheduler = components::sched::round_robin::RoundRobinComponent::new(&PROCESSES)
.finalize(components::rr_component_helper!(NUM_PROCS));
//Uncomment to run multi alarm test
//multi_alarm_test::run_multi_alarm(mux_alarm);
board_kernel.kernel_loop(
&nucleo_f446re,
chip,
Some(&nucleo_f446re.ipc),
scheduler,
&main_loop_capability,
);
}
| 36.043836 | 100 | 0.675509 |
1e3222abf7107bb1711adc4f71f45eb8a2a86d02 | 6,069 | use messages::*;
use messages::message_type::MessageTypes;
use settings;
use utils::httpclient;
use error::prelude::*;
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UpdateConnection {
#[serde(rename = "@type")]
msg_type: MessageTypes,
#[serde(rename = "statusCode")]
status_code: ConnectionStatus,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ConnectionStatus {
AlreadyConnected,
NotConnected,
Deleted,
}
impl Serialize for ConnectionStatus {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer {
let value = match self {
ConnectionStatus::AlreadyConnected => "CS-101",
ConnectionStatus::NotConnected => "CS-102",
ConnectionStatus::Deleted => "CS-103",
};
serde_json::Value::String(value.to_string()).serialize(serializer)
}
}
impl<'de> Deserialize<'de> for ConnectionStatus {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
let value = Value::deserialize(deserializer).map_err(de::Error::custom)?;
match value.as_str() {
Some("CS-101") => Ok(ConnectionStatus::AlreadyConnected),
Some("CS-102") => Ok(ConnectionStatus::NotConnected),
Some("CS-103") => Ok(ConnectionStatus::Deleted),
_ => Err(de::Error::custom("Unexpected message type."))
}
}
}
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct UpdateConnectionResponse {
#[serde(rename = "@type")]
msg_type: MessageTypes,
#[serde(rename = "statusCode")]
status_code: ConnectionStatus,
}
#[derive(Debug)]
pub struct DeleteConnectionBuilder {
to_did: String,
to_vk: String,
status_code: ConnectionStatus,
agent_did: String,
agent_vk: String,
version: settings::ProtocolTypes,
}
impl DeleteConnectionBuilder {
pub fn create() -> DeleteConnectionBuilder {
trace!("DeleteConnection::create_message >>>");
DeleteConnectionBuilder {
to_did: String::new(),
to_vk: String::new(),
status_code: ConnectionStatus::Deleted,
agent_did: String::new(),
agent_vk: String::new(),
version: settings::get_protocol_type()
}
}
pub fn version(&mut self, version: &Option<settings::ProtocolTypes>) -> VcxResult<&mut Self> {
self.version = match version {
Some(version) => version.clone(),
None => settings::get_protocol_type()
};
Ok(self)
}
pub fn send_secure(&mut self) -> VcxResult<()> {
trace!("DeleteConnection::send >>>");
if settings::test_agency_mode_enabled() {
return Ok(());
}
let data = self.prepare_request()?;
let response = httpclient::post_u8(&data)?;
self.parse_response(&response)
}
fn parse_response(&self, response: &Vec<u8>) -> VcxResult<()> {
trace!("parse_create_keys_response >>>");
let mut response = parse_response_from_agency(response, &self.version)?;
match response.remove(0) {
A2AMessage::Version1(A2AMessageV1::UpdateConnectionResponse(_)) => Ok(()),
A2AMessage::Version2(A2AMessageV2::UpdateConnectionResponse(_)) => Ok(()),
_ => Err(VcxError::from_msg(VcxErrorKind::InvalidHttpResponse, "Message does not match any variant of UpdateConnectionResponse"))
}
}
}
pub fn send_delete_connection_message(pw_did: &str, pw_verkey: &str, agent_did: &str, agent_vk: &str) -> VcxResult<()> {
delete_connection()
.to(pw_did)?
.to_vk(pw_verkey)?
.agent_did(agent_did)?
.agent_vk(agent_vk)?
.send_secure()
.map_err(|err| err.extend("Cannot delete connection"))
}
//TODO Every GeneralMessage extension, duplicates code
impl GeneralMessage for DeleteConnectionBuilder {
type Msg = DeleteConnectionBuilder;
fn set_agent_did(&mut self, did: String) {
self.agent_did = did;
}
fn set_agent_vk(&mut self, vk: String) {
self.agent_vk = vk;
}
fn set_to_did(&mut self, to_did: String) { self.to_did = to_did; }
fn set_to_vk(&mut self, to_vk: String) { self.to_vk = to_vk; }
fn prepare_request(&mut self) -> VcxResult<Vec<u8>> {
let message = match self.version {
settings::ProtocolTypes::V1 =>
A2AMessage::Version1(
A2AMessageV1::UpdateConnection(
UpdateConnection {
msg_type: MessageTypes::build(A2AMessageKinds::UpdateConnectionStatus),
status_code: self.status_code.clone(),
}
)
),
settings::ProtocolTypes::V2 =>
A2AMessage::Version2(
A2AMessageV2::UpdateConnection(
UpdateConnection {
msg_type: MessageTypes::build(A2AMessageKinds::UpdateConnectionStatus),
status_code: self.status_code.clone(),
}
)
)
};
prepare_message_for_agent(vec![message], &self.to_vk, &self.agent_did, &self.agent_vk, &self.version)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_deserialize_delete_connection_payload() {
let payload = vec![130, 165, 64, 116, 121, 112, 101, 130, 164, 110, 97, 109, 101, 179, 67, 79, 78, 78, 95, 83, 84, 65, 84, 85, 83, 95, 85, 80, 68, 65, 84, 69, 68, 163, 118, 101, 114, 163, 49, 46, 48, 170, 115, 116, 97, 116, 117, 115, 67, 111, 100, 101, 166, 67, 83, 45, 49, 48, 51];
let msg_str = r#"{ "@type": { "name": "CONN_STATUS_UPDATED", "ver": "1.0" }, "statusCode": "CS-103" }"#;
let delete_connection_payload: UpdateConnectionResponse = serde_json::from_str(&msg_str).unwrap();
assert_eq!(delete_connection_payload, rmp_serde::from_slice(&payload).unwrap());
}
}
| 34.482955 | 290 | 0.600923 |
720c6e302a983111c2b5f6113e4e77c320cfa7e9 | 897 | //! Re-export version of a base64 encoding used throughout the library.
pub(crate) mod base64 {
pub use base64::DecodeError;
fn div_and_round_up(dividend: usize, divisor: usize) -> usize {
(dividend + (divisor - 1)) / divisor
}
pub fn encode_no_padding(data: &[u8]) -> Vec<u8> {
let mut encoded = vec![0; div_and_round_up(data.len(), 3) * 4];
let bytes_written =
base64::encode_config_slice(data, base64::STANDARD_NO_PAD, &mut encoded);
encoded.resize(bytes_written, 0);
encoded
}
pub fn decode_no_padding(data: &[u8]) -> Result<Vec<u8>, base64::DecodeError> {
let mut decoded = vec![0; div_and_round_up(data.len(), 4) * 3];
let bytes_written =
base64::decode_config_slice(data, base64::STANDARD_NO_PAD, &mut decoded)?;
decoded.resize(bytes_written, 0);
Ok(decoded)
}
}
| 35.88 | 86 | 0.626533 |
03b2bc8de45c74fca3a25459902eccf23eb4843c | 30,919 | // Rust test file autogenerated with cargo build (build/spectests.rs).
// Please do NOT modify it by hand, as it will be reseted on next build.
// Test based on spectests/const_.wast
#![allow(
warnings,
dead_code
)]
use wabt::wat2wasm;
use super::_common::{spectest_importobject, NaNCheck};
use crate::webassembly::{compile, instantiate, Export, ImportObject, Instance, ResultObject};
// Line 5
fn create_module_1() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i32.const -1
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_1(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 6
#[test]
fn test_module_1() {
let result_object = create_module_1();
// We group the calls together
start_module_1(&result_object);
}
fn create_module_2() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i32.const -2147483648
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_2(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 8
#[test]
fn c2_l8_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 51, 50, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
48, 48, 48, 48, 48, 48, 48, 48, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 12
#[test]
fn c3_l12_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
56, 48, 48, 48, 48, 48, 48, 49, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 16
#[test]
fn test_module_2() {
let result_object = create_module_2();
// We group the calls together
start_module_2(&result_object);
}
fn create_module_3() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i32.const -1
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_3(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 17
#[test]
fn test_module_3() {
let result_object = create_module_3();
// We group the calls together
start_module_3(&result_object);
}
fn create_module_4() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i32.const -2147483648
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_4(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 19
#[test]
fn c6_l19_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 51, 50, 46, 99, 111, 110, 115, 116, 32, 52, 50, 57, 52,
57, 54, 55, 50, 57, 54, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 23
#[test]
fn c7_l23_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 50, 49, 52,
55, 52, 56, 51, 54, 52, 57, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 27
#[test]
fn test_module_4() {
let result_object = create_module_4();
// We group the calls together
start_module_4(&result_object);
}
fn create_module_5() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i64.const -1
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_5(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 28
#[test]
fn test_module_5() {
let result_object = create_module_5();
// We group the calls together
start_module_5(&result_object);
}
fn create_module_6() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i64.const -9223372036854775808
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_6(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 30
#[test]
fn c10_l30_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 54, 52, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 41, 32, 100, 114, 111, 112,
41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 34
#[test]
fn c11_l34_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
56, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 49, 41, 32, 100, 114, 111, 112,
41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 38
#[test]
fn test_module_6() {
let result_object = create_module_6();
// We group the calls together
start_module_6(&result_object);
}
fn create_module_7() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i64.const -1
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_7(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 39
#[test]
fn test_module_7() {
let result_object = create_module_7();
// We group the calls together
start_module_7(&result_object);
}
fn create_module_8() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
i64.const -9223372036854775808
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_8(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 41
#[test]
fn c14_l41_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 54, 52, 46, 99, 111, 110, 115, 116, 32, 49, 56, 52, 52,
54, 55, 52, 52, 48, 55, 51, 55, 48, 57, 53, 53, 49, 54, 49, 54, 41, 32, 100, 114, 111, 112,
41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 45
#[test]
fn c15_l45_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 105, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 57, 50, 50,
51, 51, 55, 50, 48, 51, 54, 56, 53, 52, 55, 55, 53, 56, 48, 57, 41, 32, 100, 114, 111, 112,
41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 49
#[test]
fn test_module_8() {
let result_object = create_module_8();
// We group the calls together
start_module_8(&result_object);
}
fn create_module_9() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
f32.const 0x1p+127 (;=1.70141e+38;)
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_9(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 50
#[test]
fn test_module_9() {
let result_object = create_module_9();
// We group the calls together
start_module_9(&result_object);
}
fn create_module_10() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
f32.const -0x1p+127 (;=-1.70141e+38;)
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_10(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 51
#[test]
fn test_module_10() {
let result_object = create_module_10();
// We group the calls together
start_module_10(&result_object);
}
fn create_module_11() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
f32.const 0x1.fffffep+127 (;=3.40282e+38;)
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_11(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 52
#[test]
fn test_module_11() {
let result_object = create_module_11();
// We group the calls together
start_module_11(&result_object);
}
fn create_module_12() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
f32.const -0x1.fffffep+127 (;=-3.40282e+38;)
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_12(result_object: &ResultObject) {
result_object.instance.start();
}
// Line 53
#[test]
fn test_module_12() {
let result_object = create_module_12();
// We group the calls together
start_module_12(&result_object);
}
fn create_module_13() -> ResultObject {
let module_str = "(module
(type (;0;) (func))
(func (;0;) (type 0)
f32.const 0x1.fffffep+127 (;=3.40282e+38;)
drop))
";
let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
// NOTE: Machine-generated from the WebAssembly spec testsuite (float const tests).
// Pattern: `create_module_N` compiles + instantiates a WAT module, `start_module_N`
// runs its start function, `test_module_N` drives both, and `cN_lM_assert_malformed`
// feeds the raw WAT text (as decimal byte values) to `compile` and expects an error.
// `// Line K` comments refer to the line in the original `.wast` file.
fn start_module_13(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 54
#[test]
fn test_module_13() {
    let result_object = create_module_13();
    // We group the calls together
    start_module_13(&result_object);
}
fn create_module_14() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f32.const -0x1.fffffep+127 (;=-3.40282e+38;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_14(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 56
// Malformed: f32 hex constant `0x1p128` overflows the f32 range.
#[test]
fn c22_l56_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
        112, 49, 50, 56, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 60
#[test]
fn c23_l60_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
        49, 112, 49, 50, 56, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 64
#[test]
fn c24_l64_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
        46, 102, 102, 102, 102, 102, 102, 112, 49, 50, 55, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 68
#[test]
fn c25_l68_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
        49, 46, 102, 102, 102, 102, 102, 102, 112, 49, 50, 55, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 72
#[test]
fn test_module_14() {
    let result_object = create_module_14();
    // We group the calls together
    start_module_14(&result_object);
}
// Generated f32 const tests: largest representable f32 magnitudes instantiate fine;
// out-of-range literals (e.g. `1e39`, `0x1.ffffffp127`) must be rejected as malformed.
fn create_module_15() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f32.const 0x1.2ced32p+126 (;=1e+38;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_15(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 73
#[test]
fn test_module_15() {
    let result_object = create_module_15();
    // We group the calls together
    start_module_15(&result_object);
}
fn create_module_16() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f32.const -0x1.2ced32p+126 (;=-1e+38;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_16(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 75
#[test]
fn c28_l75_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 49, 101, 51,
        57, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 79
#[test]
fn c29_l79_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 49, 101,
        51, 57, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 83
#[test]
fn test_module_16() {
    let result_object = create_module_16();
    // We group the calls together
    start_module_16(&result_object);
}
fn create_module_17() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f32.const 0x1.fffffep+127 (;=3.40282e+38;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_17(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 84
#[test]
fn test_module_17() {
    let result_object = create_module_17();
    // We group the calls together
    start_module_17(&result_object);
}
fn create_module_18() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f32.const -0x1.fffffep+127 (;=-3.40282e+38;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_18(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 86
#[test]
fn c32_l86_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 51, 52, 48, 50,
        56, 50, 51, 53, 54, 55, 55, 57, 55, 51, 51, 54, 54, 49, 54, 51, 55, 53, 51, 57, 51, 57, 53,
        52, 53, 56, 49, 52, 50, 53, 54, 56, 52, 52, 56, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 90
#[test]
fn c33_l90_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 51, 50, 46, 99, 111, 110, 115, 116, 32, 45, 51, 52, 48,
        50, 56, 50, 51, 53, 54, 55, 55, 57, 55, 51, 51, 54, 54, 49, 54, 51, 55, 53, 51, 57, 51, 57,
        53, 52, 53, 56, 49, 52, 50, 53, 54, 56, 52, 52, 56, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 94
#[test]
fn test_module_18() {
    let result_object = create_module_18();
    // We group the calls together
    start_module_18(&result_object);
}
// Generated f64 const tests: representable f64 magnitudes up to the maximum
// (0x1.fffffffffffffp+1023) must instantiate; `0x1p1024` literals are malformed.
fn create_module_19() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const 0x1p+1023 (;=8.98847e+307;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_19(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 95
#[test]
fn test_module_19() {
    let result_object = create_module_19();
    // We group the calls together
    start_module_19(&result_object);
}
fn create_module_20() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const -0x1p+1023 (;=-8.98847e+307;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_20(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 96
#[test]
fn test_module_20() {
    let result_object = create_module_20();
    // We group the calls together
    start_module_20(&result_object);
}
fn create_module_21() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const 0x1.fffffffffffffp+1023 (;=1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_21(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 97
#[test]
fn test_module_21() {
    let result_object = create_module_21();
    // We group the calls together
    start_module_21(&result_object);
}
fn create_module_22() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const -0x1.fffffffffffffp+1023 (;=-1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_22(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 98
#[test]
fn test_module_22() {
    let result_object = create_module_22();
    // We group the calls together
    start_module_22(&result_object);
}
fn create_module_23() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const 0x1.fffffffffffffp+1023 (;=1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_23(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 99
#[test]
fn test_module_23() {
    let result_object = create_module_23();
    // We group the calls together
    start_module_23(&result_object);
}
fn create_module_24() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const -0x1.fffffffffffffp+1023 (;=-1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_24(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 101
#[test]
fn c40_l101_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
        112, 49, 48, 50, 52, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 105
#[test]
fn c41_l105_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
        49, 112, 49, 48, 50, 52, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 109
// Generated f64 const tests continued: overflowing hex/decimal f64 literals are
// malformed; 1e308-magnitude values instantiate normally.
#[test]
fn c42_l109_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 48, 120, 49,
        46, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 56, 112, 49, 48, 50,
        51, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 113
#[test]
fn c43_l113_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 48, 120,
        49, 46, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 56, 112, 49, 48,
        50, 51, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 117
#[test]
fn test_module_24() {
    let result_object = create_module_24();
    // We group the calls together
    start_module_24(&result_object);
}
fn create_module_25() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const 0x1.1ccf385ebc8ap+1023 (;=1e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_25(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 118
#[test]
fn test_module_25() {
    let result_object = create_module_25();
    // We group the calls together
    start_module_25(&result_object);
}
fn create_module_26() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const -0x1.1ccf385ebc8ap+1023 (;=-1e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_26(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 120
#[test]
fn c46_l120_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 49, 101, 51,
        48, 57, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 124
#[test]
fn c47_l124_assert_malformed() {
    let wasm_binary = [
        40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 49, 101,
        51, 48, 57, 41, 32, 100, 114, 111, 112, 41,
    ];
    let compilation = compile(wasm_binary.to_vec());
    assert!(
        compilation.is_err(),
        "WASM should not compile as is malformed"
    );
}
// Line 128
#[test]
fn test_module_26() {
    let result_object = create_module_26();
    // We group the calls together
    start_module_26(&result_object);
}
fn create_module_27() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const 0x1.fffffffffffffp+1023 (;=1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_27(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 129
#[test]
fn test_module_27() {
    let result_object = create_module_27();
    // We group the calls together
    start_module_27(&result_object);
}
fn create_module_28() -> ResultObject {
    let module_str = "(module
      (type (;0;) (func))
      (func (;0;) (type 0)
        f64.const -0x1.fffffffffffffp+1023 (;=-1.79769e+308;)
        drop))
    ";
    let wasm_binary = wat2wasm(module_str.as_bytes()).expect("WAST not valid or malformed");
    instantiate(wasm_binary, spectest_importobject(), None).expect("WASM can't be instantiated")
}
fn start_module_28(result_object: &ResultObject) {
    result_object.instance.start();
}
// Line 131
#[test]
fn c50_l131_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 50, 54, 57, 54,
53, 51, 57, 55, 48, 50, 50, 57, 51, 52, 55, 51, 53, 54, 50, 50, 49, 55, 57, 49, 49, 51, 53,
53, 57, 55, 53, 53, 54, 53, 51, 53, 49, 57, 55, 49, 48, 53, 56, 53, 49, 50, 56, 56, 55, 54,
55, 52, 57, 52, 56, 57, 56, 51, 55, 54, 50, 49, 53, 50, 48, 52, 55, 51, 53, 56, 57, 49, 49,
55, 48, 48, 52, 50, 56, 48, 56, 49, 52, 48, 56, 56, 52, 51, 51, 55, 57, 52, 57, 49, 53, 48,
51, 49, 55, 50, 53, 55, 51, 49, 48, 54, 56, 56, 52, 51, 48, 50, 55, 49, 53, 55, 51, 54, 57,
54, 51, 53, 49, 52, 56, 49, 57, 57, 48, 51, 51, 52, 49, 57, 54, 50, 55, 52, 49, 53, 50, 55,
48, 49, 51, 50, 48, 48, 53, 53, 51, 48, 54, 50, 55, 53, 52, 55, 57, 48, 55, 52, 56, 54, 53,
56, 54, 52, 56, 50, 54, 57, 50, 51, 49, 49, 52, 51, 54, 56, 50, 51, 53, 49, 51, 53, 53, 56,
51, 57, 57, 51, 52, 49, 54, 49, 49, 51, 56, 48, 50, 55, 54, 50, 54, 56, 50, 55, 48, 48, 57,
49, 51, 52, 53, 54, 56, 55, 52, 56, 53, 53, 51, 53, 52, 56, 51, 52, 52, 50, 50, 50, 52, 56,
55, 49, 50, 56, 51, 56, 57, 57, 56, 49, 56, 53, 48, 50, 50, 52, 49, 50, 49, 57, 54, 55, 51,
57, 51, 48, 54, 50, 49, 55, 48, 56, 52, 55, 53, 51, 49, 48, 55, 50, 54, 53, 55, 55, 49, 51,
55, 56, 57, 52, 57, 56, 50, 49, 56, 55, 53, 54, 48, 54, 48, 51, 57, 50, 55, 54, 49, 56, 55,
50, 56, 55, 53, 53, 50, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
// Line 135
#[test]
fn c51_l135_assert_malformed() {
let wasm_binary = [
40, 102, 117, 110, 99, 32, 40, 102, 54, 52, 46, 99, 111, 110, 115, 116, 32, 45, 50, 54, 57,
54, 53, 51, 57, 55, 48, 50, 50, 57, 51, 52, 55, 51, 53, 54, 50, 50, 49, 55, 57, 49, 49, 51,
53, 53, 57, 55, 53, 53, 54, 53, 51, 53, 49, 57, 55, 49, 48, 53, 56, 53, 49, 50, 56, 56, 55,
54, 55, 52, 57, 52, 56, 57, 56, 51, 55, 54, 50, 49, 53, 50, 48, 52, 55, 51, 53, 56, 57, 49,
49, 55, 48, 48, 52, 50, 56, 48, 56, 49, 52, 48, 56, 56, 52, 51, 51, 55, 57, 52, 57, 49, 53,
48, 51, 49, 55, 50, 53, 55, 51, 49, 48, 54, 56, 56, 52, 51, 48, 50, 55, 49, 53, 55, 51, 54,
57, 54, 51, 53, 49, 52, 56, 49, 57, 57, 48, 51, 51, 52, 49, 57, 54, 50, 55, 52, 49, 53, 50,
55, 48, 49, 51, 50, 48, 48, 53, 53, 51, 48, 54, 50, 55, 53, 52, 55, 57, 48, 55, 52, 56, 54,
53, 56, 54, 52, 56, 50, 54, 57, 50, 51, 49, 49, 52, 51, 54, 56, 50, 51, 53, 49, 51, 53, 53,
56, 51, 57, 57, 51, 52, 49, 54, 49, 49, 51, 56, 48, 50, 55, 54, 50, 54, 56, 50, 55, 48, 48,
57, 49, 51, 52, 53, 54, 56, 55, 52, 56, 53, 53, 51, 53, 52, 56, 51, 52, 52, 50, 50, 50, 52,
56, 55, 49, 50, 56, 51, 56, 57, 57, 56, 49, 56, 53, 48, 50, 50, 52, 49, 50, 49, 57, 54, 55,
51, 57, 51, 48, 54, 50, 49, 55, 48, 56, 52, 55, 53, 51, 49, 48, 55, 50, 54, 53, 55, 55, 49,
51, 55, 56, 57, 52, 57, 56, 50, 49, 56, 55, 53, 54, 48, 54, 48, 51, 57, 50, 55, 54, 49, 56,
55, 50, 56, 55, 53, 53, 50, 41, 32, 100, 114, 111, 112, 41,
];
let compilation = compile(wasm_binary.to_vec());
assert!(
compilation.is_err(),
"WASM should not compile as is malformed"
);
}
#[test]
fn test_module_28() {
let result_object = create_module_28();
// We group the calls together
start_module_28(&result_object);
}
use crate::diagnostics::{ImportSuggestion, LabelSuggestion, TypoSuggestion};
use crate::late::lifetimes::{ElisionFailureInfo, LifetimeContext};
use crate::late::{AliasPossibility, LateResolutionVisitor, RibKind};
use crate::path_names_to_string;
use crate::{CrateLint, Module, ModuleKind, ModuleOrUniformRoot};
use crate::{PathResult, PathSource, Segment};
use rustc_ast::visit::FnKind;
use rustc_ast::{
self as ast, Expr, ExprKind, GenericParam, GenericParamKind, Item, ItemKind, NodeId, Path, Ty,
TyKind,
};
use rustc_ast_pretty::pprust::path_segment_to_string;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, SuggestionStyle};
use rustc_hir as hir;
use rustc_hir::def::Namespace::{self, *};
use rustc_hir::def::{self, CtorKind, CtorOf, DefKind};
use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::PrimTy;
use rustc_session::parse::feature_err;
use rustc_span::edition::Edition;
use rustc_span::hygiene::MacroKind;
use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, MultiSpan, Span, DUMMY_SP};
use std::iter;
use tracing::debug;
type Res = def::Res<ast::NodeId>;
/// A field or associated item from self type suggested in case of resolution failure.
enum AssocSuggestion {
Field,
MethodWithSelf,
AssocFn,
AssocType,
AssocConst,
}
impl AssocSuggestion {
fn action(&self) -> &'static str {
match self {
AssocSuggestion::Field => "use the available field",
AssocSuggestion::MethodWithSelf => "call the method with the fully-qualified path",
AssocSuggestion::AssocFn => "call the associated function",
AssocSuggestion::AssocConst => "use the associated `const`",
AssocSuggestion::AssocType => "use the associated type",
}
}
}
crate enum MissingLifetimeSpot<'tcx> {
Generics(&'tcx hir::Generics<'tcx>),
HigherRanked { span: Span, span_type: ForLifetimeSpanType },
Static,
}
crate enum ForLifetimeSpanType {
BoundEmpty,
BoundTail,
TypeEmpty,
TypeTail,
}
impl ForLifetimeSpanType {
crate fn descr(&self) -> &'static str {
match self {
Self::BoundEmpty | Self::BoundTail => "bound",
Self::TypeEmpty | Self::TypeTail => "type",
}
}
crate fn suggestion(&self, sugg: &str) -> String {
match self {
Self::BoundEmpty | Self::TypeEmpty => format!("for<{}> ", sugg),
Self::BoundTail | Self::TypeTail => format!(", {}", sugg),
}
}
}
impl<'tcx> Into<MissingLifetimeSpot<'tcx>> for &'tcx hir::Generics<'tcx> {
fn into(self) -> MissingLifetimeSpot<'tcx> {
MissingLifetimeSpot::Generics(self)
}
}
fn is_self_type(path: &[Segment], namespace: Namespace) -> bool {
namespace == TypeNS && path.len() == 1 && path[0].ident.name == kw::SelfUpper
}
fn is_self_value(path: &[Segment], namespace: Namespace) -> bool {
namespace == ValueNS && path.len() == 1 && path[0].ident.name == kw::SelfLower
}
/// Gets the stringified path for an enum from an `ImportSuggestion` for an enum variant.
fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, String) {
let variant_path = &suggestion.path;
let variant_path_string = path_names_to_string(variant_path);
let path_len = suggestion.path.segments.len();
let enum_path = ast::Path {
span: suggestion.path.span,
segments: suggestion.path.segments[0..path_len - 1].to_vec(),
tokens: None,
};
let enum_path_string = path_names_to_string(&enum_path);
(variant_path_string, enum_path_string)
}
impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn def_span(&self, def_id: DefId) -> Option<Span> {
match def_id.krate {
LOCAL_CRATE => self.r.opt_span(def_id),
_ => Some(
self.r
.session
.source_map()
.guess_head_span(self.r.cstore().get_span_untracked(def_id, self.r.session)),
),
}
}
    /// Handles error reporting for `smart_resolve_path_fragment` function.
    /// Creates base error and amends it with one short label and possibly some longer helps/notes.
    ///
    /// Returns the (not yet emitted) diagnostic together with import
    /// candidates that the caller can turn into `use` suggestions.
    pub(crate) fn smart_resolve_report_errors(
        &mut self,
        path: &[Segment],
        span: Span,
        source: PathSource<'_>,
        res: Option<Res>,
    ) -> (DiagnosticBuilder<'a>, Vec<ImportSuggestion>) {
        let ident_span = path.last().map_or(span, |ident| ident.ident.span);
        let ns = source.namespace();
        let is_expected = &|res| source.is_expected(res);
        let is_enum_variant = &|res| matches!(res, Res::Def(DefKind::Variant, _));
        // Make the base error.
        let mut expected = source.descr_expected();
        let path_str = Segment::names_to_string(path);
        let item_str = path.last().unwrap().ident;
        // `res` is `Some` when the path resolved to something of the wrong kind,
        // `None` when it did not resolve at all; the message differs accordingly.
        let (base_msg, fallback_label, base_span, could_be_expr) = if let Some(res) = res {
            (
                format!("expected {}, found {} `{}`", expected, res.descr(), path_str),
                format!("not a {}", expected),
                span,
                match res {
                    Res::Def(DefKind::Fn, _) => {
                        // Verify whether this is a fn call or an Fn used as a type.
                        self.r
                            .session
                            .source_map()
                            .span_to_snippet(span)
                            .map(|snippet| snippet.ends_with(')'))
                            .unwrap_or(false)
                    }
                    Res::Def(
                        DefKind::Ctor(..) | DefKind::AssocFn | DefKind::Const | DefKind::AssocConst,
                        _,
                    )
                    | Res::SelfCtor(_)
                    | Res::PrimTy(_)
                    | Res::Local(_) => true,
                    _ => false,
                },
            )
        } else {
            let item_span = path.last().unwrap().ident.span;
            // Describe where the lookup happened (scope, crate root, imported
            // crates, or a named module prefix) for the "cannot find" message.
            let (mod_prefix, mod_str) = if path.len() == 1 {
                (String::new(), "this scope".to_string())
            } else if path.len() == 2 && path[0].ident.name == kw::PathRoot {
                if self.r.session.edition() > Edition::Edition2015 {
                    // In edition 2018 onwards, the `::foo` syntax may only pull from the extern prelude
                    // which overrides all other expectations of item type
                    expected = "crate";
                    (String::new(), "the list of imported crates".to_string())
                } else {
                    (String::new(), "the crate root".to_string())
                }
            } else if path.len() == 2 && path[0].ident.name == kw::Crate {
                (String::new(), "the crate root".to_string())
            } else {
                let mod_path = &path[..path.len() - 1];
                let mod_prefix =
                    match self.resolve_path(mod_path, Some(TypeNS), false, span, CrateLint::No) {
                        PathResult::Module(ModuleOrUniformRoot::Module(module)) => module.res(),
                        _ => None,
                    }
                    .map_or_else(String::new, |res| format!("{} ", res.descr()));
                (mod_prefix, format!("`{}`", Segment::names_to_string(mod_path)))
            };
            (
                format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
                if path_str == "async" && expected.starts_with("struct") {
                    "`async` blocks are only allowed in Rust 2018 or later".to_string()
                } else {
                    format!("not found in {}", mod_str)
                },
                item_span,
                false,
            )
        };
        let code = source.error_code(res.is_some());
        let mut err = self.r.session.struct_span_err_with_code(base_span, &base_msg, code);
        // `if x = y { .. }` with an unresolved path: likely meant `if let`.
        match (source, self.diagnostic_metadata.in_if_condition) {
            (PathSource::Expr(_), Some(Expr { span, kind: ExprKind::Assign(..), .. })) => {
                err.span_suggestion_verbose(
                    span.shrink_to_lo(),
                    "you might have meant to use pattern matching",
                    "let ".to_string(),
                    Applicability::MaybeIncorrect,
                );
                self.r.session.if_let_suggestions.borrow_mut().insert(*span);
            }
            _ => {}
        }
        let is_assoc_fn = self.self_type_is_available(span);
        // Emit help message for fake-self from other languages (e.g., `this` in Javascript).
        if ["this", "my"].contains(&&*item_str.as_str()) && is_assoc_fn {
            err.span_suggestion_short(
                span,
                "you might have meant to use `self` here instead",
                "self".to_string(),
                Applicability::MaybeIncorrect,
            );
            // If there is no `self` receiver, also suggest adding one.
            if !self.self_value_is_available(path[0].ident.span, span) {
                if let Some((FnKind::Fn(_, _, sig, ..), fn_span)) =
                    &self.diagnostic_metadata.current_function
                {
                    let (span, sugg) = if let Some(param) = sig.decl.inputs.get(0) {
                        (param.span.shrink_to_lo(), "&self, ")
                    } else {
                        (
                            self.r
                                .session
                                .source_map()
                                .span_through_char(*fn_span, '(')
                                .shrink_to_hi(),
                            "&self",
                        )
                    };
                    err.span_suggestion_verbose(
                        span,
                        "if you meant to use `self`, you are also missing a `self` receiver \
                         argument",
                        sugg.to_string(),
                        Applicability::MaybeIncorrect,
                    );
                }
            }
        }
        // Emit special messages for unresolved `Self` and `self`.
        if is_self_type(path, ns) {
            err.code(rustc_errors::error_code!(E0411));
            err.span_label(
                span,
                "`Self` is only available in impls, traits, and type definitions".to_string(),
            );
            return (err, Vec::new());
        }
        if is_self_value(path, ns) {
            debug!("smart_resolve_path_fragment: E0424, source={:?}", source);
            err.code(rustc_errors::error_code!(E0424));
            err.span_label(span, match source {
                PathSource::Pat => "`self` value is a keyword and may not be bound to variables or shadowed"
                   .to_string(),
                _ => "`self` value is a keyword only available in methods with a `self` parameter"
                    .to_string(),
            });
            if let Some((fn_kind, span)) = &self.diagnostic_metadata.current_function {
                // The current function has a `self' parameter, but we were unable to resolve
                // a reference to `self`. This can only happen if the `self` identifier we
                // are resolving came from a different hygiene context.
                if fn_kind.decl().inputs.get(0).map_or(false, |p| p.is_self()) {
                    err.span_label(*span, "this function has a `self` parameter, but a macro invocation can only access identifiers it receives from parameters");
                } else {
                    let doesnt = if is_assoc_fn {
                        let (span, sugg) = fn_kind
                            .decl()
                            .inputs
                            .get(0)
                            .map(|p| (p.span.shrink_to_lo(), "&self, "))
                            .unwrap_or_else(|| {
                                (
                                    self.r
                                        .session
                                        .source_map()
                                        .span_through_char(*span, '(')
                                        .shrink_to_hi(),
                                    "&self",
                                )
                            });
                        err.span_suggestion_verbose(
                            span,
                            "add a `self` receiver parameter to make the associated `fn` a method",
                            sugg.to_string(),
                            Applicability::MaybeIncorrect,
                        );
                        "doesn't"
                    } else {
                        "can't"
                    };
                    if let Some(ident) = fn_kind.ident() {
                        err.span_label(
                            ident.span,
                            &format!("this function {} have a `self` parameter", doesnt),
                        );
                    }
                }
            }
            return (err, Vec::new());
        }
        // Try to lookup name in more relaxed fashion for better error reporting.
        let ident = path.last().unwrap().ident;
        // Import candidates, excluding the definition the path already resolved to.
        let candidates = self
            .r
            .lookup_import_candidates(ident, ns, &self.parent_scope, is_expected)
            .drain(..)
            .filter(|ImportSuggestion { did, .. }| {
                match (did, res.and_then(|res| res.opt_def_id())) {
                    (Some(suggestion_did), Some(actual_did)) => *suggestion_did != actual_did,
                    _ => true,
                }
            })
            .collect::<Vec<_>>();
        let crate_def_id = DefId::local(CRATE_DEF_INDEX);
        // If an enum was expected and the name matches a variant, suggest the
        // variant's enum type instead.
        if candidates.is_empty() && is_expected(Res::Def(DefKind::Enum, crate_def_id)) {
            let mut enum_candidates: Vec<_> = self
                .r
                .lookup_import_candidates(ident, ns, &self.parent_scope, is_enum_variant)
                .into_iter()
                .map(|suggestion| import_candidate_to_enum_paths(&suggestion))
                .filter(|(_, enum_ty_path)| !enum_ty_path.starts_with("std::prelude::"))
                .collect();
            if !enum_candidates.is_empty() {
                if let (PathSource::Type, Some(span)) =
                    (source, self.diagnostic_metadata.current_type_ascription.last())
                {
                    if self
                        .r
                        .session
                        .parse_sess
                        .type_ascription_path_suggestions
                        .borrow()
                        .contains(span)
                    {
                        // Already reported this issue on the lhs of the type ascription.
                        err.delay_as_bug();
                        return (err, candidates);
                    }
                }
                enum_candidates.sort();
                // Contextualize for E0412 "cannot find type", but don't belabor the point
                // (that it's a variant) for E0573 "expected type, found variant".
                let preamble = if res.is_none() {
                    let others = match enum_candidates.len() {
                        1 => String::new(),
                        2 => " and 1 other".to_owned(),
                        n => format!(" and {} others", n),
                    };
                    format!("there is an enum variant `{}`{}; ", enum_candidates[0].0, others)
                } else {
                    String::new()
                };
                let msg = format!("{}try using the variant's enum", preamble);
                err.span_suggestions(
                    span,
                    &msg,
                    enum_candidates.into_iter().map(|(_variant_path, enum_ty_path)| enum_ty_path),
                    Applicability::MachineApplicable,
                );
            }
        }
        // A bare identifier inside an impl/trait: maybe it is a field or
        // associated item of the self type (`self.x`, `Self::x`).
        if path.len() == 1 && self.self_type_is_available(span) {
            if let Some(candidate) = self.lookup_assoc_candidate(ident, ns, is_expected) {
                let self_is_available = self.self_value_is_available(path[0].ident.span, span);
                match candidate {
                    AssocSuggestion::Field => {
                        if self_is_available {
                            err.span_suggestion(
                                span,
                                "you might have meant to use the available field",
                                format!("self.{}", path_str),
                                Applicability::MachineApplicable,
                            );
                        } else {
                            err.span_label(span, "a field by this name exists in `Self`");
                        }
                    }
                    AssocSuggestion::MethodWithSelf if self_is_available => {
                        err.span_suggestion(
                            span,
                            "you might have meant to call the method",
                            format!("self.{}", path_str),
                            Applicability::MachineApplicable,
                        );
                    }
                    AssocSuggestion::MethodWithSelf
                    | AssocSuggestion::AssocFn
                    | AssocSuggestion::AssocConst
                    | AssocSuggestion::AssocType => {
                        err.span_suggestion(
                            span,
                            &format!("you might have meant to {}", candidate.action()),
                            format!("Self::{}", path_str),
                            Applicability::MachineApplicable,
                        );
                    }
                }
                return (err, candidates);
            }
            // If the first argument in call is `self` suggest calling a method.
            if let Some((call_span, args_span)) = self.call_has_self_arg(source) {
                let mut args_snippet = String::new();
                if let Some(args_span) = args_span {
                    if let Ok(snippet) = self.r.session.source_map().span_to_snippet(args_span) {
                        args_snippet = snippet;
                    }
                }
                err.span_suggestion(
                    call_span,
                    &format!("try calling `{}` as a method", ident),
                    format!("self.{}({})", path_str, args_snippet),
                    Applicability::MachineApplicable,
                );
                return (err, candidates);
            }
        }
        // Try Levenshtein algorithm.
        let typo_sugg = self.lookup_typo_candidate(path, ns, is_expected, span);
        // Try context-dependent help if relaxed lookup didn't work.
        if let Some(res) = res {
            if self.smart_resolve_context_dependent_help(
                &mut err,
                span,
                source,
                res,
                &path_str,
                &fallback_label,
            ) {
                // We do this to avoid losing a secondary span when we override the main error span.
                self.r.add_typo_suggestion(&mut err, typo_sugg, ident_span);
                return (err, candidates);
            }
        }
        // Whether the span is from a macro expansion (but not a desugaring).
        let is_macro = base_span.from_expansion() && base_span.desugaring_kind().is_none();
        if !self.type_ascription_suggestion(&mut err, base_span) {
            let mut fallback = false;
            // A struct/enum/union used where a trait was expected in a trait
            // object with extra bounds: explain that `+` can't bound a struct.
            if let (
                PathSource::Trait(AliasPossibility::Maybe),
                Some(Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, _)),
                false,
            ) = (source, res, is_macro)
            {
                if let Some(bounds @ [_, .., _]) = self.diagnostic_metadata.current_trait_object {
                    fallback = true;
                    let spans: Vec<Span> = bounds
                        .iter()
                        .map(|bound| bound.span())
                        .filter(|&sp| sp != base_span)
                        .collect();
                    let start_span = bounds.iter().map(|bound| bound.span()).next().unwrap();
                    // `end_span` is the end of the poly trait ref (Foo + 'baz + Bar><)
                    let end_span = bounds.iter().map(|bound| bound.span()).last().unwrap();
                    // `last_bound_span` is the last bound of the poly trait ref (Foo + >'baz< + Bar)
                    let last_bound_span = spans.last().cloned().unwrap();
                    let mut multi_span: MultiSpan = spans.clone().into();
                    for sp in spans {
                        let msg = if sp == last_bound_span {
                            format!(
                                "...because of {} bound{}",
                                if bounds.len() <= 2 { "this" } else { "these" },
                                if bounds.len() <= 2 { "" } else { "s" },
                            )
                        } else {
                            String::new()
                        };
                        multi_span.push_span_label(sp, msg);
                    }
                    multi_span.push_span_label(
                        base_span,
                        "expected this type to be a trait...".to_string(),
                    );
                    err.span_help(
                        multi_span,
                        "`+` is used to constrain a \"trait object\" type with lifetimes or \
                         auto-traits; structs and enums can't be bound in that way",
                    );
                    if bounds.iter().all(|bound| match bound {
                        ast::GenericBound::Outlives(_) => true,
                        ast::GenericBound::Trait(tr, _) => tr.span == base_span,
                    }) {
                        let mut sugg = vec![];
                        if base_span != start_span {
                            sugg.push((start_span.until(base_span), String::new()));
                        }
                        if base_span != end_span {
                            sugg.push((base_span.shrink_to_hi().to(end_span), String::new()));
                        }
                        err.multipart_suggestion(
                            "if you meant to use a type and not a trait here, remove the bounds",
                            sugg,
                            Applicability::MaybeIncorrect,
                        );
                    }
                }
            }
            fallback |= self.restrict_assoc_type_in_where_clause(span, &mut err);
            if !self.r.add_typo_suggestion(&mut err, typo_sugg, ident_span) {
                fallback = true;
                // `let x: y z;` — the author probably meant `let x: y = z;`.
                match self.diagnostic_metadata.current_let_binding {
                    Some((pat_sp, Some(ty_sp), None))
                        if ty_sp.contains(base_span) && could_be_expr =>
                    {
                        err.span_suggestion_short(
                            pat_sp.between(ty_sp),
                            "use `=` if you meant to assign",
                            " = ".to_string(),
                            Applicability::MaybeIncorrect,
                        );
                    }
                    _ => {}
                }
            }
            if fallback {
                // Fallback label.
                err.span_label(base_span, fallback_label);
            }
        }
        if let Some(err_code) = &err.code {
            if err_code == &rustc_errors::error_code!(E0425) {
                // E0425: the unresolved name might actually be a loop label
                // (e.g. `break 'a` written without the tick).
                for label_rib in &self.label_ribs {
                    for (label_ident, node_id) in &label_rib.bindings {
                        if format!("'{}", ident) == label_ident.to_string() {
                            err.span_label(label_ident.span, "a label with a similar name exists");
                            if let PathSource::Expr(Some(Expr {
                                kind: ExprKind::Break(None, Some(_)),
                                ..
                            })) = source
                            {
                                err.span_suggestion(
                                    span,
                                    "use the similarly named label",
                                    label_ident.name.to_string(),
                                    Applicability::MaybeIncorrect,
                                );
                                // Do not lint against unused label when we suggest them.
                                self.diagnostic_metadata.unused_labels.remove(node_id);
                            }
                        }
                    }
                }
            } else if err_code == &rustc_errors::error_code!(E0412) {
                // E0412: the name may be a primitive type spelled as in
                // another language (e.g. `int` for `i32`).
                if let Some(correct) = Self::likely_rust_type(path) {
                    err.span_suggestion(
                        span,
                        "perhaps you intended to use this type",
                        correct.to_string(),
                        Applicability::MaybeIncorrect,
                    );
                }
            }
        }
        (err, candidates)
    }
    /// Given `where <T as Bar>::Baz: String`, suggest `where T: Bar<Baz = String>`.
    ///
    /// Returns `true` (after possibly attaching a structured suggestion to `err`)
    /// only when the current `where` predicate bounds a fully resolved associated
    /// type whose self type is a plain type parameter; otherwise `err` is left
    /// untouched and `false` is returned.
    fn restrict_assoc_type_in_where_clause(
        &mut self,
        span: Span,
        err: &mut DiagnosticBuilder<'_>,
    ) -> bool {
        // Detect that we are actually in a `where` predicate.
        let (bounded_ty, bounds, where_span) =
            if let Some(ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
                bounded_ty,
                bound_generic_params,
                bounds,
                span,
            })) = self.diagnostic_metadata.current_where_predicate
            {
                // Bail on `for<'a> ...` predicates; the rewrite below doesn't model them.
                if !bound_generic_params.is_empty() {
                    return false;
                }
                (bounded_ty, bounds, span)
            } else {
                return false;
            };
        // Confirm that the target is an associated type.
        let (ty, position, path) = if let ast::TyKind::Path(
            Some(ast::QSelf { ty, position, .. }),
            path,
        ) = &bounded_ty.kind
        {
            // use this to verify that ident is a type param.
            let partial_res = if let Ok(Some(partial_res)) = self.resolve_qpath_anywhere(
                bounded_ty.id,
                None,
                &Segment::from_path(path),
                Namespace::TypeNS,
                span,
                true,
                CrateLint::No,
            ) {
                partial_res
            } else {
                return false;
            };
            // Only proceed for a *fully* resolved associated type.
            if !(matches!(
                partial_res.base_res(),
                hir::def::Res::Def(hir::def::DefKind::AssocTy, _)
            ) && partial_res.unresolved_segments() == 0)
            {
                return false;
            }
            (ty, position, path)
        } else {
            return false;
        };
        if let ast::TyKind::Path(None, type_param_path) = &ty.peel_refs().kind {
            // Confirm that the `SelfTy` is a type parameter.
            let partial_res = if let Ok(Some(partial_res)) = self.resolve_qpath_anywhere(
                bounded_ty.id,
                None,
                &Segment::from_path(type_param_path),
                Namespace::TypeNS,
                span,
                true,
                CrateLint::No,
            ) {
                partial_res
            } else {
                return false;
            };
            if !(matches!(
                partial_res.base_res(),
                hir::def::Res::Def(hir::def::DefKind::TyParam, _)
            ) && partial_res.unresolved_segments() == 0)
            {
                return false;
            }
            // Require the simplest shape: a single-segment type param bounded by a
            // single bare (non-`?`, non-`for<..>`) trait whose path is one segment.
            if let (
                [ast::PathSegment { ident: constrain_ident, args: None, .. }],
                [ast::GenericBound::Trait(poly_trait_ref, ast::TraitBoundModifier::None)],
            ) = (&type_param_path.segments[..], &bounds[..])
            {
                if let [ast::PathSegment { ident, args: None, .. }] =
                    &poly_trait_ref.trait_ref.path.segments[..]
                {
                    if ident.span == span {
                        // Rewrite the whole predicate as `T: Trait<Assoc = Bound>`.
                        err.span_suggestion_verbose(
                            *where_span,
                            &format!("constrain the associated type to `{}`", ident),
                            format!(
                                "{}: {}<{} = {}>",
                                self.r
                                    .session
                                    .source_map()
                                    .span_to_snippet(ty.span) // Account for `<&'a T as Foo>::Bar`.
                                    .unwrap_or_else(|_| constrain_ident.to_string()),
                                path.segments[..*position]
                                    .iter()
                                    .map(|segment| path_segment_to_string(segment))
                                    .collect::<Vec<_>>()
                                    .join("::"),
                                path.segments[*position..]
                                    .iter()
                                    .map(|segment| path_segment_to_string(segment))
                                    .collect::<Vec<_>>()
                                    .join("::"),
                                ident,
                            ),
                            Applicability::MaybeIncorrect,
                        );
                    }
                    return true;
                }
            }
        }
        false
    }
/// Check if the source is call expression and the first argument is `self`. If true,
/// return the span of whole call and the span for all arguments expect the first one (`self`).
fn call_has_self_arg(&self, source: PathSource<'_>) -> Option<(Span, Option<Span>)> {
let mut has_self_arg = None;
if let PathSource::Expr(Some(parent)) = source {
match &parent.kind {
ExprKind::Call(_, args) if !args.is_empty() => {
let mut expr_kind = &args[0].kind;
loop {
match expr_kind {
ExprKind::Path(_, arg_name) if arg_name.segments.len() == 1 => {
if arg_name.segments[0].ident.name == kw::SelfLower {
let call_span = parent.span;
let tail_args_span = if args.len() > 1 {
Some(Span::new(
args[1].span.lo(),
args.last().unwrap().span.hi(),
call_span.ctxt(),
))
} else {
None
};
has_self_arg = Some((call_span, tail_args_span));
}
break;
}
ExprKind::AddrOf(_, _, expr) => expr_kind = &expr.kind,
_ => break,
}
}
}
_ => (),
}
};
has_self_arg
}
fn followed_by_brace(&self, span: Span) -> (bool, Option<Span>) {
// HACK(estebank): find a better way to figure out that this was a
// parser issue where a struct literal is being used on an expression
// where a brace being opened means a block is being started. Look
// ahead for the next text to see if `span` is followed by a `{`.
let sm = self.r.session.source_map();
let mut sp = span;
loop {
sp = sm.next_point(sp);
match sm.span_to_snippet(sp) {
Ok(ref snippet) => {
if snippet.chars().any(|c| !c.is_whitespace()) {
break;
}
}
_ => break,
}
}
let followed_by_brace = matches!(sm.span_to_snippet(sp), Ok(ref snippet) if snippet == "{");
// In case this could be a struct literal that needs to be surrounded
// by parentheses, find the appropriate span.
let mut i = 0;
let mut closing_brace = None;
loop {
sp = sm.next_point(sp);
match sm.span_to_snippet(sp) {
Ok(ref snippet) => {
if snippet == "}" {
closing_brace = Some(span.to(sp));
break;
}
}
_ => break,
}
i += 1;
// The bigger the span, the more likely we're incorrect --
// bound it to 100 chars long.
if i > 100 {
break;
}
}
(followed_by_brace, closing_brace)
}
/// Provides context-dependent help for errors reported by the `smart_resolve_path_fragment`
/// function.
/// Returns `true` if able to provide context-dependent help.
fn smart_resolve_context_dependent_help(
&mut self,
err: &mut DiagnosticBuilder<'a>,
span: Span,
source: PathSource<'_>,
res: Res,
path_str: &str,
fallback_label: &str,
) -> bool {
let ns = source.namespace();
let is_expected = &|res| source.is_expected(res);
let path_sep = |err: &mut DiagnosticBuilder<'_>, expr: &Expr| match expr.kind {
ExprKind::Field(_, ident) => {
err.span_suggestion(
expr.span,
"use the path separator to refer to an item",
format!("{}::{}", path_str, ident),
Applicability::MaybeIncorrect,
);
true
}
ExprKind::MethodCall(ref segment, ..) => {
let span = expr.span.with_hi(segment.ident.span.hi());
err.span_suggestion(
span,
"use the path separator to refer to an item",
format!("{}::{}", path_str, segment.ident),
Applicability::MaybeIncorrect,
);
true
}
_ => false,
};
let find_span = |source: &PathSource<'_>, err: &mut DiagnosticBuilder<'_>| {
match source {
PathSource::Expr(Some(Expr { span, kind: ExprKind::Call(_, _), .. }))
| PathSource::TupleStruct(span, _) => {
// We want the main underline to cover the suggested code as well for
// cleaner output.
err.set_span(*span);
*span
}
_ => span,
}
};
let mut bad_struct_syntax_suggestion = |def_id: DefId| {
let (followed_by_brace, closing_brace) = self.followed_by_brace(span);
match source {
PathSource::Expr(Some(
parent @ Expr { kind: ExprKind::Field(..) | ExprKind::MethodCall(..), .. },
)) if path_sep(err, &parent) => {}
PathSource::Expr(
None
| Some(Expr {
kind:
ExprKind::Path(..)
| ExprKind::Binary(..)
| ExprKind::Unary(..)
| ExprKind::If(..)
| ExprKind::While(..)
| ExprKind::ForLoop(..)
| ExprKind::Match(..),
..
}),
) if followed_by_brace => {
if let Some(sp) = closing_brace {
err.span_label(span, fallback_label);
err.multipart_suggestion(
"surround the struct literal with parentheses",
vec![
(sp.shrink_to_lo(), "(".to_string()),
(sp.shrink_to_hi(), ")".to_string()),
],
Applicability::MaybeIncorrect,
);
} else {
err.span_label(
span, // Note the parentheses surrounding the suggestion below
format!(
"you might want to surround a struct literal with parentheses: \
`({} {{ /* fields */ }})`?",
path_str
),
);
}
}
PathSource::Expr(_) | PathSource::TupleStruct(..) | PathSource::Pat => {
let span = find_span(&source, err);
if let Some(span) = self.def_span(def_id) {
err.span_label(span, &format!("`{}` defined here", path_str));
}
let (tail, descr, applicability) = match source {
PathSource::Pat | PathSource::TupleStruct(..) => {
("", "pattern", Applicability::MachineApplicable)
}
_ => (": val", "literal", Applicability::HasPlaceholders),
};
let (fields, applicability) = match self.r.field_names.get(&def_id) {
Some(fields) => (
fields
.iter()
.map(|f| format!("{}{}", f.node, tail))
.collect::<Vec<String>>()
.join(", "),
applicability,
),
None => ("/* fields */".to_string(), Applicability::HasPlaceholders),
};
let pad = match self.r.field_names.get(&def_id) {
Some(fields) if fields.is_empty() => "",
_ => " ",
};
err.span_suggestion(
span,
&format!("use struct {} syntax instead", descr),
format!("{path_str} {{{pad}{fields}{pad}}}"),
applicability,
);
}
_ => {
err.span_label(span, fallback_label);
}
}
};
match (res, source) {
(Res::Def(DefKind::Macro(MacroKind::Bang), _), _) => {
err.span_label(span, fallback_label);
err.span_suggestion_verbose(
span.shrink_to_hi(),
"use `!` to invoke the macro",
"!".to_string(),
Applicability::MaybeIncorrect,
);
if path_str == "try" && span.rust_2015() {
err.note("if you want the `try` keyword, you need Rust 2018 or later");
}
}
(Res::Def(DefKind::TyAlias, def_id), PathSource::Trait(_)) => {
err.span_label(span, "type aliases cannot be used as traits");
if self.r.session.is_nightly_build() {
let msg = "you might have meant to use `#![feature(trait_alias)]` instead of a \
`type` alias";
if let Some(span) = self.def_span(def_id) {
if let Ok(snip) = self.r.session.source_map().span_to_snippet(span) {
// The span contains a type alias so we should be able to
// replace `type` with `trait`.
let snip = snip.replacen("type", "trait", 1);
err.span_suggestion(span, msg, snip, Applicability::MaybeIncorrect);
} else {
err.span_help(span, msg);
}
} else {
err.help(msg);
}
}
}
(Res::Def(DefKind::Mod, _), PathSource::Expr(Some(parent))) => {
if !path_sep(err, &parent) {
return false;
}
}
(
Res::Def(DefKind::Enum, def_id),
PathSource::TupleStruct(..) | PathSource::Expr(..),
) => {
if self
.diagnostic_metadata
.current_type_ascription
.last()
.map(|sp| {
self.r
.session
.parse_sess
.type_ascription_path_suggestions
.borrow()
.contains(&sp)
})
.unwrap_or(false)
{
err.delay_as_bug();
// We already suggested changing `:` into `::` during parsing.
return false;
}
self.suggest_using_enum_variant(err, source, def_id, span);
}
(Res::Def(DefKind::Struct, def_id), _) if ns == ValueNS => {
let (ctor_def, ctor_vis, fields) =
if let Some(struct_ctor) = self.r.struct_constructors.get(&def_id).cloned() {
struct_ctor
} else {
bad_struct_syntax_suggestion(def_id);
return true;
};
let is_accessible = self.r.is_accessible_from(ctor_vis, self.parent_scope.module);
if !is_expected(ctor_def) || is_accessible {
return true;
}
let field_spans = match source {
// e.g. `if let Enum::TupleVariant(field1, field2) = _`
PathSource::TupleStruct(_, pattern_spans) => {
err.set_primary_message(
"cannot match against a tuple struct which contains private fields",
);
// Use spans of the tuple struct pattern.
Some(Vec::from(pattern_spans))
}
// e.g. `let _ = Enum::TupleVariant(field1, field2);`
_ if source.is_call() => {
err.set_primary_message(
"cannot initialize a tuple struct which contains private fields",
);
// Use spans of the tuple struct definition.
self.r
.field_names
.get(&def_id)
.map(|fields| fields.iter().map(|f| f.span).collect::<Vec<_>>())
}
_ => None,
};
if let Some(spans) =
field_spans.filter(|spans| spans.len() > 0 && fields.len() == spans.len())
{
let non_visible_spans: Vec<Span> = iter::zip(&fields, &spans)
.filter(|(vis, _)| {
!self.r.is_accessible_from(**vis, self.parent_scope.module)
})
.map(|(_, span)| *span)
.collect();
if non_visible_spans.len() > 0 {
let mut m: rustc_span::MultiSpan = non_visible_spans.clone().into();
non_visible_spans
.into_iter()
.for_each(|s| m.push_span_label(s, "private field".to_string()));
err.span_note(m, "constructor is not visible here due to private fields");
}
return true;
}
err.span_label(
span,
"constructor is not visible here due to private fields".to_string(),
);
}
(
Res::Def(
DefKind::Union | DefKind::Variant | DefKind::Ctor(_, CtorKind::Fictive),
def_id,
),
_,
) if ns == ValueNS => {
bad_struct_syntax_suggestion(def_id);
}
(Res::Def(DefKind::Ctor(_, CtorKind::Const), def_id), _) if ns == ValueNS => {
match source {
PathSource::Expr(_) | PathSource::TupleStruct(..) | PathSource::Pat => {
let span = find_span(&source, err);
if let Some(span) = self.def_span(def_id) {
err.span_label(span, &format!("`{}` defined here", path_str));
}
err.span_suggestion(
span,
&format!("use this syntax instead"),
format!("{path_str}"),
Applicability::MaybeIncorrect,
);
}
_ => return false,
}
}
(Res::Def(DefKind::Ctor(_, CtorKind::Fn), def_id), _) if ns == ValueNS => {
if let Some(span) = self.def_span(def_id) {
err.span_label(span, &format!("`{}` defined here", path_str));
}
let fields = self.r.field_names.get(&def_id).map_or_else(
|| "/* fields */".to_string(),
|fields| vec!["_"; fields.len()].join(", "),
);
err.span_suggestion(
span,
"use the tuple variant pattern syntax instead",
format!("{}({})", path_str, fields),
Applicability::HasPlaceholders,
);
}
(Res::SelfTy(..), _) if ns == ValueNS => {
err.span_label(span, fallback_label);
err.note("can't use `Self` as a constructor, you must use the implemented struct");
}
(Res::Def(DefKind::TyAlias | DefKind::AssocTy, _), _) if ns == ValueNS => {
err.note("can't use a type alias as a constructor");
}
_ => return false,
}
true
}
    /// Looks for an associated item named `ident` that could plausibly have been
    /// meant instead of an unresolved path: first a field of the current `Self`
    /// type, then an item of the trait currently being implemented, and finally
    /// an associated item of the current trait, in that order. `filter_fn`
    /// restricts which resolutions are acceptable candidates.
    fn lookup_assoc_candidate<FilterFn>(
        &mut self,
        ident: Ident,
        ns: Namespace,
        filter_fn: FilterFn,
    ) -> Option<AssocSuggestion>
    where
        FilterFn: Fn(Res) -> bool,
    {
        // Peel reference types off the self type to reach the underlying path type.
        fn extract_node_id(t: &Ty) -> Option<NodeId> {
            match t.kind {
                TyKind::Path(None, _) => Some(t.id),
                TyKind::Rptr(_, ref mut_ty) => extract_node_id(&mut_ty.ty),
                // This doesn't handle the remaining `Ty` variants as they are not
                // that commonly the self_type, it might be interesting to provide
                // support for those in future.
                _ => None,
            }
        }
        // Fields are generally expected in the same contexts as locals.
        if filter_fn(Res::Local(ast::DUMMY_NODE_ID)) {
            if let Some(node_id) =
                self.diagnostic_metadata.current_self_type.as_ref().and_then(extract_node_id)
            {
                // Look for a field with the same name in the current self_type.
                if let Some(resolution) = self.r.partial_res_map.get(&node_id) {
                    match resolution.base_res() {
                        Res::Def(DefKind::Struct | DefKind::Union, did)
                            if resolution.unresolved_segments() == 0 =>
                        {
                            if let Some(field_names) = self.r.field_names.get(&did) {
                                if field_names
                                    .iter()
                                    .any(|&field_name| ident.name == field_name.node)
                                {
                                    return Some(AssocSuggestion::Field);
                                }
                            }
                        }
                        _ => {}
                    }
                }
            }
        }
        // Items of the trait currently being implemented, if any.
        if let Some(items) = self.diagnostic_metadata.current_trait_assoc_items {
            for assoc_item in items {
                if assoc_item.ident == ident {
                    return Some(match &assoc_item.kind {
                        ast::AssocItemKind::Const(..) => AssocSuggestion::AssocConst,
                        ast::AssocItemKind::Fn(box ast::FnKind(_, sig, ..))
                            if sig.decl.has_self() =>
                        {
                            AssocSuggestion::MethodWithSelf
                        }
                        ast::AssocItemKind::Fn(..) => AssocSuggestion::AssocFn,
                        ast::AssocItemKind::TyAlias(..) => AssocSuggestion::AssocType,
                        // Macro calls are opaque here; keep scanning the rest.
                        ast::AssocItemKind::MacCall(_) => continue,
                    });
                }
            }
        }
        // Look for associated items in the current trait.
        if let Some((module, _)) = self.current_trait_ref {
            if let Ok(binding) = self.r.resolve_ident_in_module(
                ModuleOrUniformRoot::Module(module),
                ident,
                ns,
                &self.parent_scope,
                false,
                module.span,
            ) {
                let res = binding.res();
                if filter_fn(res) {
                    if self.r.has_self.contains(&res.def_id()) {
                        return Some(AssocSuggestion::MethodWithSelf);
                    } else {
                        match res {
                            Res::Def(DefKind::AssocFn, _) => return Some(AssocSuggestion::AssocFn),
                            Res::Def(DefKind::AssocConst, _) => {
                                return Some(AssocSuggestion::AssocConst);
                            }
                            Res::Def(DefKind::AssocTy, _) => {
                                return Some(AssocSuggestion::AssocType);
                            }
                            _ => {}
                        }
                    }
                }
            }
        }
        None
    }
    /// Collects every name in scope that could be a typo correction for the last
    /// segment of `path` (locals, rib items, prelude/extern-crate names, primitive
    /// types, or — for multi-segment paths — items of the parent module), then
    /// returns the best Levenshtein-style match, if any, that differs from the
    /// misspelled name.
    fn lookup_typo_candidate(
        &mut self,
        path: &[Segment],
        ns: Namespace,
        filter_fn: &impl Fn(Res) -> bool,
        span: Span,
    ) -> Option<TypoSuggestion> {
        let mut names = Vec::new();
        if path.len() == 1 {
            // Search in lexical scope.
            // Walk backwards up the ribs in scope and collect candidates.
            for rib in self.ribs[ns].iter().rev() {
                // Locals and type parameters
                for (ident, &res) in &rib.bindings {
                    if filter_fn(res) {
                        names.push(TypoSuggestion::from_res(ident.name, res));
                    }
                }
                // Items in scope
                if let RibKind::ModuleRibKind(module) = rib.kind {
                    // Items from this module
                    self.r.add_module_candidates(module, &mut names, &filter_fn);
                    if let ModuleKind::Block(..) = module.kind {
                        // We can see through blocks
                    } else {
                        // Items from the prelude
                        if !module.no_implicit_prelude {
                            let extern_prelude = self.r.extern_prelude.clone();
                            names.extend(extern_prelude.iter().flat_map(|(ident, _)| {
                                self.r.crate_loader.maybe_process_path_extern(ident.name).and_then(
                                    |crate_id| {
                                        let crate_mod = Res::Def(
                                            DefKind::Mod,
                                            DefId { krate: crate_id, index: CRATE_DEF_INDEX },
                                        );
                                        if filter_fn(crate_mod) {
                                            Some(TypoSuggestion::from_res(ident.name, crate_mod))
                                        } else {
                                            None
                                        }
                                    },
                                )
                            }));
                            if let Some(prelude) = self.r.prelude {
                                self.r.add_module_candidates(prelude, &mut names, &filter_fn);
                            }
                        }
                        // A non-block module terminates the lexical walk.
                        break;
                    }
                }
            }
            // Add primitive types to the mix
            if filter_fn(Res::PrimTy(PrimTy::Bool)) {
                names.extend(
                    PrimTy::ALL.iter().map(|prim_ty| {
                        TypoSuggestion::from_res(prim_ty.name(), Res::PrimTy(*prim_ty))
                    }),
                )
            }
        } else {
            // Search in module.
            let mod_path = &path[..path.len() - 1];
            if let PathResult::Module(module) =
                self.resolve_path(mod_path, Some(TypeNS), false, span, CrateLint::No)
            {
                if let ModuleOrUniformRoot::Module(module) = module {
                    self.r.add_module_candidates(module, &mut names, &filter_fn);
                }
            }
        }
        let name = path[path.len() - 1].ident.name;
        // Make sure error reporting is deterministic.
        names.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
        match find_best_match_for_name(
            &names.iter().map(|suggestion| suggestion.candidate).collect::<Vec<Symbol>>(),
            name,
            None,
        ) {
            Some(found) if found != name => {
                names.into_iter().find(|suggestion| suggestion.candidate == found)
            }
            _ => None,
        }
    }
// Returns the name of the Rust type approximately corresponding to
// a type name in another programming language.
fn likely_rust_type(path: &[Segment]) -> Option<Symbol> {
let name = path[path.len() - 1].ident.as_str();
// Common Java types
Some(match &*name {
"byte" => sym::u8, // In Java, bytes are signed, but in practice one almost always wants unsigned bytes.
"short" => sym::i16,
"boolean" => sym::bool,
"int" => sym::i32,
"long" => sym::i64,
"float" => sym::f32,
"double" => sym::f64,
_ => return None,
})
}
/// Only used in a specific case of type ascription suggestions
fn get_colon_suggestion_span(&self, start: Span) -> Span {
let sm = self.r.session.source_map();
start.to(sm.next_point(start))
}
    /// When the failing path appears right after a type ascription `:`, suggests
    /// the likely intended token instead (`;` across lines, `::` for `foo:bar`,
    /// or `let ... = ...` when an `=` follows). Returns `true` when a
    /// "expecting a type here" label was added to `err`.
    fn type_ascription_suggestion(&self, err: &mut DiagnosticBuilder<'_>, base_span: Span) -> bool {
        let sm = self.r.session.source_map();
        let base_snippet = sm.span_to_snippet(base_span);
        if let Some(&sp) = self.diagnostic_metadata.current_type_ascription.last() {
            if let Ok(snippet) = sm.span_to_snippet(sp) {
                let len = snippet.trim_end().len() as u32;
                if snippet.trim() == ":" {
                    // Span of just the `:` token itself.
                    let colon_sp =
                        sp.with_lo(sp.lo() + BytePos(len - 1)).with_hi(sp.lo() + BytePos(len));
                    let mut show_label = true;
                    if sm.is_multiline(sp) {
                        err.span_suggestion_short(
                            colon_sp,
                            "maybe you meant to write `;` here",
                            ";".to_string(),
                            Applicability::MaybeIncorrect,
                        );
                    } else {
                        let after_colon_sp =
                            self.get_colon_suggestion_span(colon_sp.shrink_to_hi());
                        if snippet.len() == 1 {
                            // `foo:bar`
                            err.span_suggestion(
                                colon_sp,
                                "maybe you meant to write a path separator here",
                                "::".to_string(),
                                Applicability::MaybeIncorrect,
                            );
                            show_label = false;
                            // Record the suggestion so later passes don't repeat it;
                            // a duplicate insertion means something went wrong upstream.
                            if !self
                                .r
                                .session
                                .parse_sess
                                .type_ascription_path_suggestions
                                .borrow_mut()
                                .insert(colon_sp)
                            {
                                err.delay_as_bug();
                            }
                        }
                        if let Ok(base_snippet) = base_snippet {
                            let mut sp = after_colon_sp;
                            // Scan ahead a bounded number of points for an `=`.
                            for _ in 0..100 {
                                // Try to find an assignment
                                sp = sm.next_point(sp);
                                let snippet = sm.span_to_snippet(sp.to(sm.next_point(sp)));
                                match snippet {
                                    Ok(ref x) if x.as_str() == "=" => {
                                        err.span_suggestion(
                                            base_span,
                                            "maybe you meant to write an assignment here",
                                            format!("let {}", base_snippet),
                                            Applicability::MaybeIncorrect,
                                        );
                                        show_label = false;
                                        break;
                                    }
                                    Ok(ref x) if x.as_str() == "\n" => break,
                                    Err(_) => break,
                                    Ok(_) => {}
                                }
                            }
                        }
                    }
                    if show_label {
                        err.span_label(
                            base_span,
                            "expecting a type here because of type ascription",
                        );
                    }
                    return show_label;
                }
            }
        }
        false
    }
    /// Breadth-first search from the crate root for the module with the given
    /// `def_id`, returning it together with an `ImportSuggestion` describing the
    /// locally-visible path that reaches it. Only modules reachable through
    /// locally visible bindings are considered.
    fn find_module(&mut self, def_id: DefId) -> Option<(Module<'a>, ImportSuggestion)> {
        let mut result = None;
        let mut seen_modules = FxHashSet::default();
        // Worklist entries pair a module with the path segments used to reach it.
        let mut worklist = vec![(self.r.graph_root, Vec::new())];
        while let Some((in_module, path_segments)) = worklist.pop() {
            // abort if the module is already found
            if result.is_some() {
                break;
            }
            in_module.for_each_child(self.r, |_, ident, _, name_binding| {
                // abort if the module is already found or if name_binding is private external
                if result.is_some() || !name_binding.vis.is_visible_locally() {
                    return;
                }
                if let Some(module) = name_binding.module() {
                    // form the path
                    let mut path_segments = path_segments.clone();
                    path_segments.push(ast::PathSegment::from_ident(ident));
                    let module_def_id = module.def_id().unwrap();
                    if module_def_id == def_id {
                        let path =
                            Path { span: name_binding.span, segments: path_segments, tokens: None };
                        result = Some((
                            module,
                            ImportSuggestion {
                                did: Some(def_id),
                                descr: "module",
                                path,
                                accessible: true,
                            },
                        ));
                    } else {
                        // add the module to the lookup
                        if seen_modules.insert(module_def_id) {
                            worklist.push((module, path_segments));
                        }
                    }
                }
            });
        }
        result
    }
fn collect_enum_ctors(&mut self, def_id: DefId) -> Option<Vec<(Path, DefId, CtorKind)>> {
self.find_module(def_id).map(|(enum_module, enum_import_suggestion)| {
let mut variants = Vec::new();
enum_module.for_each_child(self.r, |_, ident, _, name_binding| {
if let Res::Def(DefKind::Ctor(CtorOf::Variant, kind), def_id) = name_binding.res() {
let mut segms = enum_import_suggestion.path.segments.clone();
segms.push(ast::PathSegment::from_ident(ident));
let path = Path { span: name_binding.span, segments: segms, tokens: None };
variants.push((path, def_id, kind));
}
});
variants
})
}
    /// Adds a suggestion for using an enum's variant when an enum is used instead.
    ///
    /// When the enum is used in call/tuple-pattern position only tuple variants
    /// are suggested verbatim; elsewhere, fieldless variants are suggested
    /// directly and variants with fields get placeholder bodies.
    fn suggest_using_enum_variant(
        &mut self,
        err: &mut DiagnosticBuilder<'a>,
        source: PathSource<'_>,
        def_id: DefId,
        span: Span,
    ) {
        let variants = match self.collect_enum_ctors(def_id) {
            Some(variants) => variants,
            None => {
                // Couldn't enumerate the variants; fall back to a generic note.
                err.note("you might have meant to use one of the enum's variants");
                return;
            }
        };
        let suggest_only_tuple_variants =
            matches!(source, PathSource::TupleStruct(..)) || source.is_call();
        if suggest_only_tuple_variants {
            // Suggest only tuple variants regardless of whether they have fields and do not
            // suggest path with added parenthesis.
            let mut suggestable_variants = variants
                .iter()
                .filter(|(.., kind)| *kind == CtorKind::Fn)
                .map(|(variant, ..)| path_names_to_string(variant))
                .collect::<Vec<_>>();
            let non_suggestable_variant_count = variants.len() - suggestable_variants.len();
            let source_msg = if source.is_call() {
                "to construct"
            } else if matches!(source, PathSource::TupleStruct(..)) {
                "to match against"
            } else {
                unreachable!()
            };
            if !suggestable_variants.is_empty() {
                let msg = if non_suggestable_variant_count == 0 && suggestable_variants.len() == 1 {
                    format!("try {} the enum's variant", source_msg)
                } else {
                    format!("try {} one of the enum's variants", source_msg)
                };
                err.span_suggestions(
                    span,
                    &msg,
                    suggestable_variants.drain(..),
                    Applicability::MaybeIncorrect,
                );
            }
            // If the enum has no tuple variants..
            if non_suggestable_variant_count == variants.len() {
                err.help(&format!("the enum has no tuple variants {}", source_msg));
            }
            // If there are also non-tuple variants..
            if non_suggestable_variant_count == 1 {
                err.help(&format!(
                    "you might have meant {} the enum's non-tuple variant",
                    source_msg
                ));
            } else if non_suggestable_variant_count >= 1 {
                err.help(&format!(
                    "you might have meant {} one of the enum's non-tuple variants",
                    source_msg
                ));
            }
        } else {
            // A variant needs a `/* fields */` placeholder unless it is a unit
            // variant or provably has no fields.
            let needs_placeholder = |def_id: DefId, kind: CtorKind| {
                let has_no_fields = self.r.field_names.get(&def_id).map_or(false, |f| f.is_empty());
                match kind {
                    CtorKind::Const => false,
                    CtorKind::Fn | CtorKind::Fictive if has_no_fields => false,
                    _ => true,
                }
            };
            let mut suggestable_variants = variants
                .iter()
                .filter(|(_, def_id, kind)| !needs_placeholder(*def_id, *kind))
                .map(|(variant, _, kind)| (path_names_to_string(variant), kind))
                .map(|(variant, kind)| match kind {
                    CtorKind::Const => variant,
                    CtorKind::Fn => format!("({}())", variant),
                    CtorKind::Fictive => format!("({} {{}})", variant),
                })
                .collect::<Vec<_>>();
            if !suggestable_variants.is_empty() {
                let msg = if suggestable_variants.len() == 1 {
                    "you might have meant to use the following enum variant"
                } else {
                    "you might have meant to use one of the following enum variants"
                };
                err.span_suggestions(
                    span,
                    msg,
                    suggestable_variants.drain(..),
                    Applicability::MaybeIncorrect,
                );
            }
            let mut suggestable_variants_with_placeholders = variants
                .iter()
                .filter(|(_, def_id, kind)| needs_placeholder(*def_id, *kind))
                .map(|(variant, _, kind)| (path_names_to_string(variant), kind))
                .filter_map(|(variant, kind)| match kind {
                    CtorKind::Fn => Some(format!("({}(/* fields */))", variant)),
                    CtorKind::Fictive => Some(format!("({} {{ /* fields */ }})", variant)),
                    _ => None,
                })
                .collect::<Vec<_>>();
            if !suggestable_variants_with_placeholders.is_empty() {
                let msg = match (
                    suggestable_variants.is_empty(),
                    suggestable_variants_with_placeholders.len(),
                ) {
                    (true, 1) => "the following enum variant is available",
                    (true, _) => "the following enum variants are available",
                    (false, 1) => "alternatively, the following enum variant is available",
                    (false, _) => "alternatively, the following enum variants are also available",
                };
                err.span_suggestions(
                    span,
                    msg,
                    suggestable_variants_with_placeholders.drain(..),
                    Applicability::HasPlaceholders,
                );
            }
        };
        if def_id.is_local() {
            if let Some(span) = self.def_span(def_id) {
                err.span_note(span, "the enum is defined here");
            }
        }
    }
    /// When an unresolved single-segment path looks like a missing type parameter
    /// (e.g. a single uppercase letter, or any name while generics are being
    /// processed), returns the span, message, suggestion text, and applicability
    /// for adding it to the current item's generics; `None` otherwise.
    crate fn report_missing_type_error(
        &self,
        path: &[Segment],
    ) -> Option<(Span, &'static str, String, Applicability)> {
        let (ident, span) = match path {
            [segment] if !segment.has_generic_args => {
                (segment.ident.to_string(), segment.ident.span)
            }
            _ => return None,
        };
        // "Single uppercase char" here means exactly one character, uppercase.
        let mut iter = ident.chars().map(|c| c.is_uppercase());
        let single_uppercase_char =
            matches!(iter.next(), Some(true)) && matches!(iter.next(), None);
        if !self.diagnostic_metadata.currently_processing_generics && !single_uppercase_char {
            return None;
        }
        match (self.diagnostic_metadata.current_item, single_uppercase_char, self.diagnostic_metadata.currently_processing_generics) {
            (Some(Item { kind: ItemKind::Fn(..), ident, .. }), _, _) if ident.name == sym::main => {
                // Ignore `fn main()` as we don't want to suggest `fn main<T>()`
            }
            (
                Some(Item {
                    kind:
                        kind @ ItemKind::Fn(..)
                        | kind @ ItemKind::Enum(..)
                        | kind @ ItemKind::Struct(..)
                        | kind @ ItemKind::Union(..),
                    ..
                }),
                true, _
            )
            // Without the 2nd `true`, we'd suggest `impl <T>` for `impl T` when a type `T` isn't found
            | (Some(Item { kind: kind @ ItemKind::Impl(..), .. }), true, true)
            | (Some(Item { kind, .. }), false, _) => {
                // Likely missing type parameter.
                if let Some(generics) = kind.generics() {
                    if span.overlaps(generics.span) {
                        // Avoid the following:
                        // error[E0405]: cannot find trait `A` in this scope
                        //  --> $DIR/typo-suggestion-named-underscore.rs:CC:LL
                        //   |
                        // L | fn foo<T: A>(x: T) {} // Shouldn't suggest underscore
                        //   |           ^- help: you might be missing a type parameter: `, A`
                        //   |           |
                        //   |           not found in this scope
                        return None;
                    }
                    let msg = "you might be missing a type parameter";
                    // Append after the last parameter (or its last bound / const
                    // default), or introduce a fresh `<...>` list if there are none.
                    let (span, sugg) = if let [.., param] = &generics.params[..] {
                        let span = if let [.., bound] = &param.bounds[..] {
                            bound.span()
                        } else if let GenericParam {
                            kind: GenericParamKind::Const { ty, kw_span: _, default }, ..
                        } = param {
                            default.as_ref().map(|def| def.value.span).unwrap_or(ty.span)
                        } else {
                            param.ident.span
                        };
                        (span, format!(", {}", ident))
                    } else {
                        (generics.span, format!("<{}>", ident))
                    };
                    // Do not suggest if this is coming from macro expansion.
                    if !span.from_expansion() {
                        return Some((
                            span.shrink_to_hi(),
                            msg,
                            sugg,
                            Applicability::MaybeIncorrect,
                        ));
                    }
                }
            }
            _ => {}
        }
        None
    }
/// Given the target `label`, search the `rib_index`th label rib for similarly named labels,
/// optionally returning the closest match and whether it is reachable.
crate fn suggestion_for_label_in_rib(
&self,
rib_index: usize,
label: Ident,
) -> Option<LabelSuggestion> {
// Are ribs from this `rib_index` within scope?
let within_scope = self.is_label_valid_from_rib(rib_index);
let rib = &self.label_ribs[rib_index];
let names = rib
.bindings
.iter()
.filter(|(id, _)| id.span.ctxt() == label.span.ctxt())
.map(|(id, _)| id.name)
.collect::<Vec<Symbol>>();
find_best_match_for_name(&names, label.name, None).map(|symbol| {
// Upon finding a similar name, get the ident that it was from - the span
// contained within helps make a useful diagnostic. In addition, determine
// whether this candidate is within scope.
let (ident, _) = rib.bindings.iter().find(|(ident, _)| ident.name == symbol).unwrap();
(*ident, within_scope)
})
}
}
impl<'tcx> LifetimeContext<'_, 'tcx> {
    /// Builds (but does not emit) an E0106 "missing lifetime specifier" error
    /// pointing at `spans`; `count` drives the pluralization of the message.
    crate fn report_missing_lifetime_specifiers(
        &self,
        spans: Vec<Span>,
        count: usize,
    ) -> DiagnosticBuilder<'tcx> {
        struct_span_err!(
            self.tcx.sess,
            spans,
            E0106,
            "missing lifetime specifier{}",
            pluralize!(count)
        )
    }
    /// Emits E0261 for a lifetime name that was used but never declared, with a
    /// suggestion for each place (generics list or higher-ranked binder) where
    /// the lifetime could be introduced.
    crate fn emit_undeclared_lifetime_error(&self, lifetime_ref: &hir::Lifetime) {
        let mut err = struct_span_err!(
            self.tcx.sess,
            lifetime_ref.span,
            E0261,
            "use of undeclared lifetime name `{}`",
            lifetime_ref
        );
        err.span_label(lifetime_ref.span, "undeclared lifetime");
        let mut suggests_in_band = false;
        let mut suggest_note = true;
        for missing in &self.missing_named_lifetime_spots {
            match missing {
                MissingLifetimeSpot::Generics(generics) => {
                    // Insert before the first "real" parameter (skipping impl-Trait
                    // synthetics and elided lifetimes); if none, add a `<'x>` list.
                    let (span, sugg) = if let Some(param) = generics.params.iter().find(|p| {
                        !matches!(
                            p.kind,
                            hir::GenericParamKind::Type {
                                synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
                                ..
                            } | hir::GenericParamKind::Lifetime {
                                kind: hir::LifetimeParamKind::Elided,
                            }
                        )
                    }) {
                        (param.span.shrink_to_lo(), format!("{}, ", lifetime_ref))
                    } else {
                        suggests_in_band = true;
                        (generics.span, format!("<{}>", lifetime_ref))
                    };
                    if !span.from_expansion() {
                        err.span_suggestion(
                            span,
                            &format!("consider introducing lifetime `{}` here", lifetime_ref),
                            sugg,
                            Applicability::MaybeIncorrect,
                        );
                    } else if suggest_note {
                        suggest_note = false; // Avoid displaying the same help multiple times.
                        err.span_label(
                            span,
                            &format!(
                                "lifetime `{}` is missing in item created through this procedural \
                                 macro",
                                lifetime_ref,
                            ),
                        );
                    }
                }
                MissingLifetimeSpot::HigherRanked { span, span_type } => {
                    err.span_suggestion(
                        *span,
                        &format!(
                            "consider making the {} lifetime-generic with a new `{}` lifetime",
                            span_type.descr(),
                            lifetime_ref
                        ),
                        span_type.suggestion(&lifetime_ref.to_string()),
                        Applicability::MaybeIncorrect,
                    );
                    err.note(
                        "for more information on higher-ranked polymorphism, visit \
                         https://doc.rust-lang.org/nomicon/hrtb.html",
                    );
                }
                _ => {}
            }
        }
        if self.tcx.sess.is_nightly_build()
            && !self.tcx.features().in_band_lifetimes
            && suggests_in_band
        {
            err.help(
                "if you want to experiment with in-band lifetime bindings, \
                 add `#![feature(in_band_lifetimes)]` to the crate attributes",
            );
        }
        err.emit();
    }
// FIXME(const_generics): This patches over a ICE caused by non-'static lifetimes in const
// generics. We are disallowing this until we can decide on how we want to handle non-'static
// lifetimes in const generics. See issue #74052 for discussion.
crate fn emit_non_static_lt_in_const_generic_error(&self, lifetime_ref: &hir::Lifetime) {
let mut err = struct_span_err!(
self.tcx.sess,
lifetime_ref.span,
E0771,
"use of non-static lifetime `{}` in const generic",
lifetime_ref
);
err.note(
"for more information, see issue #74052 \
<https://github.com/rust-lang/rust/issues/74052>",
);
err.emit();
}
crate fn is_trait_ref_fn_scope(&mut self, trait_ref: &'tcx hir::PolyTraitRef<'tcx>) -> bool {
if let def::Res::Def(_, did) = trait_ref.trait_ref.path.res {
if [
self.tcx.lang_items().fn_once_trait(),
self.tcx.lang_items().fn_trait(),
self.tcx.lang_items().fn_mut_trait(),
]
.contains(&Some(did))
{
let (span, span_type) = match &trait_ref.bound_generic_params {
[] => (trait_ref.span.shrink_to_lo(), ForLifetimeSpanType::BoundEmpty),
[.., bound] => (bound.span.shrink_to_hi(), ForLifetimeSpanType::BoundTail),
};
self.missing_named_lifetime_spots
.push(MissingLifetimeSpot::HigherRanked { span, span_type });
return true;
}
};
false
}
crate fn add_missing_lifetime_specifiers_label(
&self,
err: &mut DiagnosticBuilder<'_>,
mut spans_with_counts: Vec<(Span, usize)>,
lifetime_names: &FxHashSet<Symbol>,
lifetime_spans: Vec<Span>,
params: &[ElisionFailureInfo],
) {
let snippets: Vec<Option<String>> = spans_with_counts
.iter()
.map(|(span, _)| self.tcx.sess.source_map().span_to_snippet(*span).ok())
.collect();
// Empty generics are marked with a span of "<", but since from now on
// that information is in the snippets it can be removed from the spans.
for ((span, _), snippet) in spans_with_counts.iter_mut().zip(&snippets) {
if snippet.as_deref() == Some("<") {
*span = span.shrink_to_hi();
}
}
for &(span, count) in &spans_with_counts {
err.span_label(
span,
format!(
"expected {} lifetime parameter{}",
if count == 1 { "named".to_string() } else { count.to_string() },
pluralize!(count),
),
);
}
let suggest_existing =
|err: &mut DiagnosticBuilder<'_>,
name: &str,
formatters: Vec<Option<Box<dyn Fn(&str) -> String>>>| {
if let Some(MissingLifetimeSpot::HigherRanked { span: for_span, span_type }) =
self.missing_named_lifetime_spots.iter().rev().next()
{
// When we have `struct S<'a>(&'a dyn Fn(&X) -> &X);` we want to not only suggest
// using `'a`, but also introduce the concept of HRLTs by suggesting
// `struct S<'a>(&'a dyn for<'b> Fn(&X) -> &'b X);`. (#72404)
let mut introduce_suggestion = vec![];
let a_to_z_repeat_n = |n| {
(b'a'..=b'z').map(move |c| {
let mut s = '\''.to_string();
s.extend(std::iter::repeat(char::from(c)).take(n));
s
})
};
// If all single char lifetime names are present, we wrap around and double the chars.
let lt_name = (1..)
.flat_map(a_to_z_repeat_n)
.find(|lt| !lifetime_names.contains(&Symbol::intern(<)))
.unwrap();
let msg = format!(
"consider making the {} lifetime-generic with a new `{}` lifetime",
span_type.descr(),
lt_name,
);
err.note(
"for more information on higher-ranked polymorphism, visit \
https://doc.rust-lang.org/nomicon/hrtb.html",
);
let for_sugg = span_type.suggestion(<_name);
for param in params {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(param.span)
{
if snippet.starts_with('&') && !snippet.starts_with("&'") {
introduce_suggestion
.push((param.span, format!("&{} {}", lt_name, &snippet[1..])));
} else if let Some(stripped) = snippet.strip_prefix("&'_ ") {
introduce_suggestion
.push((param.span, format!("&{} {}", lt_name, stripped)));
}
}
}
introduce_suggestion.push((*for_span, for_sugg));
for ((span, _), formatter) in spans_with_counts.iter().zip(formatters.iter()) {
if let Some(formatter) = formatter {
introduce_suggestion.push((*span, formatter(<_name)));
}
}
err.multipart_suggestion_with_style(
&msg,
introduce_suggestion,
Applicability::MaybeIncorrect,
SuggestionStyle::ShowAlways,
);
}
let spans_suggs: Vec<_> = formatters
.into_iter()
.zip(spans_with_counts.iter())
.filter_map(|(fmt, (span, _))| {
if let Some(formatter) = fmt { Some((formatter, span)) } else { None }
})
.map(|(formatter, span)| (*span, formatter(name)))
.collect();
err.multipart_suggestion_with_style(
&format!(
"consider using the `{}` lifetime",
lifetime_names.iter().next().unwrap()
),
spans_suggs,
Applicability::MaybeIncorrect,
SuggestionStyle::ShowAlways,
);
};
let suggest_new = |err: &mut DiagnosticBuilder<'_>, suggs: Vec<Option<String>>| {
for missing in self.missing_named_lifetime_spots.iter().rev() {
let mut introduce_suggestion = vec![];
let msg;
let should_break;
introduce_suggestion.push(match missing {
MissingLifetimeSpot::Generics(generics) => {
if generics.span == DUMMY_SP {
// Account for malformed generics in the HIR. This shouldn't happen,
// but if we make a mistake elsewhere, mainly by keeping something in
// `missing_named_lifetime_spots` that we shouldn't, like associated
// `const`s or making a mistake in the AST lowering we would provide
// non-sensical suggestions. Guard against that by skipping these.
// (#74264)
continue;
}
msg = "consider introducing a named lifetime parameter".to_string();
should_break = true;
if let Some(param) = generics.params.iter().find(|p| {
!matches!(
p.kind,
hir::GenericParamKind::Type {
synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
..
}
)
}) {
(param.span.shrink_to_lo(), "'a, ".to_string())
} else {
(generics.span, "<'a>".to_string())
}
}
MissingLifetimeSpot::HigherRanked { span, span_type } => {
msg = format!(
"consider making the {} lifetime-generic with a new `'a` lifetime",
span_type.descr(),
);
should_break = false;
err.note(
"for more information on higher-ranked polymorphism, visit \
https://doc.rust-lang.org/nomicon/hrtb.html",
);
(*span, span_type.suggestion("'a"))
}
MissingLifetimeSpot::Static => {
let mut spans_suggs = Vec::new();
for ((span, count), snippet) in
spans_with_counts.iter().copied().zip(snippets.iter())
{
let (span, sugg) = match snippet.as_deref() {
Some("&") => (span.shrink_to_hi(), "'static ".to_owned()),
Some("'_") => (span, "'static".to_owned()),
Some(snippet) if !snippet.ends_with('>') => {
if snippet == "" {
(
span,
std::iter::repeat("'static")
.take(count)
.collect::<Vec<_>>()
.join(", "),
)
} else if snippet == "<" || snippet == "(" {
(
span.shrink_to_hi(),
std::iter::repeat("'static")
.take(count)
.collect::<Vec<_>>()
.join(", "),
)
} else {
(
span.shrink_to_hi(),
format!(
"<{}>",
std::iter::repeat("'static")
.take(count)
.collect::<Vec<_>>()
.join(", "),
),
)
}
}
_ => continue,
};
spans_suggs.push((span, sugg.to_string()));
}
err.multipart_suggestion_with_style(
"consider using the `'static` lifetime",
spans_suggs,
Applicability::MaybeIncorrect,
SuggestionStyle::ShowAlways,
);
continue;
}
});
for param in params {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(param.span) {
if snippet.starts_with('&') && !snippet.starts_with("&'") {
introduce_suggestion
.push((param.span, format!("&'a {}", &snippet[1..])));
} else if let Some(stripped) = snippet.strip_prefix("&'_ ") {
introduce_suggestion.push((param.span, format!("&'a {}", &stripped)));
}
}
}
for ((span, _), sugg) in spans_with_counts.iter().copied().zip(suggs.iter()) {
if let Some(sugg) = sugg {
introduce_suggestion.push((span, sugg.to_string()));
}
}
err.multipart_suggestion_with_style(
&msg,
introduce_suggestion,
Applicability::MaybeIncorrect,
SuggestionStyle::ShowAlways,
);
if should_break {
break;
}
}
};
let lifetime_names: Vec<_> = lifetime_names.iter().collect();
match &lifetime_names[..] {
[name] => {
let mut suggs: Vec<Option<Box<dyn Fn(&str) -> String>>> = Vec::new();
for (snippet, (_, count)) in snippets.iter().zip(spans_with_counts.iter().copied())
{
suggs.push(match snippet.as_deref() {
Some("&") => Some(Box::new(|name| format!("&{} ", name))),
Some("'_") => Some(Box::new(|n| n.to_string())),
Some("") => Some(Box::new(move |n| format!("{}, ", n).repeat(count))),
Some("<") => Some(Box::new(move |n| {
std::iter::repeat(n).take(count).collect::<Vec<_>>().join(", ")
})),
Some(snippet) if !snippet.ends_with('>') => Some(Box::new(move |name| {
format!(
"{}<{}>",
snippet,
std::iter::repeat(name.to_string())
.take(count)
.collect::<Vec<_>>()
.join(", ")
)
})),
_ => None,
});
}
suggest_existing(err, &name.as_str()[..], suggs);
}
[] => {
let mut suggs = Vec::new();
for (snippet, (_, count)) in
snippets.iter().cloned().zip(spans_with_counts.iter().copied())
{
suggs.push(match snippet.as_deref() {
Some("&") => Some("&'a ".to_string()),
Some("'_") => Some("'a".to_string()),
Some("") => {
Some(std::iter::repeat("'a, ").take(count).collect::<Vec<_>>().join(""))
}
Some("<") => {
Some(std::iter::repeat("'a").take(count).collect::<Vec<_>>().join(", "))
}
Some(snippet) => Some(format!(
"{}<{}>",
snippet,
std::iter::repeat("'a").take(count).collect::<Vec<_>>().join(", "),
)),
None => None,
});
}
suggest_new(err, suggs);
}
lts if lts.len() > 1 => {
err.span_note(lifetime_spans, "these named lifetimes are available to use");
let mut spans_suggs: Vec<_> = Vec::new();
for ((span, _), snippet) in spans_with_counts.iter().copied().zip(snippets.iter()) {
match snippet.as_deref() {
Some("") => spans_suggs.push((span, "'lifetime, ".to_string())),
Some("&") => spans_suggs.push((span, "&'lifetime ".to_string())),
_ => {}
}
}
if spans_suggs.len() > 0 {
// This happens when we have `Foo<T>` where we point at the space before `T`,
// but this can be confusing so we give a suggestion with placeholders.
err.multipart_suggestion_with_style(
"consider using one of the available lifetimes here",
spans_suggs,
Applicability::HasPlaceholders,
SuggestionStyle::ShowAlways,
);
}
}
_ => unreachable!(),
}
}
/// Non-static lifetimes are prohibited in anonymous constants under `min_const_generics`.
/// This function will emit an error if `const_generics` is not enabled, the body identified by
/// `body_id` is an anonymous constant and `lifetime_ref` is non-static.
crate fn maybe_emit_forbidden_non_static_lifetime_error(
&self,
body_id: hir::BodyId,
lifetime_ref: &'tcx hir::Lifetime,
) {
let is_anon_const = matches!(
self.tcx.def_kind(self.tcx.hir().body_owner_def_id(body_id)),
hir::def::DefKind::AnonConst
);
let is_allowed_lifetime = matches!(
lifetime_ref.name,
hir::LifetimeName::Implicit | hir::LifetimeName::Static | hir::LifetimeName::Underscore
);
if !self.tcx.lazy_normalization() && is_anon_const && !is_allowed_lifetime {
feature_err(
&self.tcx.sess.parse_sess,
sym::const_generics,
lifetime_ref.span,
"a non-static lifetime is not allowed in a `const`",
)
.emit();
}
}
}
| 43.832258 | 162 | 0.428056 |
679146c9cb351639b9eb859e275de5ad1124610c | 200 | use warp::Filter;
#[tokio::main]
async fn main() {
let hello = warp::path!("hello" / String).map(|name| format!("Hello, {}!", name));
warp::serve(hello).run(([127, 0, 0, 1], 3030)).await;
}
| 22.222222 | 86 | 0.57 |
5ba847d3515934e2167f3a684258e798f16bc64a | 2,369 | //! HIR (previously known as descriptors) provides a high-level object oriented
//! access to Rust code.
//!
//! The principal difference between HIR and syntax trees is that HIR is bound
//! to a particular crate instance. That is, it has cfg flags and features
//! applied. So, the relation between syntax and HIR is many-to-one.
#![recursion_limit = "512"]
macro_rules! impl_froms {
($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
$(
impl From<$v> for $e {
fn from(it: $v) -> $e {
$e::$v(it)
}
}
$($(
impl From<$sv> for $e {
fn from(it: $sv) -> $e {
$e::$v($v::$sv(it))
}
}
)*)?
)*
}
}
pub mod debug;
pub mod db;
pub mod source_binder;
mod ids;
mod adt;
mod traits;
mod type_alias;
mod ty;
mod impl_block;
mod expr;
mod lang_item;
pub mod generics;
mod resolve;
pub mod diagnostics;
mod util;
mod from_id;
mod code_model;
pub mod from_source;
#[cfg(test)]
mod test_db;
#[cfg(test)]
mod marks;
use hir_expand::AstId;
use crate::{ids::MacroFileKind, resolve::Resolver};
pub use crate::{
adt::VariantDef,
code_model::{
attrs::{AttrDef, Attrs},
docs::{DocDef, Docs, Documentation},
src::{HasBodySource, HasSource},
Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum,
EnumVariant, FieldSource, FnData, Function, GenericParam, HasBody, Local, MacroDef, Module,
ModuleDef, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
},
expr::ExprScopes,
from_source::FromSource,
generics::GenericDef,
ids::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile},
impl_block::ImplBlock,
resolve::ScopeDef,
source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
ty::{
display::HirDisplay,
primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness, Uncertain},
ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
},
};
pub use hir_def::{
builtin_type::BuiltinType,
nameres::{per_ns::PerNs, raw::ImportId},
path::{Path, PathKind},
type_ref::Mutability,
};
pub use hir_expand::{either::Either, name::Name, Source};
| 25.75 | 99 | 0.607851 |
d7524193c8aea7e274b17655d79792b1df066642 | 2,689 | // Copyright 2017, Igor Shaula
// Licensed under the MIT License <LICENSE or
// http://opensource.org/licenses/MIT>. This file
// may not be copied, modified, or distributed
// except according to those terms.
use super::enums::*;
use super::RegKey;
use std::error::Error;
use std::fmt;
use std::io;
use winapi::shared::minwindef::DWORD;
macro_rules! read_value {
($s:ident) => {
match mem::replace(&mut $s.f_name, None) {
Some(ref s) => $s.key.get_value(s).map_err(DecoderError::IoError),
None => Err(DecoderError::NoFieldName),
}
};
}
macro_rules! parse_string {
($s:ident) => {{
let s: String = read_value!($s)?;
s.parse()
.map_err(|e| DecoderError::ParseError(format!("{:?}", e)))
}};
}
macro_rules! no_impl {
($e:expr) => {
Err(DecoderError::DecodeNotImplemented($e.to_owned()))
};
}
#[cfg(feature = "serialization-serde")]
mod serialization_serde;
#[derive(Debug)]
pub enum DecoderError {
DecodeNotImplemented(String),
DeserializerError(String),
IoError(io::Error),
ParseError(String),
NoFieldName,
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for DecoderError {
fn description(&self) -> &str {
use self::DecoderError::*;
match *self {
DecodeNotImplemented(ref s) | DeserializerError(ref s) | ParseError(ref s) => s,
IoError(ref e) => e.description(),
NoFieldName => "No field name",
}
}
}
impl From<io::Error> for DecoderError {
fn from(err: io::Error) -> DecoderError {
DecoderError::IoError(err)
}
}
pub type DecodeResult<T> = Result<T, DecoderError>;
#[derive(Debug)]
enum DecoderReadingState {
WaitingForKey,
WaitingForValue,
}
#[derive(Debug)]
enum DecoderEnumerationState {
EnumeratingKeys(DWORD),
EnumeratingValues(DWORD),
}
#[derive(Debug)]
pub struct Decoder {
key: RegKey,
f_name: Option<String>,
reading_state: DecoderReadingState,
enumeration_state: DecoderEnumerationState,
}
const DECODER_SAM: DWORD = KEY_QUERY_VALUE | KEY_ENUMERATE_SUB_KEYS;
impl Decoder {
pub fn from_key(key: &RegKey) -> DecodeResult<Decoder> {
key.open_subkey_with_flags("", DECODER_SAM)
.map(Decoder::new)
.map_err(DecoderError::IoError)
}
fn new(key: RegKey) -> Decoder {
Decoder {
key: key,
f_name: None,
reading_state: DecoderReadingState::WaitingForKey,
enumeration_state: DecoderEnumerationState::EnumeratingKeys(0),
}
}
}
| 24.225225 | 92 | 0.62328 |
ab625e7bb5a1eaef7376190fd03a509575bd602c | 34,895 | // This file is generated by rust-protobuf 2.17.0. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![rustfmt::skip]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `proto/gerdu.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_17_0;
#[derive(PartialEq,Clone,Default)]
pub struct PutResponse {
// message fields
pub created: bool,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a PutResponse {
fn default() -> &'a PutResponse {
<PutResponse as ::protobuf::Message>::default_instance()
}
}
impl PutResponse {
pub fn new() -> PutResponse {
::std::default::Default::default()
}
// bool created = 1;
pub fn get_created(&self) -> bool {
self.created
}
pub fn clear_created(&mut self) {
self.created = false;
}
// Param is passed by value, moved
pub fn set_created(&mut self, v: bool) {
self.created = v;
}
}
impl ::protobuf::Message for PutResponse {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_bool()?;
self.created = tmp;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.created != false {
my_size += 2;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if self.created != false {
os.write_bool(1, self.created)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> PutResponse {
PutResponse::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
"created",
|m: &PutResponse| { &m.created },
|m: &mut PutResponse| { &mut m.created },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<PutResponse>(
"PutResponse",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static PutResponse {
static instance: ::protobuf::rt::LazyV2<PutResponse> = ::protobuf::rt::LazyV2::INIT;
instance.get(PutResponse::new)
}
}
impl ::protobuf::Clear for PutResponse {
fn clear(&mut self) {
self.created = false;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for PutResponse {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for PutResponse {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct PutRequest {
// message fields
pub key: ::std::string::String,
pub value: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a PutRequest {
fn default() -> &'a PutRequest {
<PutRequest as ::protobuf::Message>::default_instance()
}
}
impl PutRequest {
pub fn new() -> PutRequest {
::std::default::Default::default()
}
// string key = 1;
pub fn get_key(&self) -> &str {
&self.key
}
pub fn clear_key(&mut self) {
self.key.clear();
}
// Param is passed by value, moved
pub fn set_key(&mut self, v: ::std::string::String) {
self.key = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_key(&mut self) -> &mut ::std::string::String {
&mut self.key
}
// Take field
pub fn take_key(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.key, ::std::string::String::new())
}
// bytes value = 2;
pub fn get_value(&self) -> &[u8] {
&self.value
}
pub fn clear_value(&mut self) {
self.value.clear();
}
// Param is passed by value, moved
pub fn set_value(&mut self, v: ::std::vec::Vec<u8>) {
self.value = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_value(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.value
}
// Take field
pub fn take_value(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.value, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for PutRequest {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;
},
2 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.value)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.key.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.key);
}
if !self.value.is_empty() {
my_size += ::protobuf::rt::bytes_size(2, &self.value);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.key.is_empty() {
os.write_string(1, &self.key)?;
}
if !self.value.is_empty() {
os.write_bytes(2, &self.value)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> PutRequest {
PutRequest::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"key",
|m: &PutRequest| { &m.key },
|m: &mut PutRequest| { &mut m.key },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"value",
|m: &PutRequest| { &m.value },
|m: &mut PutRequest| { &mut m.value },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<PutRequest>(
"PutRequest",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static PutRequest {
static instance: ::protobuf::rt::LazyV2<PutRequest> = ::protobuf::rt::LazyV2::INIT;
instance.get(PutRequest::new)
}
}
impl ::protobuf::Clear for PutRequest {
fn clear(&mut self) {
self.key.clear();
self.value.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for PutRequest {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for PutRequest {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct GetRequest {
// message fields
pub key: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a GetRequest {
fn default() -> &'a GetRequest {
<GetRequest as ::protobuf::Message>::default_instance()
}
}
impl GetRequest {
pub fn new() -> GetRequest {
::std::default::Default::default()
}
// string key = 1;
pub fn get_key(&self) -> &str {
&self.key
}
pub fn clear_key(&mut self) {
self.key.clear();
}
// Param is passed by value, moved
pub fn set_key(&mut self, v: ::std::string::String) {
self.key = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_key(&mut self) -> &mut ::std::string::String {
&mut self.key
}
// Take field
pub fn take_key(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.key, ::std::string::String::new())
}
}
impl ::protobuf::Message for GetRequest {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.key.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.key);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.key.is_empty() {
os.write_string(1, &self.key)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> GetRequest {
GetRequest::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"key",
|m: &GetRequest| { &m.key },
|m: &mut GetRequest| { &mut m.key },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<GetRequest>(
"GetRequest",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static GetRequest {
static instance: ::protobuf::rt::LazyV2<GetRequest> = ::protobuf::rt::LazyV2::INIT;
instance.get(GetRequest::new)
}
}
impl ::protobuf::Clear for GetRequest {
fn clear(&mut self) {
self.key.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for GetRequest {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for GetRequest {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct GetResponse {
// message fields
pub value: ::std::vec::Vec<u8>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a GetResponse {
fn default() -> &'a GetResponse {
<GetResponse as ::protobuf::Message>::default_instance()
}
}
impl GetResponse {
pub fn new() -> GetResponse {
::std::default::Default::default()
}
// bytes value = 1;
pub fn get_value(&self) -> &[u8] {
&self.value
}
pub fn clear_value(&mut self) {
self.value.clear();
}
// Param is passed by value, moved
pub fn set_value(&mut self, v: ::std::vec::Vec<u8>) {
self.value = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_value(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.value
}
// Take field
pub fn take_value(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.value, ::std::vec::Vec::new())
}
}
impl ::protobuf::Message for GetResponse {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.value)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.value.is_empty() {
my_size += ::protobuf::rt::bytes_size(1, &self.value);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.value.is_empty() {
os.write_bytes(1, &self.value)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> GetResponse {
GetResponse::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"value",
|m: &GetResponse| { &m.value },
|m: &mut GetResponse| { &mut m.value },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<GetResponse>(
"GetResponse",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static GetResponse {
static instance: ::protobuf::rt::LazyV2<GetResponse> = ::protobuf::rt::LazyV2::INIT;
instance.get(GetResponse::new)
}
}
impl ::protobuf::Clear for GetResponse {
fn clear(&mut self) {
self.value.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for GetResponse {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for GetResponse {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct DeleteRequest {
// message fields
pub key: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a DeleteRequest {
fn default() -> &'a DeleteRequest {
<DeleteRequest as ::protobuf::Message>::default_instance()
}
}
impl DeleteRequest {
pub fn new() -> DeleteRequest {
::std::default::Default::default()
}
// string key = 1;
pub fn get_key(&self) -> &str {
&self.key
}
pub fn clear_key(&mut self) {
self.key.clear();
}
// Param is passed by value, moved
pub fn set_key(&mut self, v: ::std::string::String) {
self.key = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_key(&mut self) -> &mut ::std::string::String {
&mut self.key
}
// Take field
pub fn take_key(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.key, ::std::string::String::new())
}
}
impl ::protobuf::Message for DeleteRequest {
    // proto3 has no required fields, so any instance is initialized.
    fn is_initialized(&self) -> bool {
        true
    }
    // Merges fields from the wire into `self`; unrecognized field numbers are
    // preserved in `unknown_fields` instead of being dropped.
    fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        while !is.eof()? {
            let (field_number, wire_type) = is.read_tag_unpack()?;
            match field_number {
                1 => {
                    ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;
                },
                _ => {
                    ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
                },
            };
        }
        ::std::result::Result::Ok(())
    }
    // Compute sizes of nested messages
    #[allow(unused_variables)]
    fn compute_size(&self) -> u32 {
        let mut my_size = 0;
        // proto3 default (empty string) is not serialized, so it costs 0 bytes.
        if !self.key.is_empty() {
            my_size += ::protobuf::rt::string_size(1, &self.key);
        }
        my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
        // Cache the result so write_to_with_cached_sizes can reuse it.
        self.cached_size.set(my_size);
        my_size
    }
    fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        if !self.key.is_empty() {
            os.write_string(1, &self.key)?;
        }
        os.write_unknown_fields(self.get_unknown_fields())?;
        ::std::result::Result::Ok(())
    }
    fn get_cached_size(&self) -> u32 {
        self.cached_size.get()
    }
    fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
        &self.unknown_fields
    }
    fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
        &mut self.unknown_fields
    }
    fn as_any(&self) -> &dyn (::std::any::Any) {
        self as &dyn (::std::any::Any)
    }
    fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
        self as &mut dyn (::std::any::Any)
    }
    fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
        self
    }
    fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
        Self::descriptor_static()
    }
    fn new() -> DeleteRequest {
        DeleteRequest::new()
    }
    // Lazily builds the reflection descriptor (field accessors + name) once.
    fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
        static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
        descriptor.get(|| {
            let mut fields = ::std::vec::Vec::new();
            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
                "key",
                |m: &DeleteRequest| { &m.key },
                |m: &mut DeleteRequest| { &mut m.key },
            ));
            ::protobuf::reflect::MessageDescriptor::new_pb_name::<DeleteRequest>(
                "DeleteRequest",
                fields,
                file_descriptor_proto()
            )
        })
    }
    // Process-wide shared default instance, built on first use.
    fn default_instance() -> &'static DeleteRequest {
        static instance: ::protobuf::rt::LazyV2<DeleteRequest> = ::protobuf::rt::LazyV2::INIT;
        instance.get(DeleteRequest::new)
    }
}
impl ::protobuf::Clear for DeleteRequest {
    // Resets every field (payload and unknown fields) to its default.
    fn clear(&mut self) {
        self.key.clear();
        self.unknown_fields.clear();
    }
}
impl ::std::fmt::Debug for DeleteRequest {
    // Debug output delegates to protobuf text format rather than derive(Debug).
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        ::protobuf::text_format::fmt(self, f)
    }
}
impl ::protobuf::reflect::ProtobufValue for DeleteRequest {
    // Exposes the message through the crate's reflection value API.
    fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
        ::protobuf::reflect::ReflectValueRef::Message(self)
    }
}
/// Response message for the `Delete` RPC: reports whether an entry was removed.
/// (Generated-style rust-protobuf message — regenerate rather than hand-edit.)
#[derive(PartialEq,Clone,Default)]
pub struct DeleteResponse {
    // message fields
    /// `bool deleted = 1;` from the .proto.
    pub deleted: bool,
    // special fields
    /// Fields present on the wire but unknown to this schema version.
    pub unknown_fields: ::protobuf::UnknownFields,
    /// Cached serialized size, filled in by `compute_size`.
    pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a DeleteResponse {
    // A default &DeleteResponse borrows the process-wide default instance.
    fn default() -> &'a DeleteResponse {
        <DeleteResponse as ::protobuf::Message>::default_instance()
    }
}
impl DeleteResponse {
    /// Creates a message with all fields at their proto3 defaults.
    pub fn new() -> DeleteResponse {
        ::std::default::Default::default()
    }

    // bool deleted = 1;

    /// Getter for `deleted` (Copy type, returned by value).
    pub fn get_deleted(&self) -> bool {
        self.deleted
    }
    /// Resets `deleted` to the proto3 default (`false`).
    pub fn clear_deleted(&mut self) {
        self.deleted = false;
    }

    // Param is passed by value, moved
    pub fn set_deleted(&mut self, v: bool) {
        self.deleted = v;
    }
}
impl ::protobuf::Message for DeleteResponse {
    // proto3 has no required fields, so any instance is initialized.
    fn is_initialized(&self) -> bool {
        true
    }
    // Merges fields from the wire into `self`; unrecognized field numbers are
    // preserved in `unknown_fields` instead of being dropped.
    fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        while !is.eof()? {
            let (field_number, wire_type) = is.read_tag_unpack()?;
            match field_number {
                1 => {
                    // `deleted` is a bool, which must arrive as a varint.
                    if wire_type != ::protobuf::wire_format::WireTypeVarint {
                        return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
                    }
                    let tmp = is.read_bool()?;
                    self.deleted = tmp;
                },
                _ => {
                    ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
                },
            };
        }
        ::std::result::Result::Ok(())
    }
    // Compute sizes of nested messages
    #[allow(unused_variables)]
    fn compute_size(&self) -> u32 {
        let mut my_size = 0;
        // Non-default bool costs 2 bytes on the wire: 1 tag byte + 1 value byte.
        if self.deleted != false {
            my_size += 2;
        }
        my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
        // Cache the result so write_to_with_cached_sizes can reuse it.
        self.cached_size.set(my_size);
        my_size
    }
    fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
        if self.deleted != false {
            os.write_bool(1, self.deleted)?;
        }
        os.write_unknown_fields(self.get_unknown_fields())?;
        ::std::result::Result::Ok(())
    }
    fn get_cached_size(&self) -> u32 {
        self.cached_size.get()
    }
    fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
        &self.unknown_fields
    }
    fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
        &mut self.unknown_fields
    }
    fn as_any(&self) -> &dyn (::std::any::Any) {
        self as &dyn (::std::any::Any)
    }
    fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
        self as &mut dyn (::std::any::Any)
    }
    fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
        self
    }
    fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
        Self::descriptor_static()
    }
    fn new() -> DeleteResponse {
        DeleteResponse::new()
    }
    // Lazily builds the reflection descriptor (field accessors + name) once.
    fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
        static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
        descriptor.get(|| {
            let mut fields = ::std::vec::Vec::new();
            fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBool>(
                "deleted",
                |m: &DeleteResponse| { &m.deleted },
                |m: &mut DeleteResponse| { &mut m.deleted },
            ));
            ::protobuf::reflect::MessageDescriptor::new_pb_name::<DeleteResponse>(
                "DeleteResponse",
                fields,
                file_descriptor_proto()
            )
        })
    }
    // Process-wide shared default instance, built on first use.
    fn default_instance() -> &'static DeleteResponse {
        static instance: ::protobuf::rt::LazyV2<DeleteResponse> = ::protobuf::rt::LazyV2::INIT;
        instance.get(DeleteResponse::new)
    }
}
impl ::protobuf::Clear for DeleteResponse {
    // Resets every field (payload and unknown fields) to its default.
    fn clear(&mut self) {
        self.deleted = false;
        self.unknown_fields.clear();
    }
}
impl ::std::fmt::Debug for DeleteResponse {
    // Debug output delegates to protobuf text format rather than derive(Debug).
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        ::protobuf::text_format::fmt(self, f)
    }
}
impl ::protobuf::reflect::ProtobufValue for DeleteResponse {
    // Exposes the message through the crate's reflection value API.
    fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
        ::protobuf::reflect::ReflectValueRef::Message(self)
    }
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n\x11proto/gerdu.proto\x12\x05gerdu\"'\n\x0bPutResponse\x12\x18\n\x07cr\
eated\x18\x01\x20\x01(\x08R\x07created\"4\n\nPutRequest\x12\x10\n\x03key\
\x18\x01\x20\x01(\tR\x03key\x12\x14\n\x05value\x18\x02\x20\x01(\x0cR\x05\
value\"\x1e\n\nGetRequest\x12\x10\n\x03key\x18\x01\x20\x01(\tR\x03key\"#\
\n\x0bGetResponse\x12\x14\n\x05value\x18\x01\x20\x01(\x0cR\x05value\"!\n\
\rDeleteRequest\x12\x10\n\x03key\x18\x01\x20\x01(\tR\x03key\"*\n\x0eDele\
teResponse\x12\x18\n\x07deleted\x18\x01\x20\x01(\x08R\x07deleted2\x9a\
\x01\n\x05Gerdu\x12,\n\x03Put\x12\x11.gerdu.PutRequest\x1a\x12.gerdu.Put\
Response\x12,\n\x03Get\x12\x11.gerdu.GetRequest\x1a\x12.gerdu.GetRespons\
e\x125\n\x06Delete\x12\x14.gerdu.DeleteRequest\x1a\x15.gerdu.DeleteRespo\
nseB+\n\x1acom.amirrazmjou.gerdu.javaP\x01Z\x0bproto;protoJ\xa9\x06\n\
\x06\x12\x04\0\0%\x01\n\x08\n\x01\x0c\x12\x03\0\0\x12\n\x08\n\x01\x08\
\x12\x03\x02\0\"\n\t\n\x02\x08\x0b\x12\x03\x02\0\"\n\x08\n\x01\x08\x12\
\x03\x03\0\"\n\t\n\x02\x08\n\x12\x03\x03\0\"\n\x08\n\x01\x08\x12\x03\x04\
\03\n\t\n\x02\x08\x01\x12\x03\x04\03\n\x08\n\x01\x02\x12\x03\x06\0\x0e\n\
\n\n\x02\x06\0\x12\x04\x08\0\x0c\x01\n\n\n\x03\x06\0\x01\x12\x03\x08\x08\
\r\n\x0b\n\x04\x06\0\x02\0\x12\x03\t\x04.\n\x0c\n\x05\x06\0\x02\0\x01\
\x12\x03\t\x08\x0b\n\x0c\n\x05\x06\0\x02\0\x02\x12\x03\t\x0c\x16\n\x0c\n\
\x05\x06\0\x02\0\x03\x12\x03\t!,\n\x0b\n\x04\x06\0\x02\x01\x12\x03\n\x04\
.\n\x0c\n\x05\x06\0\x02\x01\x01\x12\x03\n\x08\x0b\n\x0c\n\x05\x06\0\x02\
\x01\x02\x12\x03\n\x0c\x16\n\x0c\n\x05\x06\0\x02\x01\x03\x12\x03\n!,\n\
\x0b\n\x04\x06\0\x02\x02\x12\x03\x0b\x047\n\x0c\n\x05\x06\0\x02\x02\x01\
\x12\x03\x0b\x08\x0e\n\x0c\n\x05\x06\0\x02\x02\x02\x12\x03\x0b\x0f\x1c\n\
\x0c\n\x05\x06\0\x02\x02\x03\x12\x03\x0b'5\n\n\n\x02\x04\0\x12\x04\x0e\0\
\x10\x01\n\n\n\x03\x04\0\x01\x12\x03\x0e\x08\x13\n\x0b\n\x04\x04\0\x02\0\
\x12\x03\x0f\x04\x15\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03\x0f\x04\x08\n\
\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x0f\t\x10\n\x0c\n\x05\x04\0\x02\0\x03\
\x12\x03\x0f\x13\x14\n\n\n\x02\x04\x01\x12\x04\x12\0\x15\x01\n\n\n\x03\
\x04\x01\x01\x12\x03\x12\x08\x12\n\x0b\n\x04\x04\x01\x02\0\x12\x03\x13\
\x04\x13\n\x0c\n\x05\x04\x01\x02\0\x05\x12\x03\x13\x04\n\n\x0c\n\x05\x04\
\x01\x02\0\x01\x12\x03\x13\x0b\x0e\n\x0c\n\x05\x04\x01\x02\0\x03\x12\x03\
\x13\x11\x12\n\x0b\n\x04\x04\x01\x02\x01\x12\x03\x14\x04\x14\n\x0c\n\x05\
\x04\x01\x02\x01\x05\x12\x03\x14\x04\t\n\x0c\n\x05\x04\x01\x02\x01\x01\
\x12\x03\x14\n\x0f\n\x0c\n\x05\x04\x01\x02\x01\x03\x12\x03\x14\x12\x13\n\
\n\n\x02\x04\x02\x12\x04\x17\0\x19\x01\n\n\n\x03\x04\x02\x01\x12\x03\x17\
\x08\x12\n\x0b\n\x04\x04\x02\x02\0\x12\x03\x18\x04\x13\n\x0c\n\x05\x04\
\x02\x02\0\x05\x12\x03\x18\x04\n\n\x0c\n\x05\x04\x02\x02\0\x01\x12\x03\
\x18\x0b\x0e\n\x0c\n\x05\x04\x02\x02\0\x03\x12\x03\x18\x11\x12\n\n\n\x02\
\x04\x03\x12\x04\x1b\0\x1d\x01\n\n\n\x03\x04\x03\x01\x12\x03\x1b\x08\x13\
\n\x0b\n\x04\x04\x03\x02\0\x12\x03\x1c\x04\x14\n\x0c\n\x05\x04\x03\x02\0\
\x05\x12\x03\x1c\x04\t\n\x0c\n\x05\x04\x03\x02\0\x01\x12\x03\x1c\n\x0f\n\
\x0c\n\x05\x04\x03\x02\0\x03\x12\x03\x1c\x12\x13\n\n\n\x02\x04\x04\x12\
\x04\x1f\0!\x01\n\n\n\x03\x04\x04\x01\x12\x03\x1f\x08\x15\n\x0b\n\x04\
\x04\x04\x02\0\x12\x03\x20\x04\x13\n\x0c\n\x05\x04\x04\x02\0\x05\x12\x03\
\x20\x04\n\n\x0c\n\x05\x04\x04\x02\0\x01\x12\x03\x20\x0b\x0e\n\x0c\n\x05\
\x04\x04\x02\0\x03\x12\x03\x20\x11\x12\n\n\n\x02\x04\x05\x12\x04#\0%\x01\
\n\n\n\x03\x04\x05\x01\x12\x03#\x08\x16\n\x0b\n\x04\x04\x05\x02\0\x12\
\x03$\x04\x15\n\x0c\n\x05\x04\x05\x02\0\x05\x12\x03$\x04\x08\n\x0c\n\x05\
\x04\x05\x02\0\x01\x12\x03$\t\x10\n\x0c\n\x05\x04\x05\x02\0\x03\x12\x03$\
\x13\x14b\x06proto3\
";
// Lazily-parsed FileDescriptorProto for this generated file; parsed at most
// once from the serialized bytes above.
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
// Decodes the embedded descriptor bytes. The `unwrap` is expected to be safe
// because the bytes were emitted by the protobuf code generator itself.
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
    ::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
/// Returns the (lazily initialized) file descriptor for reflection use.
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
    file_descriptor_proto_lazy.get(|| {
        parse_descriptor_proto()
    })
}
| 32.551306 | 134 | 0.577962 |
39898c8048cc60c5de41035dcd95c3f25f410ff8 | 6,701 | // Generated by `scripts/generate.js`
use utils::c_bindings::*;
use utils::vk_traits::*;
use utils::vk_ptr::*;
use utils::vk_convert::*;
use std::os::raw::c_char;
use std::ops::Drop;
use std::ptr;
use std::mem;
use std::cmp;
use std::slice;
use vulkan::*;
use vulkan::vk::*;
// Raw Vulkan handle type backing `VkImage` (non-dispatchable handles are u64).
#[doc(hidden)]
pub type RawVkImage = u64;

/// Wrapper for [VkImage](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkImage.html).
#[derive(Debug, Clone, Copy)]
pub struct VkImage {
    // The raw Vulkan handle; 0 means "null handle" (see `is_null`/`null`).
    _handle: RawVkImage,
    // Pointer to the loaded function table used by every method; set via
    // `VkSetup::vk_setup`, null until then.
    _fn_table: *mut VkFunctionTable
}
impl VkRawType<VkImage> for RawVkImage {
    // Wraps a raw handle; the function table starts null and must be filled
    // in later through `VkSetup::vk_setup` before any method is called.
    fn vk_to_wrapped(src: &RawVkImage) -> VkImage {
        VkImage {
            _handle: *src,
            _fn_table: ptr::null_mut()
        }
    }
}
impl VkWrappedType<RawVkImage> for VkImage {
    // Unwraps back to the raw handle (the function table is dropped).
    fn vk_to_raw(src: &VkImage, dst: &mut RawVkImage) {
        *dst = src._handle
    }
}
impl Default for VkImage {
    // Same as `VkImage::null()`: null handle, no function table.
    fn default() -> VkImage {
        VkImage {
            _handle: 0,
            _fn_table: ptr::null_mut()
        }
    }
}
impl PartialEq for VkImage {
    // Equality is identity of the underlying Vulkan handle only.
    fn eq(&self, other: &VkImage) -> bool {
        self._handle == other._handle
    }
}
impl VkSetup for VkImage {
    // Installs the function table pointer used by all wrapper methods.
    fn vk_setup(&mut self, fn_table: *mut VkFunctionTable) {
        self._fn_table = fn_table;
    }
}
impl VkImage {
    /// Returns the internal Vulkan handle for the object.
    pub fn vk_handle(&self) -> u64 {
        self._handle
    }
    /// Indicates if the Vulkan internal handle for this object is 0.
    pub fn is_null(&self) -> bool {
        self._handle == 0
    }
    /// Creates an object with a null Vulkan internal handle.
    ///
    /// Calling a method with a null handle will most likely result in a crash.
    pub fn null() -> Self {
        Self {
            _handle: 0,
            _fn_table: ptr::null_mut()
        }
    }
    /// Wrapper for [vkBindImageMemory](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkBindImageMemory.html).
    pub fn bind_memory(&self, memory: VkDeviceMemory, memory_offset: usize) -> LavaResult<()> {
        unsafe {
            let raw_memory = vk_to_raw_value(&memory);
            let raw_memory_offset = vk_to_raw_value(&memory_offset);
            let vk_result = ((&*self._fn_table).vkBindImageMemory)((*self._fn_table).device, self._handle, raw_memory, raw_memory_offset);
            // 0 is VK_SUCCESS; anything else is wrapped into the error variant.
            if vk_result == 0 { Ok(()) } else { Err((RawVkResult::vk_to_wrapped(&vk_result), ())) }
        }
    }
    /// Wrapper for [vkGetImageMemoryRequirements](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetImageMemoryRequirements.html).
    pub fn get_memory_requirements(&self) -> VkMemoryRequirements {
        unsafe {
            // Stack-allocated zeroed output struct filled in by the driver.
            let raw_memory_requirements = &mut mem::zeroed() as *mut RawVkMemoryRequirements;
            ((&*self._fn_table).vkGetImageMemoryRequirements)((*self._fn_table).device, self._handle, raw_memory_requirements);
            let mut memory_requirements = new_vk_value(raw_memory_requirements);
            let fn_table = self._fn_table;
            VkSetup::vk_setup(&mut memory_requirements, fn_table);
            memory_requirements
        }
    }
    /// Wrapper for [vkGetImageSparseMemoryRequirements](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetImageSparseMemoryRequirements.html).
    pub fn get_sparse_memory_requirements(&self) -> Vec<VkSparseImageMemoryRequirements> {
        unsafe {
            let mut raw_sparse_memory_requirements : *mut RawVkSparseImageMemoryRequirements = ptr::null_mut();
            let raw_sparse_memory_requirement_count = &mut mem::zeroed() as *mut u32;
            // Standard Vulkan two-call pattern: first call (null array) only
            // queries the count, second call fills the allocated array.
            ((&*self._fn_table).vkGetImageSparseMemoryRequirements)((*self._fn_table).device, self._handle, raw_sparse_memory_requirement_count, raw_sparse_memory_requirements);
            raw_sparse_memory_requirements = calloc(*raw_sparse_memory_requirement_count as usize, mem::size_of::<RawVkSparseImageMemoryRequirements>()) as *mut RawVkSparseImageMemoryRequirements;
            ((&*self._fn_table).vkGetImageSparseMemoryRequirements)((*self._fn_table).device, self._handle, raw_sparse_memory_requirement_count, raw_sparse_memory_requirements);
            let mut sparse_memory_requirements = new_vk_array(*raw_sparse_memory_requirement_count, raw_sparse_memory_requirements);
            for elt in &mut sparse_memory_requirements { VkSetup::vk_setup(elt, self._fn_table); }
            // The temporary calloc'd buffer is owned here and freed after the
            // elements were copied into the Vec.
            free(raw_sparse_memory_requirements as *mut u8);
            sparse_memory_requirements
        }
    }
    /// Wrapper for [vkDestroyImage](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkDestroyImage.html).
    pub fn destroy(&self) {
        unsafe {
            ((&*self._fn_table).vkDestroyImage)((*self._fn_table).device, self._handle, ptr::null());
        }
    }
    /// Wrapper for [vkGetImageSubresourceLayout](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetImageSubresourceLayout.html).
    pub fn get_subresource_layout(&self, subresource: VkImageSubresource) -> VkSubresourceLayout {
        unsafe {
            let raw_subresource = new_ptr_vk_value(&subresource);
            let raw_layout = &mut mem::zeroed() as *mut RawVkSubresourceLayout;
            ((&*self._fn_table).vkGetImageSubresourceLayout)((*self._fn_table).device, self._handle, raw_subresource, raw_layout);
            let mut layout = new_vk_value(raw_layout);
            let fn_table = self._fn_table;
            VkSetup::vk_setup(&mut layout, fn_table);
            // Frees the heap copy of `subresource` created by new_ptr_vk_value.
            free_vk_ptr(raw_subresource);
            layout
        }
    }
    /// Wrapper for [vkGetImageDrmFormatModifierPropertiesEXT](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html).
    pub fn get_drm_format_modifier_properties(&self) -> LavaResult<ext::VkImageDrmFormatModifierProperties> {
        unsafe {
            let mut vk_result = 0;
            let raw_properties = &mut mem::zeroed() as *mut ext::RawVkImageDrmFormatModifierProperties;
            vk_result = ((&*self._fn_table).vkGetImageDrmFormatModifierPropertiesEXT)((*self._fn_table).device, self._handle, raw_properties);
            let mut properties = new_vk_value(raw_properties);
            // Only a successful call installs the function table; on error the
            // (zeroed) value is still returned alongside the error code.
            if vk_result == 0 {
                let fn_table = self._fn_table;
                VkSetup::vk_setup(&mut properties, fn_table);
            }
            if vk_result == 0 { Ok(properties) } else { Err((RawVkResult::vk_to_wrapped(&vk_result), properties)) }
        }
    }
}
56701ca5e7d917115fc06c13458ceb92e022d4ec | 880 | use pulldown_dmark::{html, Parser};
fn main() {
    // Sample document: plain emphasis plus a `~~strikethrough~~` span.
    let source: &str = "Hello world, this is a ~~complicated~~ *very simple* example.";
    println!("Parsing the following markdown string:\n{}", source);

    // Build the parser over the raw markdown.
    // NOTE(review): the original comment claims strikethrough must be enabled
    // explicitly, but no option flags are passed here — confirm against the
    // parser crate's API.
    let parser = Parser::new(source);

    // Render the event stream into HTML. HTML tends to be somewhat larger
    // than its markdown source, hence the 1.5x capacity hint.
    let mut rendered: String = String::with_capacity(source.len() * 3 / 2);
    html::push_html(&mut rendered, parser);

    // Sanity-check the conversion before printing it.
    let expected: &str =
        "<p>Hello world, this is a <del>complicated</del> <em>very simple</em> example.</p>\n";
    assert_eq!(expected, &rendered);

    println!("\nHTML output:\n{}", &rendered);
}
| 38.26087 | 95 | 0.676136 |
ffbd6123cd99b571a5c3209827d650f1aae2eefb | 1,308 | #![feature(const_str_as_bytes)]
#![feature(const_slice_len)]
#![feature(const_str_len)]
#![feature(const_raw_ptr_deref)]
use tracelogging::*;
fn main() {
    // Register the provider under a fixed GUID; `handle` scopes every
    // logging call below until `tracelogging_un_register!` at the end.
    let handle = tracelogging_register!(
        "3970f9cf-2c0c-4f11-b1cc-e3a1e9958833",
        SimpleTraceLoggingProvider
    );
    // Activity spanning the whole run; stopped just before unregistering.
    let activity1 = tracelogging_start!(handle, "main");
    let var1 = 42;
    let var2 = "first";
    // NOTE(review): these macros appear to capture the *identifiers*
    // (var1, var2, var3) as event field names, so renaming the locals would
    // change the emitted events — confirm against the crate's docs.
    tracelogging!(handle, "myEvent1", var1, var2);
    tracelogging!(handle, "myEvent2");
    let var3 = format!("{}", 3);
    // Nested activity: started with (var1, var2), stopped with (var1, var3).
    let activity2 = tracelogging_start!(handle, "myEvent3", var1, var2);
    tracelogging_stop!(handle, activity2, "myEvent3", var1, var3);
    tracelogging_expr!(
        handle,
        "myEvent4",
        || {
            tracelogging_tagged!(handle, "myEvent5", var1, var2, var3);
        },
        var1,
        var2
    );
    // `tracelogging_expr!` (block form) and `tracelogging_fun!` (closure
    // form) evaluate the wrapped code and yield its value: 2 + 1 == 3.
    assert_eq!(
        3,
        tracelogging_expr!(handle, "myEvent6", {
            tracelogging_tagged!(handle, "myEvent7", var1, var2);
            2 + 1
        })
    );
    assert_eq!(
        3,
        tracelogging_fun!(handle, "myEvent6", || {
            tracelogging_tagged!(handle, "myEvent7", var1, var2);
            2 + 1
        })
    );
    tracelogging_stop!(handle, activity1, "main", var1, var3);
    tracelogging_un_register!(handle);
}
| 24.679245 | 72 | 0.582569 |
644edbacfcb18884b45c691e93222810b54e4f57 | 1,757 | // errors2.rs
// Say we're writing a game where you can buy items with tokens. All items cost
// 5 tokens, and whenever you purchase items there is a processing fee of 1
// token. A player of the game will type in how many items they want to buy,
// and the `total_cost` function will calculate the total number of tokens.
// Since the player typed in the quantity, though, we get it as a string-- and
// they might have typed anything, not just numbers!
// Right now, this function isn't handling the error case at all (and isn't
// handling the success case properly either). What we want to do is:
// if we call the `parse` function on a string that is not a number, that
// function will return a `ParseIntError`, and in that case, we want to
// immediately return that error from our function and not try to multiply
// and add.
// There are at least two ways to implement this that are both correct-- but
// one is a lot shorter! Execute `rustlings hint errors2` for hints to both ways.
use std::num::ParseIntError;
/// Computes the total token cost for buying `item_quantity` items:
/// `quantity * 5` tokens (cost per item) plus a flat 1-token processing fee.
///
/// # Errors
///
/// Returns the `ParseIntError` from `str::parse` when `item_quantity` is not
/// a valid `i32` (e.g. non-numeric player input); the `?` operator propagates
/// it immediately without computing a cost. A `match` that re-wraps the
/// parse result would work too, but `?` is the idiomatic short form.
pub fn total_cost(item_quantity: &str) -> Result<i32, ParseIntError> {
    let processing_fee = 1;
    let cost_per_item = 5;
    let qty = item_quantity.parse::<i32>()?;
    Ok(qty * cost_per_item + processing_fee)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn item_quantity_is_a_valid_number() {
        // 34 items * 5 tokens + 1 token processing fee = 171.
        assert_eq!(total_cost("34"), Ok(171));
    }

    #[test]
    fn item_quantity_is_an_invalid_number() {
        // Non-numeric input must surface `str::parse`'s ParseIntError.
        assert_eq!(
            total_cost("beep boop").unwrap_err().to_string(),
            "invalid digit found in string"
        );
    }
}
| 35.14 | 81 | 0.669892 |
ff7133f5d0c971efec75047d85e6e961eb1a5c19 | 121,035 | // Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Rustdoc's HTML Rendering module
//!
//! This modules contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
//! rendering process is largely driven by the `format!` syntax extension to
//! perform all I/O into files and streams.
//!
//! The rendering process is largely driven by the `Context` and `Cache`
//! structures. The cache is pre-populated by crawling the crate in question,
//! and then it is shared among the various rendering threads. The cache is meant
//! to be a fairly large structure not implementing `Clone` (because it's shared
//! among threads). The context, however, should be a lightweight structure. This
//! is cloned per-thread and contains information about what is currently being
//! rendered.
//!
//! In order to speed up rendering (mostly because of markdown rendering), the
//! rendering process has been parallelized. This parallelization is only
//! exposed through the `crate` method on the context, and then also from the
//! fact that the shared cache is stored in TLS (and must be accessed as such).
//!
//! In addition to rendering the crate itself, this module is also responsible
//! for creating the corresponding search index and source file renderings.
//! These threads are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
pub use self::ExternalLocation::*;
use std::ascii::AsciiExt;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::default::Default;
use std::error;
use std::fmt::{self, Display, Formatter, Write as FmtWrite};
use std::fs::{self, File, OpenOptions};
use std::io::prelude::*;
use std::io::{self, BufWriter, BufReader};
use std::iter::repeat;
use std::mem;
use std::path::{PathBuf, Path, Component};
use std::str;
use std::sync::Arc;
use externalfiles::ExternalHtml;
use serialize::json::{ToJson, Json, as_json};
use syntax::{abi, ast};
use syntax::feature_gate::UnstableFeatures;
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
use rustc::middle::privacy::AccessLevels;
use rustc::middle::stability;
use rustc::hir;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::flock;
use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability};
use doctree;
use fold::DocFolder;
use html::escape::Escape;
use html::format::{ConstnessSpace};
use html::format::{TyParamBounds, WhereClause, href, AbiSpace};
use html::format::{VisSpace, Method, UnsafetySpace, MutableSpace};
use html::format::fmt_impl_for_trait_page;
use html::item_type::ItemType;
use html::markdown::{self, Markdown};
use html::{highlight, layout};
/// A pair of an item's name and its optional documentation string.
pub type NameDoc = (String, Option<String>);
/// Major driving force in all rustdoc rendering. This contains information
/// about where in the tree-like hierarchy rendering is occurring and controls
/// how the current page is being rendered.
///
/// It is intended that this context is a lightweight object which can be fairly
/// easily cloned because it is cloned per work-job (about once per item in the
/// rustdoc tree).
#[derive(Clone)]
pub struct Context {
    /// Current hierarchy of components leading down to what's currently being
    /// rendered
    pub current: Vec<String>,
    /// The current destination folder of where HTML artifacts should be placed.
    /// This changes as the context descends into the module hierarchy.
    pub dst: PathBuf,
    /// A flag, which when `true`, will render pages which redirect to the
    /// real location of an item. This is used to allow external links to
    /// publicly reused items to redirect to the right location.
    pub render_redirect_pages: bool,
    /// State shared between all clones of this context (kept behind an `Arc`,
    /// one clone per rendering job).
    pub shared: Arc<SharedContext>,
}
/// Rendering state shared (via `Context::shared`) between all `Context`
/// clones; built once before rendering starts.
pub struct SharedContext {
    /// The path to the crate root source minus the file name.
    /// Used for simplifying paths to the highlighted source code files.
    pub src_root: PathBuf,
    /// This describes the layout of each page, and is not modified after
    /// creation of the context (contains info like the favicon and added html).
    pub layout: layout::Layout,
    /// This flag indicates whether [src] links should be generated or not. If
    /// the source files are present in the html rendering, then this will be
    /// `true`.
    pub include_sources: bool,
    /// The local file sources we've emitted and their respective url-paths.
    pub local_sources: FxHashMap<PathBuf, String>,
    /// All the passes that were run on this crate.
    pub passes: FxHashSet<String>,
    /// The base-URL of the issue tracker for when an item has been tagged with
    /// an issue number.
    pub issue_tracker_base_url: Option<String>,
    /// The given user css file which allow to customize the generated
    /// documentation theme.
    pub css_file_extension: Option<PathBuf>,
}
/// Indicates where an external crate can be found.
/// One of these is resolved per crate and stored in `Cache::extern_locations`.
pub enum ExternalLocation {
    /// Remote URL root of the external crate
    Remote(String),
    /// This external crate can be found in the local doc/ folder
    Local,
    /// The external crate could not be found.
    Unknown,
}
/// Metadata about an implementor of a trait.
pub struct Implementor {
    /// `DefId` of the implementing type.
    pub def_id: DefId,
    /// Stability attributes of the impl, if any.
    pub stability: Option<clean::Stability>,
    /// The cleaned impl itself.
    pub impl_: clean::Impl,
}
/// Metadata about implementations for a type.
#[derive(Clone)]
pub struct Impl {
    /// The cleaned item; expected to be a `clean::ImplItem` (see
    /// `Impl::inner_impl`, which panics otherwise).
    pub impl_item: clean::Item,
}
impl Impl {
    /// Borrows the underlying `clean::Impl` out of the wrapped item.
    ///
    /// Panics when the wrapped item is not an impl item; `Impl` values are
    /// only ever built around impl items, so that would be a caller bug.
    fn inner_impl(&self) -> &clean::Impl {
        if let clean::ImplItem(ref inner) = self.impl_item.inner {
            inner
        } else {
            panic!("non-impl item found in impl")
        }
    }

    /// `DefId` of the implemented trait, or `None` for inherent impls.
    fn trait_did(&self) -> Option<DefId> {
        let inner = self.inner_impl();
        inner.trait_.def_id()
    }
}
/// An I/O error tagged with the path of the file being written when it
/// occurred (see `try_err!`, which constructs these).
#[derive(Debug)]
pub struct Error {
    // The file that was being operated on.
    file: PathBuf,
    // The underlying I/O failure.
    error: io::Error,
}
impl error::Error for Error {
    // Delegates to the wrapped io::Error's description.
    fn description(&self) -> &str {
        self.error.description()
    }
}
impl Display for Error {
    /// Formats as `"<file path>": <underlying io error>`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let path = self.file.display();
        write!(f, "\"{}\": {}", path, self.error)
    }
}
impl Error {
    /// Wraps an `io::Error` together with the file it occurred on.
    pub fn new(e: io::Error, file: &Path) -> Error {
        Error {
            error: e,
            file: file.to_path_buf(),
        }
    }
}
// Like `?`, but for functions returning this module's `Error`: on failure the
// `io::Error` is wrapped with the offending file path via `Error::new` before
// early-returning.
macro_rules! try_err {
    ($e:expr, $file:expr) => ({
        match $e {
            Ok(e) => e,
            Err(e) => return Err(Error::new(e, $file)),
        }
    })
}
/// This cache is used to store information about the `clean::Crate` being
/// rendered in order to provide more useful documentation. This contains
/// information like all implementors of a trait, all traits a type implements,
/// documentation for all known traits, etc.
///
/// This structure purposefully does not implement `Clone` because it's intended
/// to be a fairly large and expensive structure to clone. Instead this adheres
/// to `Send` so it may be stored in a `Arc` instance and shared among the various
/// rendering threads.
#[derive(Default)]
pub struct Cache {
    /// Mapping of typaram ids to the name of the type parameter. This is used
    /// when pretty-printing a type (so pretty printing doesn't have to
    /// painfully maintain a context like this)
    pub typarams: FxHashMap<DefId, String>,
    /// Maps a type id to all known implementations for that type. This is only
    /// recognized for intra-crate `ResolvedPath` types, and is used to print
    /// out extra documentation on the page of an enum/struct.
    ///
    /// The values of the map are a list of implementations and documentation
    /// found on that implementation.
    pub impls: FxHashMap<DefId, Vec<Impl>>,
    /// Maintains a mapping of local crate node ids to the fully qualified name
    /// and "short type description" of that node. This is used when generating
    /// URLs when a type is being linked to. External paths are not located in
    /// this map because the `External` type itself has all the information
    /// necessary.
    pub paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
    /// Similar to `paths`, but only holds external paths. This is only used for
    /// generating explicit hyperlinks to other crates.
    pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
    /// This map contains information about all known traits of this crate.
    /// Implementations of a crate should inherit the documentation of the
    /// parent trait if no extra documentation is specified, and default methods
    /// should show up in documentation about trait implementations.
    pub traits: FxHashMap<DefId, clean::Trait>,
    /// When rendering traits, it's often useful to be able to list all
    /// implementors of the trait, and this mapping is exactly, that: a mapping
    /// of trait ids to the list of known implementors of the trait
    pub implementors: FxHashMap<DefId, Vec<Implementor>>,
    /// Cache of where external crate documentation can be found.
    pub extern_locations: FxHashMap<CrateNum, (String, PathBuf, ExternalLocation)>,
    /// Cache of where documentation for primitives can be found.
    pub primitive_locations: FxHashMap<clean::PrimitiveType, DefId>,
    // Note that external items for which `doc(hidden)` applies to are shown as
    // non-reachable while local items aren't. This is because we're reusing
    // the access levels from crateanalysis.
    pub access_levels: Arc<AccessLevels<DefId>>,
    // Private fields only used when initially crawling a crate to build a cache
    // (presumably the current module path — TODO confirm against the crawler)
    stack: Vec<String>,
    // DefIds of enclosing items during the crawl.
    parent_stack: Vec<DefId>,
    parent_is_trait_impl: bool,
    // Accumulated entries for the JS search index.
    search_index: Vec<IndexItem>,
    stripped_mod: bool,
    // DefIds of Deref/DerefMut, carried over from `RenderInfo`.
    deref_trait_did: Option<DefId>,
    deref_mut_trait_did: Option<DefId>,
    // In rare case where a structure is defined in one module but implemented
    // in another, if the implementing module is parsed before defining module,
    // then the fully qualified name of the structure isn't presented in `paths`
    // yet when its implementation methods are being indexed. Caches such methods
    // and their parent id here and indexes them at the end of crate parsing.
    orphan_impl_items: Vec<(DefId, clean::Item)>,
}
/// Temporary storage for data obtained during `RustdocVisitor::clean()`.
/// Later on moved into `CACHE_KEY`.
#[derive(Default)]
pub struct RenderInfo {
    // DefIds of items that were inlined during cleaning.
    pub inlined: FxHashSet<DefId>,
    pub external_paths: ::core::ExternalPaths,
    // External type parameter names, merged into `Cache::typarams`.
    pub external_typarams: FxHashMap<DefId, String>,
    // DefIds of the Deref/DerefMut traits, when resolved.
    pub deref_trait_did: Option<DefId>,
    pub deref_mut_trait_did: Option<DefId>,
}
/// Helper struct to render all source code to HTML pages
struct SourceCollector<'a> {
    // Shared rendering state; mutated to record emitted source files.
    scx: &'a mut SharedContext,

    /// Root destination to place all HTML output into
    dst: PathBuf,
}

/// Wrapper struct to render the source code of a file. This will do things like
/// adding line numbers to the left-hand side.
struct Source<'a>(&'a str);

// Helper structs for rendering items/sidebars and carrying along contextual
// information
#[derive(Copy, Clone)]
struct Item<'a> {
    // Rendering context for the page this item appears on.
    cx: &'a Context,
    // The cleaned item being rendered.
    item: &'a clean::Item,
}

struct Sidebar<'a> { cx: &'a Context, item: &'a clean::Item, }
/// Struct representing one entry in the JS search index. These are all emitted
/// by hand to a large JS file at the end of cache-creation.
struct IndexItem {
    // Item kind (fn, struct, trait, ...); serialized as its numeric value.
    ty: ItemType,
    name: String,
    // Fully qualified module path of the item.
    path: String,
    // Short description shown in search results.
    desc: String,
    // Parent item (e.g. the struct of a method) and its index in the paths
    // table; must be both set or both unset (asserted in ToJson).
    parent: Option<DefId>,
    parent_idx: Option<usize>,
    // Function signature info, for searching by type.
    search_type: Option<IndexItemFunctionType>,
}
impl ToJson for IndexItem {
    /// Serializes as a fixed 6-element JSON array:
    /// `[ty, name, path, desc, parent_idx, search_type]`.
    fn to_json(&self) -> Json {
        // `parent` and `parent_idx` must be set (or unset) together.
        assert_eq!(self.parent.is_some(), self.parent_idx.is_some());
        let data = vec![
            (self.ty as usize).to_json(),
            self.name.to_json(),
            self.path.to_json(),
            self.desc.to_json(),
            self.parent_idx.to_json(),
            self.search_type.to_json(),
        ];
        Json::Array(data)
    }
}
/// A type used for the search index.
struct Type {
    /// Resolved name of the type; `None` when it could not be named, which
    /// makes the whole enclosing signature unserializable (see
    /// `IndexItemFunctionType::to_json`).
    name: Option<String>,
}
impl ToJson for Type {
    /// A named type serializes to `{"name": ...}`; an unnamed one to `null`.
    fn to_json(&self) -> Json {
        if let Some(ref name) = self.name {
            let mut object = BTreeMap::new();
            object.insert("name".to_owned(), name.to_json());
            Json::Object(object)
        } else {
            Json::Null
        }
    }
}
/// Full type of functions/methods in the search index.
struct IndexItemFunctionType {
    /// Types of the parameters, in declaration order.
    inputs: Vec<Type>,
    /// Return type; presumably `None` when there is no meaningful return
    /// value — confirm against `get_index_search_type`.
    output: Option<Type>
}
impl ToJson for IndexItemFunctionType {
    /// Serializes the signature as `{"inputs": [...], "output": ...}`, or
    /// `null` when any involved type could not be resolved to a name.
    fn to_json(&self) -> Json {
        let any_unnamed = self.inputs.iter()
            .chain(self.output.iter())
            .any(|ty| ty.name.is_none());
        if any_unnamed {
            // If we couldn't figure out a type, just write `null`.
            return Json::Null;
        }
        let mut object = BTreeMap::new();
        object.insert("inputs".to_owned(), self.inputs.to_json());
        object.insert("output".to_owned(), self.output.to_json());
        Json::Object(object)
    }
}
// TLS keys used to carry information around during rendering.
// Read-only crate metadata, frozen behind an `Arc` once built in `run`.
thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
// Path components of the module currently being rendered; set at the top of
// `Context::render_item`.
thread_local!(pub static CURRENT_LOCATION_KEY: RefCell<Vec<String>> =
                    RefCell::new(Vec::new()));
// Usage counters for HTML `id` attributes on the current page; consumed by
// `derive_id` and reset per-page by `reset_ids`.
thread_local!(static USED_ID_MAP: RefCell<FxHashMap<String, usize>> =
                    RefCell::new(init_ids()))
fn init_ids() -> FxHashMap<String, usize> {
[
"main",
"search",
"help",
"TOC",
"render-detail",
"associated-types",
"associated-const",
"required-methods",
"provided-methods",
"implementors",
"implementors-list",
"methods",
"deref-methods",
"implementations",
].into_iter().map(|id| (String::from(*id), 1)).collect()
}
/// This method resets the local table of used ID attributes. This is typically
/// used at the beginning of rendering an entire HTML page to reset from the
/// previous state (if any).
pub fn reset_ids(embedded: bool) {
    USED_ID_MAP.with(|s| {
        // Embedded fragments live inside the full page skeleton, so its
        // reserved ids must stay blocked; standalone output starts blank.
        let fresh = if embedded {
            init_ids()
        } else {
            FxHashMap()
        };
        *s.borrow_mut() = fresh;
    });
}
/// Returns an HTML `id` value based on `candidate` that is unique on the
/// current page: the first occurrence comes back verbatim, later collisions
/// come back as `"{candidate}-{n}"`.
pub fn derive_id(candidate: String) -> String {
    USED_ID_MAP.with(|map| {
        // NOTE: the `borrow_mut` temporary in the scrutinee lives for the
        // whole `match`, so the map must not be re-borrowed inside the arms.
        let id = match map.borrow_mut().get_mut(&candidate) {
            None => candidate,
            Some(a) => {
                // Seen before: append the running counter, then bump it.
                let id = format!("{}-{}", candidate, *a);
                *a += 1;
                id
            }
        };
        // Record the derived id itself, so a later literal collision with
        // "candidate-n" also gets suffixed.
        map.borrow_mut().insert(id.clone(), 1);
        id
    })
}
/// Generates the documentation for `crate` into the directory `dst`
///
/// Top-level rendering driver: builds the shared context from crate
/// attributes and CLI options, renders sources, crawls the crate into the
/// `Cache`, builds the search index, writes shared static files, and finally
/// renders every item page. Returns the first I/O-level `Error` encountered.
pub fn run(mut krate: clean::Crate,
           external_html: &ExternalHtml,
           playground_url: Option<String>,
           dst: PathBuf,
           passes: FxHashSet<String>,
           css_file_extension: Option<PathBuf>,
           renderinfo: RenderInfo) -> Result<(), Error> {
    // Directory containing the crate root file; used to relativize source
    // paths when rendering source pages.
    let src_root = match krate.src.parent() {
        Some(p) => p.to_path_buf(),
        None => PathBuf::new(),
    };
    let mut scx = SharedContext {
        src_root: src_root,
        passes: passes,
        include_sources: true,
        local_sources: FxHashMap(),
        issue_tracker_base_url: None,
        layout: layout::Layout {
            logo: "".to_string(),
            favicon: "".to_string(),
            external_html: external_html.clone(),
            krate: krate.name.clone(),
        },
        css_file_extension: css_file_extension.clone(),
    };
    // If user passed in `--playground-url` arg, we fill in crate name here
    if let Some(url) = playground_url {
        markdown::PLAYGROUND.with(|slot| {
            *slot.borrow_mut() = Some((Some(krate.name.clone()), url));
        });
    }
    // Crawl the crate attributes looking for attributes which control how we're
    // going to emit HTML
    if let Some(attrs) = krate.module.as_ref().map(|m| &m.attrs) {
        for attr in attrs.lists("doc") {
            let name = attr.name().map(|s| s.as_str());
            match (name.as_ref().map(|s| &s[..]), attr.value_str()) {
                (Some("html_favicon_url"), Some(s)) => {
                    scx.layout.favicon = s.to_string();
                }
                (Some("html_logo_url"), Some(s)) => {
                    scx.layout.logo = s.to_string();
                }
                (Some("html_playground_url"), Some(s)) => {
                    // The crate attribute overrides any `--playground-url`
                    // value stored above.
                    markdown::PLAYGROUND.with(|slot| {
                        let name = krate.name.clone();
                        *slot.borrow_mut() = Some((Some(name), s.to_string()));
                    });
                }
                (Some("issue_tracker_base_url"), Some(s)) => {
                    scx.issue_tracker_base_url = Some(s.to_string());
                }
                (Some("html_no_source"), None) if attr.is_word() => {
                    scx.include_sources = false;
                }
                _ => {}
            }
        }
    }
    try_err!(mkdir(&dst), &dst);
    krate = render_sources(&dst, &mut scx, krate)?;
    let cx = Context {
        current: Vec::new(),
        dst: dst,
        render_redirect_pages: false,
        shared: Arc::new(scx),
    };
    // Crawl the crate to build various caches used for the output
    let RenderInfo {
        inlined: _,
        external_paths,
        external_typarams,
        deref_trait_did,
        deref_mut_trait_did,
    } = renderinfo;
    let external_paths = external_paths.into_iter()
        .map(|(k, (v, t))| (k, (v, ItemType::from(t))))
        .collect();
    let mut cache = Cache {
        impls: FxHashMap(),
        external_paths: external_paths,
        paths: FxHashMap(),
        implementors: FxHashMap(),
        stack: Vec::new(),
        parent_stack: Vec::new(),
        search_index: Vec::new(),
        parent_is_trait_impl: false,
        extern_locations: FxHashMap(),
        primitive_locations: FxHashMap(),
        stripped_mod: false,
        access_levels: krate.access_levels.clone(),
        orphan_impl_items: Vec::new(),
        traits: mem::replace(&mut krate.external_traits, FxHashMap()),
        deref_trait_did: deref_trait_did,
        deref_mut_trait_did: deref_mut_trait_did,
        typarams: external_typarams,
    };
    // Cache where all our extern crates are located
    for &(n, ref e) in &krate.externs {
        let src_root = match Path::new(&e.src).parent() {
            Some(p) => p.to_path_buf(),
            None => PathBuf::new(),
        };
        cache.extern_locations.insert(n, (e.name.clone(), src_root,
                                          extern_location(e, &cx.dst)));
        let did = DefId { krate: n, index: CRATE_DEF_INDEX };
        cache.external_paths.insert(did, (vec![e.name.to_string()], ItemType::Module));
    }
    // Cache where all known primitives have their documentation located.
    //
    // Favor linking to as local extern as possible, so iterate all crates in
    // reverse topological order.
    for &(_, ref e) in krate.externs.iter().rev() {
        for &(def_id, prim, _) in &e.primitives {
            cache.primitive_locations.insert(prim, def_id);
        }
    }
    // Local definitions win over any extern crate's (inserted last).
    for &(def_id, prim, _) in &krate.primitives {
        cache.primitive_locations.insert(prim, def_id);
    }
    cache.stack.push(krate.name.clone());
    krate = cache.fold_crate(krate);
    // Build our search index
    let index = build_index(&krate, &mut cache);
    // Freeze the cache now that the index has been built. Put an Arc into TLS
    // for future parallelization opportunities
    let cache = Arc::new(cache);
    CACHE_KEY.with(|v| *v.borrow_mut() = cache.clone());
    CURRENT_LOCATION_KEY.with(|s| s.borrow_mut().clear());
    write_shared(&cx, &krate, &*cache, index)?;
    // And finally render the whole crate's documentation
    cx.krate(krate)
}
/// Build the search index from the collected metadata
///
/// Returns a line of JavaScript (`searchIndex["crate"] = {...};`) containing
/// the crate doc line, all indexed items, and the compacted parent-paths
/// table. Mutates `cache`: orphaned impl items whose parent type has since
/// been indexed are appended to `search_index` first.
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
    let mut nodeid_to_pathid = FxHashMap();
    let mut crate_items = Vec::with_capacity(cache.search_index.len());
    let mut crate_paths = Vec::<Json>::new();
    // Destructure so `search_index` and `paths` can be borrowed independently
    // in the loops below.
    let Cache { ref mut search_index,
                ref orphan_impl_items,
                ref mut paths, .. } = *cache;
    // Attach all orphan items to the type's definition if the type
    // has since been learned.
    for &(did, ref item) in orphan_impl_items {
        if let Some(&(ref fqp, _)) = paths.get(&did) {
            search_index.push(IndexItem {
                ty: item.type_(),
                name: item.name.clone().unwrap(),
                path: fqp[..fqp.len() - 1].join("::"),
                desc: plain_summary_line(item.doc_value()),
                parent: Some(did),
                parent_idx: None,
                search_type: get_index_search_type(&item),
            });
        }
    }
    // Reduce `NodeId` in paths into smaller sequential numbers,
    // and prune the paths that do not appear in the index.
    let mut lastpath = String::new();
    let mut lastpathid = 0usize;
    for item in search_index {
        item.parent_idx = item.parent.map(|nodeid| {
            if nodeid_to_pathid.contains_key(&nodeid) {
                *nodeid_to_pathid.get(&nodeid).unwrap()
            } else {
                // First sighting of this parent: assign the next sequential
                // id and emit its (kind, name) pair into the paths table.
                let pathid = lastpathid;
                nodeid_to_pathid.insert(nodeid, pathid);
                lastpathid += 1;
                let &(ref fqp, short) = paths.get(&nodeid).unwrap();
                crate_paths.push(((short as usize), fqp.last().unwrap().clone()).to_json());
                pathid
            }
        });
        // Omit the parent path if it is same to that of the prior item.
        if lastpath == item.path {
            item.path.clear();
        } else {
            lastpath = item.path.clone();
        }
        crate_items.push(item.to_json());
    }
    let crate_doc = krate.module.as_ref().map(|module| {
        plain_summary_line(module.doc_value())
    }).unwrap_or(String::new());
    let mut crate_data = BTreeMap::new();
    crate_data.insert("doc".to_owned(), Json::String(crate_doc));
    crate_data.insert("items".to_owned(), Json::Array(crate_items));
    crate_data.insert("paths".to_owned(), Json::Array(crate_paths));
    // Collect the index into a string
    format!("searchIndex[{}] = {};",
            as_json(&krate.name),
            Json::Object(crate_data))
}
// Emits everything shared between all pages of the output directory: static
// JS/CSS/font assets, the merged cross-crate `search-index.js`, and the
// per-trait implementor lists. Holds a file lock for the whole operation
// because multiple rustdoc processes may target the same directory.
fn write_shared(cx: &Context,
                krate: &clean::Crate,
                cache: &Cache,
                search_index: String) -> Result<(), Error> {
    // Write out the shared files. Note that these are shared among all rustdoc
    // docs placed in the output directory, so this needs to be a synchronized
    // operation with respect to all other rustdocs running around.
    try_err!(mkdir(&cx.dst), &cx.dst);
    // Lock is released when `_lock` drops at the end of this function.
    let _lock = flock::Lock::panicking_new(&cx.dst.join(".lock"), true, true, true);
    // Add all the static files. These may already exist, but we just
    // overwrite them anyway to make sure that they're fresh and up-to-date.
    write(cx.dst.join("jquery.js"),
          include_bytes!("static/jquery-2.1.4.min.js"))?;
    write(cx.dst.join("main.js"),
          include_bytes!("static/main.js"))?;
    write(cx.dst.join("rustdoc.css"),
          include_bytes!("static/rustdoc.css"))?;
    write(cx.dst.join("main.css"),
          include_bytes!("static/styles/main.css"))?;
    if let Some(ref css) = cx.shared.css_file_extension {
        // User-supplied extra stylesheet is copied verbatim as theme.css.
        let mut content = String::new();
        let css = css.as_path();
        let mut f = try_err!(File::open(css), css);
        try_err!(f.read_to_string(&mut content), css);
        let css = cx.dst.join("theme.css");
        let css = css.as_path();
        let mut f = try_err!(File::create(css), css);
        try_err!(write!(f, "{}", &content), css);
    }
    write(cx.dst.join("normalize.css"),
          include_bytes!("static/normalize.css"))?;
    write(cx.dst.join("FiraSans-Regular.woff"),
          include_bytes!("static/FiraSans-Regular.woff"))?;
    write(cx.dst.join("FiraSans-Medium.woff"),
          include_bytes!("static/FiraSans-Medium.woff"))?;
    write(cx.dst.join("FiraSans-LICENSE.txt"),
          include_bytes!("static/FiraSans-LICENSE.txt"))?;
    write(cx.dst.join("Heuristica-Italic.woff"),
          include_bytes!("static/Heuristica-Italic.woff"))?;
    write(cx.dst.join("Heuristica-LICENSE.txt"),
          include_bytes!("static/Heuristica-LICENSE.txt"))?;
    write(cx.dst.join("SourceSerifPro-Regular.woff"),
          include_bytes!("static/SourceSerifPro-Regular.woff"))?;
    write(cx.dst.join("SourceSerifPro-Bold.woff"),
          include_bytes!("static/SourceSerifPro-Bold.woff"))?;
    write(cx.dst.join("SourceSerifPro-LICENSE.txt"),
          include_bytes!("static/SourceSerifPro-LICENSE.txt"))?;
    write(cx.dst.join("SourceCodePro-Regular.woff"),
          include_bytes!("static/SourceCodePro-Regular.woff"))?;
    write(cx.dst.join("SourceCodePro-Semibold.woff"),
          include_bytes!("static/SourceCodePro-Semibold.woff"))?;
    write(cx.dst.join("SourceCodePro-LICENSE.txt"),
          include_bytes!("static/SourceCodePro-LICENSE.txt"))?;
    write(cx.dst.join("LICENSE-MIT.txt"),
          include_bytes!("static/LICENSE-MIT.txt"))?;
    write(cx.dst.join("LICENSE-APACHE.txt"),
          include_bytes!("static/LICENSE-APACHE.txt"))?;
    write(cx.dst.join("COPYRIGHT.txt"),
          include_bytes!("static/COPYRIGHT.txt"))?;
    // Reads back an existing shared JS file, keeping every `key`-prefixed
    // line EXCEPT the entry for `krate` itself (which is being regenerated).
    fn collect(path: &Path, krate: &str,
               key: &str) -> io::Result<Vec<String>> {
        let mut ret = Vec::new();
        if path.exists() {
            for line in BufReader::new(File::open(path)?).lines() {
                let line = line?;
                if !line.starts_with(key) {
                    continue;
                }
                if line.starts_with(&format!(r#"{}["{}"]"#, key, krate)) {
                    continue;
                }
                ret.push(line.to_string());
            }
        }
        Ok(ret)
    }
    // Update the search index
    let dst = cx.dst.join("search-index.js");
    let mut all_indexes = try_err!(collect(&dst, &krate.name, "searchIndex"), &dst);
    all_indexes.push(search_index);
    // Sort the indexes by crate so the file will be generated identically even
    // with rustdoc running in parallel.
    all_indexes.sort();
    let mut w = try_err!(File::create(&dst), &dst);
    try_err!(writeln!(&mut w, "var searchIndex = {{}};"), &dst);
    for index in &all_indexes {
        try_err!(writeln!(&mut w, "{}", *index), &dst);
    }
    try_err!(writeln!(&mut w, "initSearch(searchIndex);"), &dst);
    // Update the list of all implementors for traits
    let dst = cx.dst.join("implementors");
    for (&did, imps) in &cache.implementors {
        // Private modules can leak through to this phase of rustdoc, which
        // could contain implementations for otherwise private types. In some
        // rare cases we could find an implementation for an item which wasn't
        // indexed, so we just skip this step in that case.
        //
        // FIXME: this is a vague explanation for why this can't be a `get`, in
        // theory it should be...
        let &(ref remote_path, remote_item_type) = match cache.paths.get(&did) {
            Some(p) => p,
            None => match cache.external_paths.get(&did) {
                Some(p) => p,
                None => continue,
            }
        };
        let mut implementors = format!(r#"implementors["{}"] = ["#, krate.name);
        for imp in imps {
            // If the trait and implementation are in the same crate, then
            // there's no need to emit information about it (there's inlining
            // going on). If they're in different crates then the crate defining
            // the trait will be interested in our implementation.
            if imp.def_id.krate == did.krate { continue }
            write!(implementors, r#""{}","#, imp.impl_).unwrap();
        }
        implementors.push_str("];");
        // Mirror the trait's module hierarchy under `implementors/`.
        let mut mydst = dst.clone();
        for part in &remote_path[..remote_path.len() - 1] {
            mydst.push(part);
        }
        try_err!(fs::create_dir_all(&mydst), &mydst);
        mydst.push(&format!("{}.{}.js",
                            remote_item_type.css_class(),
                            remote_path[remote_path.len() - 1]));
        let mut all_implementors = try_err!(collect(&mydst, &krate.name, "implementors"), &mydst);
        all_implementors.push(implementors);
        // Sort the implementors by crate so the file will be generated
        // identically even with rustdoc running in parallel.
        all_implementors.sort();
        let mut f = try_err!(File::create(&mydst), &mydst);
        try_err!(writeln!(&mut f, "(function() {{var implementors = {{}};"), &mydst);
        for implementor in &all_implementors {
            try_err!(writeln!(&mut f, "{}", *implementor), &mydst);
        }
        try_err!(writeln!(&mut f, "{}", r"
            if (window.register_implementors) {
                window.register_implementors(implementors);
            } else {
                window.pending_implementors = implementors;
            }
        "), &mydst);
        try_err!(writeln!(&mut f, r"}})()"), &mydst)
    }
    Ok(())
}
/// Renders every local source file under `dst/src/<crate name>/` by running
/// the `SourceCollector` pass over the crate, then hands the crate back.
fn render_sources(dst: &Path, scx: &mut SharedContext,
                  krate: clean::Crate) -> Result<clean::Crate, Error> {
    info!("emitting source files");
    // Create `src/` and `src/<crate>/` one level at a time; `mkdir` does not
    // create intermediate directories.
    let src_dst = dst.join("src");
    try_err!(mkdir(&src_dst), &src_dst);
    let crate_dst = src_dst.join(&krate.name);
    try_err!(mkdir(&crate_dst), &crate_dst);
    let mut collector = SourceCollector {
        dst: crate_dst,
        scx: scx,
    };
    Ok(collector.fold_crate(krate))
}
/// Writes the entire contents of a string to a destination, not attempting to
/// catch any errors.
fn write(dst: PathBuf, contents: &[u8]) -> Result<(), Error> {
    let mut file = try_err!(File::create(&dst), &dst);
    try_err!(file.write_all(contents), &dst);
    Ok(())
}
/// Makes a directory on the filesystem, failing the thread if an error occurs
/// and skipping if the directory already exists.
///
/// Note that this also handles races as rustdoc is likely to be run
/// concurrently against another invocation.
fn mkdir(path: &Path) -> io::Result<()> {
    if let Err(e) = fs::create_dir(path) {
        // A concurrent rustdoc may have created it between our check and the
        // syscall; "already exists" is therefore success.
        if e.kind() == io::ErrorKind::AlreadyExists {
            return Ok(());
        }
        return Err(e);
    }
    Ok(())
}
/// Takes a path to a source file and cleans the path to it. This canonicalizes
/// things like ".." to components which preserve the "top down" hierarchy of a
/// static HTML tree. Each component in the cleaned path will be passed as an
/// argument to `f`. The very last component of the path (ie the file name) will
/// be passed to `f` if `keep_filename` is true, and ignored otherwise.
// FIXME (#9639): The closure should deal with &[u8] instead of &str
// FIXME (#9639): This is too conservative, rejecting non-UTF-8 paths
fn clean_srcpath<F>(src_root: &Path, p: &Path, keep_filename: bool, mut f: F) where
    F: FnMut(&str),
{
    // make it relative, if possible
    let relative = p.strip_prefix(src_root).unwrap_or(p);
    let mut components = relative.components().peekable();
    while let Some(component) = components.next() {
        let is_last = components.peek().is_none();
        if is_last && !keep_filename {
            // Drop the trailing file name unless the caller wants it.
            break;
        }
        match component {
            // ".." becomes a literal "up" directory in the HTML tree.
            Component::ParentDir => f("up"),
            Component::Normal(name) => f(name.to_str().unwrap()),
            // Root/prefix/"." components carry no hierarchy information.
            _ => {}
        }
    }
}
/// Attempts to find where an external crate is located, given that we're
/// rendering in to the specified source destination.
fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation {
    // See if there's documentation generated into the local directory
    if dst.join(&e.name).is_dir() {
        return Local;
    }
    // Failing that, see if there's an attribute specifying where to find this
    // external crate
    for attr in e.attrs.lists("doc") {
        if !attr.check_name("html_root_url") {
            continue;
        }
        if let Some(value) = attr.value_str() {
            let mut url = value.to_string();
            // Normalize to a trailing slash so path joins stay well-formed.
            if !url.ends_with("/") {
                url.push('/')
            }
            return Remote(url);
        }
    }
    Unknown // Well, at least we tried.
}
impl<'a> DocFolder for SourceCollector<'a> {
    // Emits the HTML source page for each local item's file (once per file)
    // and then recurses into the item's children unchanged.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        // If we're including source files, and we haven't seen this file yet,
        // then we need to render it out to the filesystem.
        if self.scx.include_sources
            // skip all invalid spans
            && item.source.filename != ""
            // skip non-local items
            && item.def_id.is_local()
            // Macros from other libraries get special filenames which we can
            // safely ignore.
            && !(item.source.filename.starts_with("<")
                && item.source.filename.ends_with("macros>")) {
            // If it turns out that we couldn't read this file, then we probably
            // can't read any of the files (generating html output from json or
            // something like that), so just don't include sources for the
            // entire crate. The other option is maintaining this mapping on a
            // per-file basis, but that's probably not worth it...
            self.scx
                .include_sources = match self.emit_source(&item.source.filename) {
                Ok(()) => true,
                Err(e) => {
                    println!("warning: source code was requested to be rendered, \
                              but processing `{}` had an error: {}",
                             item.source.filename, e);
                    println!("         skipping rendering of source code");
                    false
                }
            };
        }
        self.fold_item_recur(item)
    }
}
impl<'a> SourceCollector<'a> {
    /// Renders the given filename into its corresponding HTML source file.
    ///
    /// Idempotent per file: an already-emitted path is recorded in
    /// `scx.local_sources` (mapping source path -> emitted href) and skipped.
    fn emit_source(&mut self, filename: &str) -> io::Result<()> {
        let p = PathBuf::from(filename);
        if self.scx.local_sources.contains_key(&p) {
            // We've already emitted this source
            return Ok(());
        }
        let mut contents = Vec::new();
        File::open(&p).and_then(|mut f| f.read_to_end(&mut contents))?;
        let contents = str::from_utf8(&contents).unwrap();
        // Remove the utf-8 BOM if any
        let contents = if contents.starts_with("\u{feff}") {
            &contents[3..]
        } else {
            contents
        };
        // Create the intermediate directories, mirroring the source tree
        // under `self.dst`, while accumulating the relative href and the
        // "../" chain back to the documentation root.
        let mut cur = self.dst.clone();
        let mut root_path = String::from("../../");
        let mut href = String::new();
        clean_srcpath(&self.scx.src_root, &p, false, |component| {
            cur.push(component);
            mkdir(&cur).unwrap();
            root_path.push_str("../");
            href.push_str(component);
            href.push('/');
        });
        // The page file is the original file name with ".html" appended.
        let mut fname = p.file_name().expect("source has no filename")
                         .to_os_string();
        fname.push(".html");
        cur.push(&fname);
        href.push_str(&fname.to_string_lossy());
        let mut w = BufWriter::new(File::create(&cur)?);
        let title = format!("{} -- source", cur.file_name().unwrap()
                                               .to_string_lossy());
        let desc = format!("Source to the Rust file `{}`.", filename);
        let page = layout::Page {
            title: &title,
            css_class: "source",
            root_path: &root_path,
            description: &desc,
            keywords: BASIC_KEYWORDS,
        };
        layout::render(&mut w, &self.scx.layout,
                       &page, &(""), &Source(contents),
                       self.scx.css_file_extension.is_some())?;
        w.flush()?;
        self.scx.local_sources.insert(p, href);
        Ok(())
    }
}
impl DocFolder for Cache {
    // The main crate-crawling pass. For every item this (1) records search
    // index entries, (2) records fully qualified paths, (3) collects trait
    // implementors and impl blocks, all while maintaining `stack`,
    // `parent_stack`, `stripped_mod` and `parent_is_trait_impl` as
    // save/restore state around the recursive descent. Statement order is
    // significant throughout.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        // If this is a stripped module,
        // we don't want it or its children in the search index.
        let orig_stripped_mod = match item.inner {
            clean::StrippedItem(box clean::ModuleItem(..)) => {
                mem::replace(&mut self.stripped_mod, true)
            }
            _ => self.stripped_mod,
        };
        // Register any generics to their corresponding string. This is used
        // when pretty-printing types.
        if let Some(generics) = item.inner.generics() {
            self.generics(generics);
        }
        // Propagate a trait method's documentation to all implementors of the
        // trait.
        if let clean::TraitItem(ref t) = item.inner {
            self.traits.entry(item.def_id).or_insert_with(|| t.clone());
        }
        // Collect all the implementors of traits.
        if let clean::ImplItem(ref i) = item.inner {
            if let Some(did) = i.trait_.def_id() {
                self.implementors.entry(did).or_insert(vec![]).push(Implementor {
                    def_id: item.def_id,
                    stability: item.stability.clone(),
                    impl_: i.clone(),
                });
            }
        }
        // Index this method for searching later on.
        if let Some(ref s) = item.name {
            // Determine the search-index parent of the item (and whether it
            // is an item of an inherent impl, which may need deferral below).
            let (parent, is_inherent_impl_item) = match item.inner {
                clean::StrippedItem(..) => ((None, None), false),
                clean::AssociatedConstItem(..) |
                clean::TypedefItem(_, true) if self.parent_is_trait_impl => {
                    // skip associated items in trait impls
                    ((None, None), false)
                }
                clean::AssociatedTypeItem(..) |
                clean::TyMethodItem(..) |
                clean::StructFieldItem(..) |
                clean::VariantItem(..) => {
                    ((Some(*self.parent_stack.last().unwrap()),
                      Some(&self.stack[..self.stack.len() - 1])),
                     false)
                }
                clean::MethodItem(..) | clean::AssociatedConstItem(..) => {
                    if self.parent_stack.is_empty() {
                        ((None, None), false)
                    } else {
                        let last = self.parent_stack.last().unwrap();
                        let did = *last;
                        let path = match self.paths.get(&did) {
                            // The current stack not necessarily has correlation
                            // for where the type was defined. On the other
                            // hand, `paths` always has the right
                            // information if present.
                            Some(&(ref fqp, ItemType::Trait)) |
                            Some(&(ref fqp, ItemType::Struct)) |
                            Some(&(ref fqp, ItemType::Union)) |
                            Some(&(ref fqp, ItemType::Enum)) =>
                                Some(&fqp[..fqp.len() - 1]),
                            Some(..) => Some(&*self.stack),
                            None => None
                        };
                        ((Some(*last), path), true)
                    }
                }
                _ => ((None, Some(&*self.stack)), false)
            };
            match parent {
                (parent, Some(path)) if is_inherent_impl_item || (!self.stripped_mod) => {
                    debug_assert!(!item.is_stripped());
                    // A crate has a module at its root, containing all items,
                    // which should not be indexed. The crate-item itself is
                    // inserted later on when serializing the search-index.
                    if item.def_id.index != CRATE_DEF_INDEX {
                        self.search_index.push(IndexItem {
                            ty: item.type_(),
                            name: s.to_string(),
                            path: path.join("::").to_string(),
                            desc: plain_summary_line(item.doc_value()),
                            parent: parent,
                            parent_idx: None,
                            search_type: get_index_search_type(&item),
                        });
                    }
                }
                (Some(parent), None) if is_inherent_impl_item => {
                    // We have a parent, but we don't know where they're
                    // defined yet. Wait for later to index this item.
                    self.orphan_impl_items.push((parent, item.clone()));
                }
                _ => {}
            }
        }
        // Keep track of the fully qualified path for this item.
        let pushed = match item.name {
            Some(ref n) if !n.is_empty() => {
                self.stack.push(n.to_string());
                true
            }
            _ => false,
        };
        match item.inner {
            clean::StructItem(..) | clean::EnumItem(..) |
            clean::TypedefItem(..) | clean::TraitItem(..) |
            clean::FunctionItem(..) | clean::ModuleItem(..) |
            clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
            clean::ConstantItem(..) | clean::StaticItem(..) |
            clean::UnionItem(..)
            if !self.stripped_mod => {
                // Reexported items mean that the same id can show up twice
                // in the rustdoc ast that we're looking at. We know,
                // however, that a reexported item doesn't show up in the
                // `public_items` map, so we can skip inserting into the
                // paths map if there was already an entry present and we're
                // not a public item.
                if
                    !self.paths.contains_key(&item.def_id) ||
                    self.access_levels.is_public(item.def_id)
                {
                    self.paths.insert(item.def_id,
                                      (self.stack.clone(), item.type_()));
                }
            }
            // Link variants to their parent enum because pages aren't emitted
            // for each variant.
            clean::VariantItem(..) if !self.stripped_mod => {
                let mut stack = self.stack.clone();
                stack.pop();
                self.paths.insert(item.def_id, (stack, ItemType::Enum));
            }
            clean::PrimitiveItem(..) if item.visibility.is_some() => {
                self.paths.insert(item.def_id, (self.stack.clone(),
                                                item.type_()));
            }
            _ => {}
        }
        // Maintain the parent stack
        let orig_parent_is_trait_impl = self.parent_is_trait_impl;
        let parent_pushed = match item.inner {
            clean::TraitItem(..) | clean::EnumItem(..) |
            clean::StructItem(..) | clean::UnionItem(..) => {
                self.parent_stack.push(item.def_id);
                self.parent_is_trait_impl = false;
                true
            }
            clean::ImplItem(ref i) => {
                self.parent_is_trait_impl = i.trait_.is_some();
                match i.for_ {
                    clean::ResolvedPath{ did, .. } => {
                        self.parent_stack.push(did);
                        true
                    }
                    ref t => {
                        // Impl on a primitive: look up the primitive's own
                        // `DefId`, if we know one.
                        let prim_did = t.primitive_type().and_then(|t| {
                            self.primitive_locations.get(&t).cloned()
                        });
                        match prim_did {
                            Some(did) => {
                                self.parent_stack.push(did);
                                true
                            }
                            None => false,
                        }
                    }
                }
            }
            _ => false
        };
        // Once we've recursively found all the generics, hoard off all the
        // implementations elsewhere.
        let ret = self.fold_item_recur(item).and_then(|item| {
            if let clean::Item { inner: clean::ImplItem(_), .. } = item {
                // Figure out the id of this impl. This may map to a
                // primitive rather than always to a struct/enum.
                // Note: matching twice to restrict the lifetime of the `i` borrow.
                let did = if let clean::Item { inner: clean::ImplItem(ref i), .. } = item {
                    match i.for_ {
                        clean::ResolvedPath { did, .. } |
                        clean::BorrowedRef {
                            type_: box clean::ResolvedPath { did, .. }, ..
                        } => {
                            Some(did)
                        }
                        ref t => {
                            t.primitive_type().and_then(|t| {
                                self.primitive_locations.get(&t).cloned()
                            })
                        }
                    }
                } else {
                    unreachable!()
                };
                if let Some(did) = did {
                    self.impls.entry(did).or_insert(vec![]).push(Impl {
                        impl_item: item,
                    });
                }
                // Impl items are absorbed into `self.impls` rather than kept
                // in the tree.
                None
            } else {
                Some(item)
            }
        });
        // Restore the save/restore state for this recursion level.
        if pushed { self.stack.pop().unwrap(); }
        if parent_pushed { self.parent_stack.pop().unwrap(); }
        self.stripped_mod = orig_stripped_mod;
        self.parent_is_trait_impl = orig_parent_is_trait_impl;
        ret
    }
}
impl<'a> Cache {
    /// Records the names of the given generics' type parameters, keyed by
    /// their `DefId`, so pretty-printed types can refer back to them.
    fn generics(&mut self, generics: &clean::Generics) {
        self.typarams.extend(
            generics.type_params.iter().map(|typ| (typ.did, typ.name.clone()))
        );
    }
}
impl Context {
    /// String representation of how to get back to the root path of the 'doc/'
    /// folder in terms of a relative URL.
    fn root_path(&self) -> String {
        repeat("../").take(self.current.len()).collect::<String>()
    }
    /// Recurse in the directory structure and change the "root path" to make
    /// sure it always points to the top (relatively).
    fn recurse<T, F>(&mut self, s: String, f: F) -> T where
        F: FnOnce(&mut Context) -> T,
    {
        if s.is_empty() {
            panic!("Unexpected empty destination: {:?}", self.current);
        }
        let prev = self.dst.clone();
        self.dst.push(&s);
        self.current.push(s);
        info!("Recursing into {}", self.dst.display());
        let ret = f(self);
        info!("Recursed; leaving {}", self.dst.display());
        // Go back to where we were at
        self.dst = prev;
        self.current.pop().unwrap();
        ret
    }
    /// Main method for rendering a crate.
    ///
    /// This currently isn't parallelized, but it'd be pretty easy to add
    /// parallelization to this function.
    fn krate(self, mut krate: clean::Crate) -> Result<(), Error> {
        let mut item = match krate.module.take() {
            Some(i) => i,
            None => return Ok(()),
        };
        item.name = Some(krate.name);
        // Render the crate documentation
        // Work queue of (context, item) pairs; `item` re-fills it via the
        // closure with each sub-item it discovers.
        let mut work = vec![(self, item)];
        while let Some((mut cx, item)) = work.pop() {
            cx.item(item, |cx, item| {
                work.push((cx.clone(), item))
            })?
        }
        Ok(())
    }
    // Writes the full HTML page (or a redirect page, when
    // `render_redirect_pages` is set) for `it` into `writer`.
    fn render_item(&self,
                   writer: &mut io::Write,
                   it: &clean::Item,
                   pushname: bool)
                   -> io::Result<()> {
        // A little unfortunate that this is done like this, but it sure
        // does make formatting *a lot* nicer.
        CURRENT_LOCATION_KEY.with(|slot| {
            *slot.borrow_mut() = self.current.clone();
        });
        let mut title = if it.is_primitive() {
            // No need to include the namespace for primitive types
            String::new()
        } else {
            self.current.join("::")
        };
        if pushname {
            if !title.is_empty() {
                title.push_str("::");
            }
            title.push_str(it.name.as_ref().unwrap());
        }
        title.push_str(" - Rust");
        let tyname = it.type_().css_class();
        let desc = if it.is_crate() {
            format!("API documentation for the Rust `{}` crate.",
                    self.shared.layout.krate)
        } else {
            format!("API documentation for the Rust `{}` {} in crate `{}`.",
                    it.name.as_ref().unwrap(), tyname, self.shared.layout.krate)
        };
        let keywords = make_item_keywords(it);
        let page = layout::Page {
            css_class: tyname,
            root_path: &self.root_path(),
            title: &title,
            description: &desc,
            keywords: &keywords,
        };
        // Fresh id-collision table per page (keeping the skeleton's ids).
        reset_ids(true);
        if !self.render_redirect_pages {
            layout::render(writer, &self.shared.layout, &page,
                           &Sidebar{ cx: self, item: it },
                           &Item{ cx: self, item: it },
                           self.shared.css_file_extension.is_some())?;
        } else {
            // Stripped items get a redirect to their canonical location, if
            // one is known; otherwise nothing is written.
            let mut url = self.root_path();
            if let Some(&(ref names, ty)) = cache().paths.get(&it.def_id) {
                for name in &names[..names.len() - 1] {
                    url.push_str(name);
                    url.push_str("/");
                }
                url.push_str(&item_path(ty, names.last().unwrap()));
                layout::redirect(writer, &url)?;
            }
        }
        Ok(())
    }
    /// Non-parallelized version of rendering an item. This will take the input
    /// item, render its contents, and then invoke the specified closure with
    /// all sub-items which need to be rendered.
    ///
    /// The rendering driver uses this closure to queue up more work.
    fn item<F>(&mut self, item: clean::Item, mut f: F) -> Result<(), Error> where
        F: FnMut(&mut Context, clean::Item),
    {
        // Stripped modules survive the rustdoc passes (i.e. `strip-private`)
        // if they contain impls for public types. These modules can also
        // contain items such as publicly reexported structures.
        //
        // External crates will provide links to these structures, so
        // these modules are recursed into, but not rendered normally
        // (a flag on the context).
        if !self.render_redirect_pages {
            self.render_redirect_pages = maybe_ignore_item(&item);
        }
        if item.is_mod() {
            // modules are special because they add a namespace. We also need to
            // recurse into the items of the module as well.
            let name = item.name.as_ref().unwrap().to_string();
            // Wrapped in Option so the FnMut-capturing closure can move it
            // out exactly once.
            let mut item = Some(item);
            self.recurse(name, |this| {
                let item = item.take().unwrap();
                let mut buf = Vec::new();
                this.render_item(&mut buf, &item, false).unwrap();
                // buf will be empty if the module is stripped and there is no redirect for it
                if !buf.is_empty() {
                    let joint_dst = this.dst.join("index.html");
                    try_err!(fs::create_dir_all(&this.dst), &this.dst);
                    let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
                    try_err!(dst.write_all(&buf), &joint_dst);
                }
                let m = match item.inner {
                    clean::StrippedItem(box clean::ModuleItem(m)) |
                    clean::ModuleItem(m) => m,
                    _ => unreachable!()
                };
                // Render sidebar-items.js used throughout this module.
                if !this.render_redirect_pages {
                    let items = this.build_sidebar_items(&m);
                    let js_dst = this.dst.join("sidebar-items.js");
                    let mut js_out = BufWriter::new(try_err!(File::create(&js_dst), &js_dst));
                    try_err!(write!(&mut js_out, "initSidebarItems({});",
                                    as_json(&items)), &js_dst);
                }
                for item in m.items {
                    f(this,item);
                }
                Ok(())
            })?;
        } else if item.name.is_some() {
            let mut buf = Vec::new();
            self.render_item(&mut buf, &item, true).unwrap();
            // buf will be empty if the item is stripped and there is no redirect for it
            if !buf.is_empty() {
                let name = item.name.as_ref().unwrap();
                let item_type = item.type_();
                let file_name = &item_path(item_type, name);
                let joint_dst = self.dst.join(file_name);
                try_err!(fs::create_dir_all(&self.dst), &self.dst);
                let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
                try_err!(dst.write_all(&buf), &joint_dst);
                // Redirect from a sane URL using the namespace to Rustdoc's
                // URL for the page.
                // `create_new` keeps an existing page (e.g. from a name
                // collision across namespaces) intact.
                let redir_name = format!("{}.{}.html", name, item_type.name_space());
                let redir_dst = self.dst.join(redir_name);
                if let Ok(mut redirect_out) = OpenOptions::new().create_new(true)
                                                                .write(true)
                                                                .open(&redir_dst) {
                    try_err!(layout::redirect(&mut redirect_out, file_name), &redir_dst);
                }
                // If the item is a macro, redirect from the old macro URL (with !)
                // to the new one (without).
                // FIXME(#35705) remove this redirect.
                if item_type == ItemType::Macro {
                    let redir_name = format!("{}.{}!.html", item_type, name);
                    let redir_dst = self.dst.join(redir_name);
                    let mut redirect_out = try_err!(File::create(&redir_dst), &redir_dst);
                    try_err!(layout::redirect(&mut redirect_out, file_name), &redir_dst);
                }
            }
        }
        Ok(())
    }
    // Collects the module's items into css-class -> [(name, summary)] groups
    // for emission as this module's `sidebar-items.js`.
    fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
        // BTreeMap instead of HashMap to get a sorted output
        let mut map = BTreeMap::new();
        for item in &m.items {
            if maybe_ignore_item(item) { continue }
            let short = item.type_().css_class();
            let myname = match item.name {
                None => continue,
                Some(ref s) => s.to_string(),
            };
            let short = short.to_string();
            map.entry(short).or_insert(vec![])
               .push((myname, Some(plain_summary_line(item.doc_value()))));
        }
        for (_, items) in &mut map {
            items.sort();
        }
        map
    }
}
impl<'a> Item<'a> {
    /// Generate a url appropriate for an `href` attribute back to the source of
    /// this item.
    ///
    /// The url generated, when clicked, will redirect the browser back to the
    /// original source code.
    ///
    /// If `None` is returned, then a source link couldn't be generated. This
    /// may happen, for example, with externally inlined items where the source
    /// of their crate documentation isn't known.
    fn src_href(&self) -> Option<String> {
        // `root` is the relative prefix back to the documentation root; for
        // crates documented remotely it is replaced by the remote URL below.
        let mut root = self.cx.root_path();
        let cache = cache();
        let mut path = String::new();
        let (krate, path) = if self.item.def_id.is_local() {
            // Local item: map the source file to its rendered copy, if the
            // renderer produced one.
            let path = PathBuf::from(&self.item.source.filename);
            if let Some(path) = self.cx.shared.local_sources.get(&path) {
                (&self.cx.shared.layout.krate, path)
            } else {
                return None;
            }
        } else {
            // Macros from other libraries get special filenames which we can
            // safely ignore.
            if self.item.source.filename.starts_with("<") &&
               self.item.source.filename.ends_with("macros>") {
                return None;
            }
            // Figure out where the external crate's sources live, if known.
            let (krate, src_root) = match cache.extern_locations.get(&self.item.def_id.krate) {
                Some(&(ref name, ref src, Local)) => (name, src),
                Some(&(ref name, ref src, Remote(ref s))) => {
                    root = s.to_string();
                    (name, src)
                }
                Some(&(_, _, Unknown)) | None => return None,
            };
            let file = Path::new(&self.item.source.filename);
            clean_srcpath(&src_root, file, false, |component| {
                path.push_str(component);
                path.push('/');
            });
            // Rendered source pages append `.html` to the original file name.
            let mut fname = file.file_name().expect("source has no filename")
                                .to_os_string();
            fname.push(".html");
            path.push_str(&fname.to_string_lossy());
            (krate, &path)
        };
        // Fragment highlights either a single line or an inclusive range.
        let lines = if self.item.source.loline == self.item.source.hiline {
            format!("{}", self.item.source.loline)
        } else {
            format!("{}-{}", self.item.source.loline, self.item.source.hiline)
        };
        Some(format!("{root}src/{krate}/{path}#{lines}",
                     root = root,
                     krate = krate,
                     path = path,
                     lines = lines))
    }
}
impl<'a> fmt::Display for Item<'a> {
    /// Renders the complete page body for one item: the breadcrumb/title
    /// header, the stability-since marker, the `[src]` link, and finally
    /// the kind-specific body via the matching `item_*` function.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        debug_assert!(!self.item.is_stripped());
        // Write the breadcrumb trail header for the top
        write!(fmt, "\n<h1 class='fqn'><span class='in-band'>")?;
        // Human-readable kind prefix for the page title.
        match self.item.inner {
            clean::ModuleItem(ref m) => if m.is_crate {
                write!(fmt, "Crate ")?;
            } else {
                write!(fmt, "Module ")?;
            },
            clean::FunctionItem(..) | clean::ForeignFunctionItem(..) =>
                write!(fmt, "Function ")?,
            clean::TraitItem(..) => write!(fmt, "Trait ")?,
            clean::StructItem(..) => write!(fmt, "Struct ")?,
            clean::UnionItem(..) => write!(fmt, "Union ")?,
            clean::EnumItem(..) => write!(fmt, "Enum ")?,
            clean::TypedefItem(..) => write!(fmt, "Type Definition ")?,
            clean::MacroItem(..) => write!(fmt, "Macro ")?,
            clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?,
            clean::StaticItem(..) | clean::ForeignStaticItem(..) =>
                write!(fmt, "Static ")?,
            clean::ConstantItem(..) => write!(fmt, "Constant ")?,
            _ => {
                // We don't generate pages for any other type.
                unreachable!();
            }
        }
        // Each ancestor module in the current path becomes a relative link
        // back to that module's index page.
        if !self.item.is_primitive() {
            let cur = &self.cx.current;
            let amt = if self.item.is_mod() { cur.len() - 1 } else { cur.len() };
            for (i, component) in cur.iter().enumerate().take(amt) {
                write!(fmt, "<a href='{}index.html'>{}</a>::<wbr>",
                       repeat("../").take(cur.len() - i - 1)
                                    .collect::<String>(),
                       component)?;
            }
        }
        write!(fmt, "<a class='{}' href=''>{}</a>",
               self.item.type_(), self.item.name.as_ref().unwrap())?;
        write!(fmt, "</span>")?; // in-band
        write!(fmt, "<span class='out-of-band'>")?;
        if let Some(version) = self.item.stable_since() {
            write!(fmt, "<span class='since' title='Stable since Rust version {0}'>{0}</span>",
                   version)?;
        }
        write!(fmt,
               r##"<span id='render-detail'>
<a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs">
[<span class='inner'>−</span>]
</a>
</span>"##)?;
        // Write `src` tag
        //
        // When this item is part of a `pub use` in a downstream crate, the
        // [src] link in the downstream documentation will actually come back to
        // this page, and this link will be auto-clicked. The `id` attribute is
        // used to find the link to auto-click.
        if self.cx.shared.include_sources && !self.item.is_primitive() {
            if let Some(l) = self.src_href() {
                write!(fmt, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                       l, "goto source code")?;
            }
        }
        write!(fmt, "</span>")?; // out-of-band
        write!(fmt, "</h1>\n")?;
        // Delegate the page body to the renderer for this item kind.
        match self.item.inner {
            clean::ModuleItem(ref m) => {
                item_module(fmt, self.cx, self.item, &m.items)
            }
            clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) =>
                item_function(fmt, self.cx, self.item, f),
            clean::TraitItem(ref t) => item_trait(fmt, self.cx, self.item, t),
            clean::StructItem(ref s) => item_struct(fmt, self.cx, self.item, s),
            clean::UnionItem(ref s) => item_union(fmt, self.cx, self.item, s),
            clean::EnumItem(ref e) => item_enum(fmt, self.cx, self.item, e),
            clean::TypedefItem(ref t, _) => item_typedef(fmt, self.cx, self.item, t),
            clean::MacroItem(ref m) => item_macro(fmt, self.cx, self.item, m),
            clean::PrimitiveItem(ref p) => item_primitive(fmt, self.cx, self.item, p),
            clean::StaticItem(ref i) | clean::ForeignStaticItem(ref i) =>
                item_static(fmt, self.cx, self.item, i),
            clean::ConstantItem(ref c) => item_constant(fmt, self.cx, self.item, c),
            _ => {
                // We don't generate pages for any other type.
                unreachable!();
            }
        }
    }
}
/// Computes the output file path for an item's page: modules get a
/// directory containing an `index.html`, everything else a flat
/// `<css-class>.<name>.html` file.
fn item_path(ty: ItemType, name: &str) -> String {
    if let ItemType::Module = ty {
        format!("{}/index.html", name)
    } else {
        format!("{}.{}.html", ty.css_class(), name)
    }
}
/// Builds the fully-qualified path of `item` (e.g. `krate::module::Name`)
/// from the renderer's current module path plus the item's own name.
fn full_path(cx: &Context, item: &clean::Item) -> String {
    format!("{}::{}", cx.current.join("::"), item.name.as_ref().unwrap())
}
/// Returns the first "paragraph" of `s`: the run of leading lines up to
/// (but not including) the first blank or whitespace-only line, re-joined
/// with `\n`. `None` yields the empty string.
fn shorter<'a>(s: Option<&'a str>) -> String {
    let text = match s {
        Some(text) => text,
        None => return String::new(),
    };
    let leading: Vec<&str> = text.lines()
                                 .take_while(|line| !line.trim().is_empty())
                                 .collect();
    leading.join("\n")
}
/// Flattens an item's doc comment into a single plain-text summary line:
/// takes the first paragraph, joins its lines with spaces, and strips
/// markdown formatting.
#[inline]
fn plain_summary_line(s: Option<&str>) -> String {
    let flattened = shorter(s).replace("\n", " ");
    markdown::plain_summary_line(&flattened)
}
/// Renders an item's complete documentation section: stability notices
/// first, then the full doc-comment body.
fn document(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Result {
    document_stability(w, cx, item).and_then(|_| document_full(w, item))
}
/// Renders the abbreviated one-line documentation for an item (used in
/// listings such as trait-method summaries).
///
/// Multi-line docs are truncated to their first paragraph and suffixed
/// with a "[Read more]" link pointing at the item's full documentation.
fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLink) -> fmt::Result {
    if let Some(s) = item.doc_value() {
        // Compute the summary once instead of once per branch.
        let summary = plain_summary_line(Some(s));
        let markdown = if s.contains('\n') {
            // There is more than the summary shows; link to the full docs.
            format!("{} [Read more]({})", summary, naive_assoc_href(item, link))
        } else {
            // The summary *is* the whole doc; `format!("{}", x)` on an
            // already-owned String was redundant.
            summary
        };
        write!(w, "<div class='docblock'>{}</div>", Markdown(&markdown))?;
    }
    Ok(())
}
/// Renders the item's full doc comment as a markdown block, or nothing
/// at all when the item is undocumented.
fn document_full(w: &mut fmt::Formatter, item: &clean::Item) -> fmt::Result {
    match item.doc_value() {
        Some(doc) => write!(w, "<div class='docblock'>{}</div>", Markdown(doc)),
        None => Ok(()),
    }
}
/// Emits the stability/deprecation banner(s) for an item, if any, wrapped
/// in a single `<div class='stability'>` container.
fn document_stability(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Result {
    let notices = short_stability(item, cx, true);
    if notices.is_empty() {
        return Ok(());
    }
    write!(w, "<div class='stability'>")?;
    for notice in &notices {
        write!(w, "{}", notice)?;
    }
    write!(w, "</div>")?;
    Ok(())
}
/// Renders a module's page body: the module's own documentation followed
/// by one section (`<h2>` + `<table>`) per item kind, listing every
/// contained item with its one-line summary and stability tags.
fn item_module(w: &mut fmt::Formatter, cx: &Context,
               item: &clean::Item, items: &[clean::Item]) -> fmt::Result {
    document(w, cx, item)?;

    // Indices of the items that should actually appear in the listing.
    let mut indices = (0..items.len()).filter(|i| {
        if let clean::DefaultImplItem(..) = items[*i].inner {
            return false;
        }
        !maybe_ignore_item(&items[*i])
    }).collect::<Vec<usize>>();

    // the order of item types in the listing
    fn reorder(ty: ItemType) -> u8 {
        match ty {
            ItemType::ExternCrate => 0,
            ItemType::Import => 1,
            ItemType::Primitive => 2,
            ItemType::Module => 3,
            ItemType::Macro => 4,
            ItemType::Struct => 5,
            ItemType::Enum => 6,
            ItemType::Constant => 7,
            ItemType::Static => 8,
            ItemType::Trait => 9,
            ItemType::Function => 10,
            ItemType::Typedef => 12,
            ItemType::Union => 13,
            _ => 14 + ty as u8,
        }
    }

    // Sort key: kind order first, then stable before unstable, then name.
    fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize) -> Ordering {
        let ty1 = i1.type_();
        let ty2 = i2.type_();
        if ty1 != ty2 {
            return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2))
        }
        let s1 = i1.stability.as_ref().map(|s| s.level);
        let s2 = i2.stability.as_ref().map(|s| s.level);
        match (s1, s2) {
            (Some(stability::Unstable), Some(stability::Stable)) => return Ordering::Greater,
            (Some(stability::Stable), Some(stability::Unstable)) => return Ordering::Less,
            _ => {}
        }
        i1.name.cmp(&i2.name)
    }

    indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2));
    debug!("{:?}", indices);

    // `curty` tracks the kind of the current section; a new header and
    // table are opened whenever the kind changes.
    let mut curty = None;
    for &idx in &indices {
        let myitem = &items[idx];
        if myitem.is_stripped() {
            continue;
        }

        let myty = Some(myitem.type_());
        if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) {
            // Put `extern crate` and `use` re-exports in the same section.
            curty = myty;
        } else if myty != curty {
            if curty.is_some() {
                write!(w, "</table>")?;
            }
            curty = myty;
            // Section anchor id and human-readable heading per item kind.
            let (short, name) = match myty.unwrap() {
                ItemType::ExternCrate |
                ItemType::Import => ("reexports", "Reexports"),
                ItemType::Module => ("modules", "Modules"),
                ItemType::Struct => ("structs", "Structs"),
                ItemType::Union => ("unions", "Unions"),
                ItemType::Enum => ("enums", "Enums"),
                ItemType::Function => ("functions", "Functions"),
                ItemType::Typedef => ("types", "Type Definitions"),
                ItemType::Static => ("statics", "Statics"),
                ItemType::Constant => ("constants", "Constants"),
                ItemType::Trait => ("traits", "Traits"),
                ItemType::Impl => ("impls", "Implementations"),
                ItemType::TyMethod => ("tymethods", "Type Methods"),
                ItemType::Method => ("methods", "Methods"),
                ItemType::StructField => ("fields", "Struct Fields"),
                ItemType::Variant => ("variants", "Variants"),
                ItemType::Macro => ("macros", "Macros"),
                ItemType::Primitive => ("primitives", "Primitive Types"),
                ItemType::AssociatedType => ("associated-types", "Associated Types"),
                ItemType::AssociatedConst => ("associated-consts", "Associated Constants"),
            };
            write!(w, "<h2 id='{id}' class='section-header'>\
                       <a href=\"#{id}\">{name}</a></h2>\n<table>",
                   id = derive_id(short.to_owned()), name = name)?;
        }

        match myitem.inner {
            clean::ExternCrateItem(ref name, ref src) => {
                use html::format::HRef;
                match *src {
                    // `extern crate foo as bar;` — show the original name too.
                    Some(ref src) => {
                        write!(w, "<tr><td><code>{}extern crate {} as {};",
                               VisSpace(&myitem.visibility),
                               HRef::new(myitem.def_id, src),
                               name)?
                    }
                    None => {
                        write!(w, "<tr><td><code>{}extern crate {};",
                               VisSpace(&myitem.visibility),
                               HRef::new(myitem.def_id, name))?
                    }
                }
                write!(w, "</code></td></tr>")?;
            }

            clean::ImportItem(ref import) => {
                write!(w, "<tr><td><code>{}{}</code></td></tr>",
                       VisSpace(&myitem.visibility), *import)?;
            }

            _ => {
                if myitem.name.is_none() { continue }
                // Terse stability tags ([Deprecated], [Unstable]) shown in
                // front of the summary line.
                let stabilities = short_stability(myitem, cx, false);

                let stab_docs = if !stabilities.is_empty() {
                    stabilities.iter()
                               .map(|s| format!("[{}]", s))
                               .collect::<Vec<_>>()
                               .as_slice()
                               .join(" ")
                } else {
                    String::new()
                };

                // Unsafe functions get a warning-sign superscript.
                let mut unsafety_flag = "";
                if let clean::FunctionItem(ref func) = myitem.inner {
                    if func.unsafety == hir::Unsafety::Unsafe {
                        unsafety_flag = "<a title='unsafe function' href='#'><sup>⚠</sup></a>";
                    }
                }

                let doc_value = myitem.doc_value().unwrap_or("");
                write!(w, "
<tr class='{stab} module-item'>
<td><a class='{class}' href='{href}'
title='{title}'>{name}</a>{unsafety_flag}</td>
<td class='docblock-short'>
{stab_docs} {docs}
</td>
</tr>",
                       name = *myitem.name.as_ref().unwrap(),
                       stab_docs = stab_docs,
                       docs = shorter(Some(&Markdown(doc_value).to_string())),
                       class = myitem.type_(),
                       stab = myitem.stability_class(),
                       unsafety_flag = unsafety_flag,
                       href = item_path(myitem.type_(), myitem.name.as_ref().unwrap()),
                       title = full_path(cx, myitem))?;
            }
        }
    }

    // Close the last open section's table, if any item was written.
    if curty.is_some() {
        write!(w, "</table>")?;
    }
    Ok(())
}
/// Decides whether an item should be omitted from module listings and the
/// sidebar: stripped items always; modules only when they are empty,
/// undocumented, and non-public.
fn maybe_ignore_item(it: &clean::Item) -> bool {
    match it.inner {
        clean::StrippedItem(..) => true,
        clean::ModuleItem(ref m) => {
            let undocumented = it.doc_value().is_none();
            let non_public = it.visibility != Some(clean::Public);
            undocumented && m.items.is_empty() && non_public
        }
        _ => false,
    }
}
/// Builds the HTML notice blocks describing an item's stability state
/// (deprecation and/or instability), reading either the `stability`
/// attribute data or, failing that, plain `deprecation` data.
///
/// `show_reason` selects the verbose form (with "since" versions, reason
/// text, and tracking-issue links) used on item pages; the terse form is
/// used for the bracketed tags in module listings.
fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Vec<String> {
    let mut stability = vec![];
    if let Some(stab) = item.stability.as_ref() {
        let deprecated_reason = if show_reason && !stab.deprecated_reason.is_empty() {
            format!(": {}", stab.deprecated_reason)
        } else {
            String::new()
        };
        if !stab.deprecated_since.is_empty() {
            let since = if show_reason {
                format!(" since {}", Escape(&stab.deprecated_since))
            } else {
                String::new()
            };
            let text = format!("Deprecated{}{}", since, Markdown(&deprecated_reason));
            stability.push(format!("<div class='stab deprecated'>{}</div>", text))
        };
        if stab.level == stability::Unstable {
            // Append the feature name and/or tracking-issue link, depending
            // on what information is available.
            let unstable_extra = if show_reason {
                match (!stab.feature.is_empty(), &cx.shared.issue_tracker_base_url, stab.issue) {
                    (true, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 =>
                        format!(" (<code>{}</code> <a href=\"{}{}\">#{}</a>)",
                                Escape(&stab.feature), tracker_url, issue_no, issue_no),
                    // NOTE(review): this branch runs `tracker_url` through
                    // `Escape` while the branch above interpolates it raw —
                    // looks inconsistent; confirm which treatment is intended.
                    (false, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 =>
                        format!(" (<a href=\"{}{}\">#{}</a>)", Escape(&tracker_url), issue_no,
                                issue_no),
                    (true, ..) =>
                        format!(" (<code>{}</code>)", Escape(&stab.feature)),
                    _ => String::new(),
                }
            } else {
                String::new()
            };
            let unstable_reason = if show_reason && !stab.unstable_reason.is_empty() {
                format!(": {}", stab.unstable_reason)
            } else {
                String::new()
            };
            let text = format!("Unstable{}{}", unstable_extra, Markdown(&unstable_reason));
            stability.push(format!("<div class='stab unstable'>{}</div>", text))
        };
    } else if let Some(depr) = item.deprecation.as_ref() {
        // No stability attribute: fall back to plain deprecation data.
        let note = if show_reason && !depr.note.is_empty() {
            format!(": {}", depr.note)
        } else {
            String::new()
        };
        let since = if show_reason && !depr.since.is_empty() {
            format!(" since {}", Escape(&depr.since))
        } else {
            String::new()
        };
        let text = format!("Deprecated{}{}", since, Markdown(&note));
        stability.push(format!("<div class='stab deprecated'>{}</div>", text))
    }
    stability
}
/// Wrapper rendering a constant/static initializer expression as
/// ` = <expr>` (run through `Escape`), or nothing when the expression
/// string is empty.
struct Initializer<'a>(&'a str);
impl<'a> fmt::Display for Initializer<'a> {
    /// Writes ` = <expr>` with the expression HTML-escaped via `Escape`,
    /// or nothing at all for an empty expression.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Initializer(expr) = *self;
        if expr.is_empty() {
            return Ok(());
        }
        write!(f, "<code> = </code><code>{}</code>", Escape(expr))
    }
}
/// Renders the page body for a `const` item: the highlighted signature
/// (visibility, name, type, initializer) followed by its documentation.
fn item_constant(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
                 c: &clean::Constant) -> fmt::Result {
    write!(w, "<pre class='rust const'>{vis}const \
               {name}: {typ}{init}</pre>",
           vis = VisSpace(&it.visibility),
           name = it.name.as_ref().unwrap(),
           typ = c.type_,
           init = Initializer(&c.expr))?;
    document(w, cx, it)
}
/// Renders the page body for a `static` item: the highlighted signature
/// (visibility, mutability, name, type, initializer) followed by docs.
fn item_static(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
               s: &clean::Static) -> fmt::Result {
    write!(w, "<pre class='rust static'>{vis}static {mutability}\
               {name}: {typ}{init}</pre>",
           vis = VisSpace(&it.visibility),
           mutability = MutableSpace(s.mutability),
           name = it.name.as_ref().unwrap(),
           typ = s.type_,
           init = Initializer(&s.expr))?;
    document(w, cx, it)
}
/// Renders the page body for a free function: the highlighted signature
/// followed by its documentation.
fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
                 f: &clean::Function) -> fmt::Result {
    // FIXME(#24111): remove when `const_fn` is stabilized
    let vis_constness = match UnstableFeatures::from_environment() {
        UnstableFeatures::Allow => f.constness,
        _ => hir::Constness::NotConst
    };
    // Plain-text (`{:#}`) width of everything before the argument list;
    // passed to `Method` — presumably for wrapping/aligning long argument
    // lists (confirm in `Method`'s Display impl).
    let indent = format!("{}{}{}{:#}fn {}{:#}",
                         VisSpace(&it.visibility),
                         ConstnessSpace(vis_constness),
                         UnsafetySpace(f.unsafety),
                         AbiSpace(f.abi),
                         it.name.as_ref().unwrap(),
                         f.generics).len();
    write!(w, "<pre class='rust fn'>{vis}{constness}{unsafety}{abi}fn \
               {name}{generics}{decl}{where_clause}</pre>",
           vis = VisSpace(&it.visibility),
           constness = ConstnessSpace(vis_constness),
           unsafety = UnsafetySpace(f.unsafety),
           abi = AbiSpace(f.abi),
           name = it.name.as_ref().unwrap(),
           generics = f.generics,
           where_clause = WhereClause(&f.generics, 2),
           decl = Method(&f.decl, indent))?;
    document(w, cx, it)
}
/// Renders the page body for a trait: the declaration (with associated
/// types/consts and required/provided methods), the trait docs, one
/// documentation section per item group, methods on the trait object,
/// and finally the implementors list.
fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
              t: &clean::Trait) -> fmt::Result {
    // Build the rendered (`bounds`) and plain-text (`bounds_plain`)
    // supertrait-bound suffixes, e.g. ": Clone + Send".
    let mut bounds = String::new();
    let mut bounds_plain = String::new();
    if !t.bounds.is_empty() {
        // NOTE(review): `bounds` was created empty just above, so this
        // inner check can never be true — dead code?
        if !bounds.is_empty() {
            bounds.push(' ');
            bounds_plain.push(' ');
        }
        bounds.push_str(": ");
        bounds_plain.push_str(": ");
        for (i, p) in t.bounds.iter().enumerate() {
            if i > 0 {
                bounds.push_str(" + ");
                bounds_plain.push_str(" + ");
            }
            bounds.push_str(&format!("{}", *p));
            bounds_plain.push_str(&format!("{:#}", *p));
        }
    }

    // Output the trait definition
    write!(w, "<pre class='rust trait'>{}{}trait {}{}{}{} ",
           VisSpace(&it.visibility),
           UnsafetySpace(t.unsafety),
           it.name.as_ref().unwrap(),
           t.generics,
           bounds,
           // Where clauses in traits are indented nine spaces, per rustdoc.css
           WhereClause(&t.generics, 9))?;

    // Partition the trait's items by kind; the groups are rendered in
    // this fixed order both in the declaration and in the doc sections.
    let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>();
    let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>();
    let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>();
    let provided = t.items.iter().filter(|m| m.is_method()).collect::<Vec<_>>();

    if t.items.is_empty() {
        write!(w, "{{ }}")?;
    } else {
        // FIXME: we should be using a derived_id for the Anchors here
        // Non-empty groups are separated from each other by a blank line.
        write!(w, "{{\n")?;
        for t in &types {
            write!(w, " ")?;
            render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait)?;
            write!(w, ";\n")?;
        }
        if !types.is_empty() && !consts.is_empty() {
            w.write_str("\n")?;
        }
        for t in &consts {
            write!(w, " ")?;
            render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait)?;
            write!(w, ";\n")?;
        }
        if !consts.is_empty() && !required.is_empty() {
            w.write_str("\n")?;
        }
        for m in &required {
            write!(w, " ")?;
            render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait)?;
            write!(w, ";\n")?;
        }
        if !required.is_empty() && !provided.is_empty() {
            w.write_str("\n")?;
        }
        // Provided methods show an elided body instead of a semicolon.
        for m in &provided {
            write!(w, " ")?;
            render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait)?;
            write!(w, " {{ ... }}\n")?;
        }
        write!(w, "}}")?;
    }
    write!(w, "</pre>")?;

    // Trait documentation
    document(w, cx, it)?;

    // Renders one trait item's heading plus its documentation inside a
    // documentation section below the declaration.
    fn trait_item(w: &mut fmt::Formatter, cx: &Context, m: &clean::Item, t: &clean::Item)
                  -> fmt::Result {
        let name = m.name.as_ref().unwrap();
        let item_type = m.type_();
        let id = derive_id(format!("{}.{}", item_type, name));
        let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
        write!(w, "<h3 id='{id}' class='method'>\
                   <span id='{ns_id}' class='invisible'><code>",
               id = id,
               ns_id = ns_id)?;
        render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)), ItemType::Impl)?;
        write!(w, "</code>")?;
        render_stability_since(w, m, t)?;
        write!(w, "</span></h3>")?;
        document(w, cx, m)?;
        Ok(())
    }

    if !types.is_empty() {
        write!(w, "
<h2 id='associated-types'>Associated Types</h2>
<div class='methods'>
")?;
        for t in &types {
            trait_item(w, cx, *t, it)?;
        }
        write!(w, "</div>")?;
    }

    if !consts.is_empty() {
        write!(w, "
<h2 id='associated-const'>Associated Constants</h2>
<div class='methods'>
")?;
        for t in &consts {
            trait_item(w, cx, *t, it)?;
        }
        write!(w, "</div>")?;
    }

    // Output the documentation for each function individually
    if !required.is_empty() {
        write!(w, "
<h2 id='required-methods'>Required Methods</h2>
<div class='methods'>
")?;
        for m in &required {
            trait_item(w, cx, *m, it)?;
        }
        write!(w, "</div>")?;
    }
    if !provided.is_empty() {
        write!(w, "
<h2 id='provided-methods'>Provided Methods</h2>
<div class='methods'>
")?;
        for m in &provided {
            trait_item(w, cx, *m, it)?;
        }
        write!(w, "</div>")?;
    }

    // If there are methods directly on this trait object, render them here.
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)?;

    // Implementors known at documentation time, pulled from the cache.
    let cache = cache();
    write!(w, "
<h2 id='implementors'>Implementors</h2>
<ul class='item-list' id='implementors-list'>
")?;
    if let Some(implementors) = cache.implementors.get(&it.def_id) {
        for i in implementors {
            write!(w, "<li><code>")?;
            fmt_impl_for_trait_page(&i.impl_, w)?;
            writeln!(w, "</code></li>")?;
        }
    }
    write!(w, "</ul>")?;
    // Include the per-trait implementors script (presumably adds
    // downstream-crate implementors at page-load time — confirm against
    // the generated implementors/*.js files).
    write!(w, r#"<script type="text/javascript" async
src="{root_path}/implementors/{path}/{ty}.{name}.js">
</script>"#,
           root_path = vec![".."; cx.current.len()].join("/"),
           path = if it.def_id.is_local() {
               cx.current.join("/")
           } else {
               let (ref path, _) = cache.external_paths[&it.def_id];
               path[..path.len() - 1].join("/")
           },
           ty = it.type_().css_class(),
           name = *it.name.as_ref().unwrap())?;
    Ok(())
}
/// Computes the URL an associated item's name should link to, without
/// consulting the trait's provided-method set (contrast with the mapping
/// done in `render_assoc_item`): either a local `#anchor` or a
/// cross-page link resolved via `href`.
fn naive_assoc_href(it: &clean::Item, link: AssocItemLink) -> String {
    use html::item_type::ItemType::*;
    let name = it.name.as_ref().unwrap();
    // Typedef anchors share the associated-type namespace.
    let ty = match it.type_() {
        Typedef | AssociatedType => AssociatedType,
        other => other,
    };
    let anchor = format!("#{}.{}", ty, name);
    match link {
        AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
        AssocItemLink::Anchor(None) => anchor,
        AssocItemLink::GotoSource(did, _) => {
            // Fall back to the bare anchor when the target page is unknown.
            match href(did) {
                Some(p) => format!("{}{}", p.0, anchor),
                None => anchor,
            }
        }
    }
}
/// Renders the signature of an associated constant, linking its name
/// according to `link` and appending ` = <default>` (escaped) when a
/// default value exists.
fn assoc_const(w: &mut fmt::Formatter,
               it: &clean::Item,
               ty: &clean::Type,
               default: Option<&String>,
               link: AssocItemLink) -> fmt::Result {
    write!(w, "const <a href='{}' class='constant'>{}</a>: {}",
           naive_assoc_href(it, link),
           it.name.as_ref().unwrap(),
           ty)?;
    if let Some(default) = default {
        write!(w, " = {}", Escape(default))?;
    }
    Ok(())
}
/// Renders the signature of an associated type: its name (linked per
/// `link`), any trait bounds, and its default when present.
///
/// `bounds` is taken as a slice rather than `&Vec` (clippy `ptr_arg`);
/// existing call sites passing `&Vec<_>` still work via deref coercion.
fn assoc_type(w: &mut fmt::Formatter, it: &clean::Item,
              bounds: &[clean::TyParamBound],
              default: Option<&clean::Type>,
              link: AssocItemLink) -> fmt::Result {
    write!(w, "type <a href='{}' class='type'>{}</a>",
           naive_assoc_href(it, link),
           it.name.as_ref().unwrap())?;
    if !bounds.is_empty() {
        write!(w, ": {}", TyParamBounds(bounds))?
    }
    if let Some(default) = default {
        write!(w, " = {}", default)?;
    }
    Ok(())
}
/// Emits a "Stable since Rust version X" marker for `ver`, unless the
/// version is absent, empty, or identical to the containing item's
/// version (in which case the marker would be redundant noise).
fn render_stability_since_raw<'a>(w: &mut fmt::Formatter,
                                  ver: Option<&'a str>,
                                  containing_ver: Option<&'a str>) -> fmt::Result {
    if let Some(v) = ver {
        // `!v.is_empty()` replaces the non-idiomatic `v.len() > 0`.
        if containing_ver != ver && !v.is_empty() {
            write!(w, "<div class='since' title='Stable since Rust version {0}'>{0}</div>",
                   v)?
        }
    }
    Ok(())
}
/// Emits the stability-since marker for `item`, suppressing it when it
/// matches the enclosing `containing_item`'s version; see
/// `render_stability_since_raw` for the exact rules.
fn render_stability_since(w: &mut fmt::Formatter,
                          item: &clean::Item,
                          containing_item: &clean::Item) -> fmt::Result {
    render_stability_since_raw(w, item.stable_since(), containing_item.stable_since())
}
/// Renders the signature of a single associated item (method, associated
/// const, or associated type), hyperlinked according to `link`.
/// `parent` (Trait vs Impl) adjusts the indentation used when a long
/// signature wraps. Stripped items render nothing.
fn render_assoc_item(w: &mut fmt::Formatter,
                     item: &clean::Item,
                     link: AssocItemLink,
                     parent: ItemType) -> fmt::Result {
    // Shared renderer for fn-like items (required and provided methods).
    fn method(w: &mut fmt::Formatter,
              meth: &clean::Item,
              unsafety: hir::Unsafety,
              constness: hir::Constness,
              abi: abi::Abi,
              g: &clean::Generics,
              d: &clean::FnDecl,
              link: AssocItemLink,
              parent: ItemType)
              -> fmt::Result {
        let name = meth.name.as_ref().unwrap();
        let anchor = format!("#{}.{}", meth.type_(), name);
        let href = match link {
            AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
            AssocItemLink::Anchor(None) => anchor,
            AssocItemLink::GotoSource(did, provided_methods) => {
                // We're creating a link from an impl-item to the corresponding
                // trait-item and need to map the anchored type accordingly.
                let ty = if provided_methods.contains(name) {
                    ItemType::Method
                } else {
                    ItemType::TyMethod
                };
                // Fall back to the local anchor if the trait's page is unknown.
                href(did).map(|p| format!("{}#{}.{}", p.0, ty, name)).unwrap_or(anchor)
            }
        };
        // FIXME(#24111): remove when `const_fn` is stabilized
        let vis_constness = match UnstableFeatures::from_environment() {
            UnstableFeatures::Allow => constness,
            _ => hir::Constness::NotConst
        };
        // Plain-text width of everything before the argument list; fed to
        // `Method`/`WhereClause` to position wrapped lines.
        let prefix = format!("{}{}{:#}fn {}{:#}",
                             ConstnessSpace(vis_constness),
                             UnsafetySpace(unsafety),
                             AbiSpace(abi),
                             name,
                             *g);
        let mut indent = prefix.len();
        // Trait listings and impl blocks have fixed where-clause indents;
        // otherwise derive it from the width of the last rendered line.
        let where_indent = if parent == ItemType::Trait {
            indent += 4;
            8
        } else if parent == ItemType::Impl {
            2
        } else {
            let prefix = prefix + &format!("{:#}", Method(d, indent));
            prefix.lines().last().unwrap().len() + 1
        };
        write!(w, "{}{}{}fn <a href='{href}' class='fnname'>{name}</a>\
                   {generics}{decl}{where_clause}",
               ConstnessSpace(vis_constness),
               UnsafetySpace(unsafety),
               AbiSpace(abi),
               href = href,
               name = name,
               generics = *g,
               decl = Method(d, indent),
               where_clause = WhereClause(g, where_indent))
    }
    match item.inner {
        clean::StrippedItem(..) => Ok(()),
        clean::TyMethodItem(ref m) => {
            // Required methods are never const.
            method(w, item, m.unsafety, hir::Constness::NotConst,
                   m.abi, &m.generics, &m.decl, link, parent)
        }
        clean::MethodItem(ref m) => {
            method(w, item, m.unsafety, m.constness,
                   m.abi, &m.generics, &m.decl, link, parent)
        }
        clean::AssociatedConstItem(ref ty, ref default) => {
            assoc_const(w, item, ty, default.as_ref(), link)
        }
        clean::AssociatedTypeItem(ref bounds, ref default) => {
            assoc_type(w, item, bounds, default.as_ref(), link)
        }
        _ => panic!("render_assoc_item called on non-associated-item")
    }
}
/// Renders the page body for a struct: highlighted declaration, docs, a
/// "Fields" section (braced structs only), then associated items.
fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
               s: &clean::Struct) -> fmt::Result {
    write!(w, "<pre class='rust struct'>")?;
    render_attributes(w, it)?;
    render_struct(w,
                  it,
                  Some(&s.generics),
                  s.struct_type,
                  &s.fields,
                  "",
                  true)?;
    write!(w, "</pre>")?;

    document(w, cx, it)?;
    // Keep only actual (non-stripped) fields, paired with their types.
    let mut fields = s.fields.iter().filter_map(|f| {
        match f.inner {
            clean::StructFieldItem(ref ty) => Some((f, ty)),
            _ => None,
        }
    }).peekable();
    // Only braced (Plain) structs get a named-fields section.
    if let doctree::Plain = s.struct_type {
        if fields.peek().is_some() {
            write!(w, "<h2 class='fields'>Fields</h2>")?;
            for (field, ty) in fields {
                let id = derive_id(format!("{}.{}",
                                           ItemType::StructField,
                                           field.name.as_ref().unwrap()));
                let ns_id = derive_id(format!("{}.{}",
                                              field.name.as_ref().unwrap(),
                                              ItemType::StructField.name_space()));
                write!(w, "<span id='{id}' class='{item_type}'>
<span id='{ns_id}' class='invisible'>
<code>{name}: {ty}</code>
</span></span><span class='stab {stab}'></span>",
                       item_type = ItemType::StructField,
                       id = id,
                       ns_id = ns_id,
                       stab = field.stability_class(),
                       name = field.name.as_ref().unwrap(),
                       ty = ty)?;
                document(w, cx, field)?;
            }
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for a union: highlighted declaration, docs, a
/// "Fields" section, then associated items.
fn item_union(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
              s: &clean::Union) -> fmt::Result {
    write!(w, "<pre class='rust union'>")?;
    render_attributes(w, it)?;
    render_union(w,
                 it,
                 Some(&s.generics),
                 &s.fields,
                 "",
                 true)?;
    write!(w, "</pre>")?;

    document(w, cx, it)?;
    // Keep only actual (non-stripped) fields, paired with their types.
    let mut fields = s.fields.iter().filter_map(|f| {
        match f.inner {
            clean::StructFieldItem(ref ty) => Some((f, ty)),
            _ => None,
        }
    }).peekable();
    if fields.peek().is_some() {
        write!(w, "<h2 class='fields'>Fields</h2>")?;
        for (field, ty) in fields {
            write!(w, "<span id='{shortty}.{name}' class='{shortty}'><code>{name}: {ty}</code>
</span><span class='stab {stab}'></span>",
                   shortty = ItemType::StructField,
                   stab = field.stability_class(),
                   name = field.name.as_ref().unwrap(),
                   ty = ty)?;
            document(w, cx, field)?;
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for an enum: the declaration listing every
/// variant, the enum docs, a "Variants" section (including a fields
/// table for struct variants), then associated items.
fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
             e: &clean::Enum) -> fmt::Result {
    write!(w, "<pre class='rust enum'>")?;
    render_attributes(w, it)?;
    // Plain-text width of the `enum Name<...> ` header, used to position
    // the where clause.
    let padding = format!("{}enum {}{:#} ",
                          VisSpace(&it.visibility),
                          it.name.as_ref().unwrap(),
                          e.generics).len();
    write!(w, "{}enum {}{}{}",
           VisSpace(&it.visibility),
           it.name.as_ref().unwrap(),
           e.generics,
           WhereClause(&e.generics, padding))?;
    if e.variants.is_empty() && !e.variants_stripped {
        write!(w, " {{}}")?;
    } else {
        write!(w, " {{\n")?;
        for v in &e.variants {
            write!(w, " ")?;
            let name = v.name.as_ref().unwrap();
            match v.inner {
                clean::VariantItem(ref var) => {
                    match var.kind {
                        // Bare variant: just the name.
                        clean::VariantKind::CLike => write!(w, "{}", name)?,
                        // Tuple variant: name followed by payload types.
                        clean::VariantKind::Tuple(ref tys) => {
                            write!(w, "{}(", name)?;
                            for (i, ty) in tys.iter().enumerate() {
                                if i > 0 {
                                    write!(w, ", ")?
                                }
                                write!(w, "{}", *ty)?;
                            }
                            write!(w, ")")?;
                        }
                        // Struct variant: reuse the struct renderer without
                        // the `struct ` keyword.
                        clean::VariantKind::Struct(ref s) => {
                            render_struct(w,
                                          v,
                                          None,
                                          s.struct_type,
                                          &s.fields,
                                          " ",
                                          false)?;
                        }
                    }
                }
                _ => unreachable!()
            }
            write!(w, ",\n")?;
        }

        if e.variants_stripped {
            write!(w, " // some variants omitted\n")?;
        }
        write!(w, "}}")?;
    }
    write!(w, "</pre>")?;

    document(w, cx, it)?;
    if !e.variants.is_empty() {
        write!(w, "<h2 class='variants'>Variants</h2>\n")?;
        for variant in &e.variants {
            let id = derive_id(format!("{}.{}",
                                       ItemType::Variant,
                                       variant.name.as_ref().unwrap()));
            let ns_id = derive_id(format!("{}.{}",
                                          variant.name.as_ref().unwrap(),
                                          ItemType::Variant.name_space()));
            write!(w, "<span id='{id}' class='variant'>\
                       <span id='{ns_id}' class='invisible'><code>{name}",
                   id = id,
                   ns_id = ns_id,
                   name = variant.name.as_ref().unwrap())?;
            // Tuple variants repeat their payload types in the heading.
            if let clean::VariantItem(ref var) = variant.inner {
                if let clean::VariantKind::Tuple(ref tys) = var.kind {
                    write!(w, "(")?;
                    for (i, ty) in tys.iter().enumerate() {
                        if i > 0 {
                            write!(w, ", ")?;
                        }
                        write!(w, "{}", *ty)?;
                    }
                    write!(w, ")")?;
                }
            }
            write!(w, "</code></span></span>")?;
            document(w, cx, variant)?;

            use clean::{Variant, VariantKind};
            // Struct variants additionally get a collapsible table
            // documenting each of their fields.
            if let clean::VariantItem(Variant {
                kind: VariantKind::Struct(ref s)
            }) = variant.inner {
                let variant_id = derive_id(format!("{}.{}.fields",
                                                   ItemType::Variant,
                                                   variant.name.as_ref().unwrap()));
                write!(w, "<span class='docblock autohide sub-variant' id='{id}'>",
                       id = variant_id)?;
                write!(w, "<h3 class='fields'>Fields of <code>{name}</code></h3>\n
<table>", name = variant.name.as_ref().unwrap())?;
                for field in &s.fields {
                    use clean::StructFieldItem;
                    if let StructFieldItem(ref ty) = field.inner {
                        let id = derive_id(format!("variant.{}.field.{}",
                                                   variant.name.as_ref().unwrap(),
                                                   field.name.as_ref().unwrap()));
                        let ns_id = derive_id(format!("{}.{}.{}.{}",
                                                      variant.name.as_ref().unwrap(),
                                                      ItemType::Variant.name_space(),
                                                      field.name.as_ref().unwrap(),
                                                      ItemType::StructField.name_space()));
                        write!(w, "<tr><td \
                                   id='{id}'>\
                                   <span id='{ns_id}' class='invisible'>\
                                   <code>{f}: {t}</code></span></td><td>",
                               id = id,
                               ns_id = ns_id,
                               f = field.name.as_ref().unwrap(),
                               t = *ty)?;
                        document(w, cx, field)?;
                        write!(w, "</td></tr>")?;
                    }
                }
                write!(w, "</table></span>")?;
            }
            render_stability_since(w, variant, it)?;
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)?;
    Ok(())
}
/// Pretty-prints a single attribute meta item back to source-like text,
/// recursing into list attributes such as `#[repr(C, packed)]`.
///
/// Returns `None` for shapes that can't be rendered (e.g. a list whose
/// entries contain no renderable meta items).
fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
    let name = attr.name();
    if attr.is_word() {
        // Bare word, e.g. `no_mangle`. `to_string()` replaces the
        // redundant `format!("{}", name)`.
        Some(name.to_string())
    } else if let Some(v) = attr.value_str() {
        // Key/value, e.g. `export_name = "foo"`; `{:?}` supplies quotes.
        Some(format!("{} = {:?}", name, &v.as_str()[..]))
    } else if let Some(values) = attr.meta_item_list() {
        // List: render each nested meta item and rejoin.
        let display: Vec<_> = values.iter().filter_map(|attr| {
            attr.meta_item().and_then(|mi| render_attribute(mi))
        }).collect();
        if !display.is_empty() {
            Some(format!("{}({})", name, display.join(", ")))
        } else {
            None
        }
    } else {
        None
    }
}
/// Attributes shown on rendered item pages (see `render_attributes`);
/// any attribute not listed here is treated as an implementation detail
/// and hidden from the documentation.
const ATTRIBUTE_WHITELIST: &'static [&'static str] = &[
    "export_name",
    "lang",
    "link_section",
    "must_use",
    "no_mangle",
    "repr",
    "unsafe_destructor_blind_to_params"
];
/// Renders an item's whitelisted attributes (e.g. `#[repr(C)]`,
/// `#[must_use]`) as a single `docblock attributes` div, or nothing when
/// no whitelisted attribute is present or renderable.
fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
    let mut attrs = String::new();
    for attr in &it.attrs.other_attrs {
        let name = attr.name();
        // Skip anything not on the public-facing whitelist.
        if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
            continue;
        }
        if let Some(s) = render_attribute(attr.meta()) {
            attrs.push_str(&format!("#[{}]\n", s));
        }
    }
    // `!attrs.is_empty()` replaces the non-idiomatic `attrs.len() > 0`.
    if !attrs.is_empty() {
        write!(w, "<div class=\"docblock attributes\">{}</div>", &attrs)?;
    }
    Ok(())
}
/// Renders a struct declaration; also reused for struct-like enum
/// variants (with `structhead == false` to suppress the `struct `
/// keyword).
///
/// `plain` accumulates a plain-text (`{:#}`) copy of what has been
/// written so far; its length positions the where clause. `tab` is the
/// line prefix used when nested inside an enum body.
fn render_struct(w: &mut fmt::Formatter, it: &clean::Item,
                 g: Option<&clean::Generics>,
                 ty: doctree::StructType,
                 fields: &[clean::Item],
                 tab: &str,
                 structhead: bool) -> fmt::Result {
    let mut plain = String::new();
    write!(w, "{}{}{}",
           VisSpace(&it.visibility),
           if structhead {"struct "} else {""},
           it.name.as_ref().unwrap())?;
    plain.push_str(&format!("{}{}{}",
                            VisSpace(&it.visibility),
                            if structhead {"struct "} else {""},
                            it.name.as_ref().unwrap()));
    if let Some(g) = g {
        plain.push_str(&format!("{:#}", g));
        write!(w, "{}", g)?
    }
    match ty {
        // Braced struct: one line per visible field.
        doctree::Plain => {
            if let Some(g) = g {
                write!(w, "{}", WhereClause(g, plain.len() + 1))?
            }
            let mut has_visible_fields = false;
            write!(w, " {{")?;
            for field in fields {
                if let clean::StructFieldItem(ref ty) = field.inner {
                    write!(w, "\n{} {}{}: {},",
                           tab,
                           VisSpace(&field.visibility),
                           field.name.as_ref().unwrap(),
                           *ty)?;
                    has_visible_fields = true;
                }
            }

            if has_visible_fields {
                if it.has_stripped_fields().unwrap() {
                    write!(w, "\n{} // some fields omitted", tab)?;
                }
                write!(w, "\n{}", tab)?;
            } else if it.has_stripped_fields().unwrap() {
                // If there are no visible fields we can just display
                // `{ /* fields omitted */ }` to save space.
                write!(w, " /* fields omitted */ ")?;
            }
            write!(w, "}}")?;
        }
        // Tuple struct: stripped fields render as `_` placeholders.
        doctree::Tuple => {
            write!(w, "(")?;
            plain.push_str("(");
            for (i, field) in fields.iter().enumerate() {
                if i > 0 {
                    write!(w, ", ")?;
                    plain.push_str(", ");
                }
                match field.inner {
                    clean::StrippedItem(box clean::StructFieldItem(..)) => {
                        plain.push_str("_");
                        write!(w, "_")?
                    }
                    clean::StructFieldItem(ref ty) => {
                        plain.push_str(&format!("{}{:#}", VisSpace(&field.visibility), *ty));
                        write!(w, "{}{}", VisSpace(&field.visibility), *ty)?
                    }
                    _ => unreachable!()
                }
            }
            write!(w, ")")?;
            plain.push_str(")");
            if let Some(g) = g {
                write!(w, "{}", WhereClause(g, plain.len() + 1))?
            }
            write!(w, ";")?;
        }
        doctree::Unit => {
            // Needed for PhantomData.
            if let Some(g) = g {
                write!(w, "{}", WhereClause(g, plain.len() + 1))?
            }
            write!(w, ";")?;
        }
    }
    Ok(())
}
/// Renders a union declaration as HTML into `w`, analogously to
/// `render_struct`'s brace-style arm: header, generics with where clause,
/// then the visible fields one per line.
fn render_union(w: &mut fmt::Formatter, it: &clean::Item,
                g: Option<&clean::Generics>,
                fields: &[clean::Item],
                tab: &str,
                structhead: bool) -> fmt::Result {
    // `plain` mirrors the emitted text so the where clause can be indented
    // to line up under the header.
    let mut plain = String::new();
    write!(w, "{}{}{}",
           VisSpace(&it.visibility),
           if structhead {"union "} else {""},
           it.name.as_ref().unwrap())?;
    plain.push_str(&format!("{}{}{}",
                            VisSpace(&it.visibility),
                            if structhead {"union "} else {""},
                            it.name.as_ref().unwrap()));
    if let Some(g) = g {
        write!(w, "{}", g)?;
        plain.push_str(&format!("{:#}", g));
        write!(w, "{}", WhereClause(g, plain.len() + 1))?;
    }

    write!(w, " {{\n{}", tab)?;
    for field in fields {
        if let clean::StructFieldItem(ref ty) = field.inner {
            write!(w, "    {}{}: {},\n{}",
                   VisSpace(&field.visibility),
                   field.name.as_ref().unwrap(),
                   *ty,
                   tab)?;
        }
    }

    if it.has_stripped_fields().unwrap() {
        write!(w, "    // some fields omitted\n{}", tab)?;
    }
    write!(w, "}}")?;
    Ok(())
}
/// How a rendered associated item should be linked: either to an in-page
/// anchor (optionally a specific id) or to the defining trait's source,
/// carrying the set of provided trait methods.
#[derive(Copy, Clone)]
enum AssocItemLink<'a> {
    Anchor(Option<&'a str>),
    GotoSource(DefId, &'a FxHashSet<String>),
}
impl<'a> AssocItemLink<'a> {
    /// Returns a copy of this link retargeted at the anchor `id` when the
    /// link is anchor-based; source links are returned unchanged.
    fn anchor(&self, id: &'a String) -> Self {
        if let AssocItemLink::Anchor(_) = *self {
            AssocItemLink::Anchor(Some(id))
        } else {
            *self
        }
    }
}
/// What subset of associated items to render: everything, or only the
/// methods reachable through a `Deref` impl from `type_` to `trait_`'s
/// target (`deref_mut_` records whether `DerefMut` is also implemented).
enum AssocItemRender<'a> {
    All,
    DerefFor { trait_: &'a clean::Type, type_: &'a clean::Type, deref_mut_: bool }
}
/// Rendering context for impl items: a normal impl section, or the
/// "Methods from Deref" section (where `mut_` records `DerefMut` support).
#[derive(Copy, Clone, PartialEq)]
enum RenderMode {
    Normal,
    ForDeref { mut_: bool },
}
/// Renders the associated-item sections for the item with `DefId` `it`:
/// first the inherent impls ("Methods"), then — unless we are already
/// rendering a Deref sub-section — the Deref-forwarded methods and the
/// "Trait Implementations" section.
fn render_assoc_items(w: &mut fmt::Formatter,
                      cx: &Context,
                      containing_item: &clean::Item,
                      it: DefId,
                      what: AssocItemRender) -> fmt::Result {
    let c = cache();
    let v = match c.impls.get(&it) {
        Some(v) => v,
        None => return Ok(()),
    };
    // Split into inherent impls and trait impls.
    let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| {
        i.inner_impl().trait_.is_none()
    });
    if !non_trait.is_empty() {
        let render_mode = match what {
            AssocItemRender::All => {
                write!(w, "<h2 id='methods'>Methods</h2>")?;
                RenderMode::Normal
            }
            AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
                write!(w, "<h2 id='deref-methods'>Methods from \
                           {}<Target={}></h2>", trait_, type_)?;
                RenderMode::ForDeref { mut_: deref_mut_ }
            }
        };
        for i in &non_trait {
            render_impl(w, cx, i, AssocItemLink::Anchor(None), render_mode,
                        containing_item.stable_since())?;
        }
    }
    // The Deref sub-section shows inherent methods only; stop here.
    if let AssocItemRender::DerefFor { .. } = what {
        return Ok(());
    }
    if !traits.is_empty() {
        let deref_impl = traits.iter().find(|t| {
            t.inner_impl().trait_.def_id() == c.deref_trait_did
        });
        if let Some(impl_) = deref_impl {
            // `any` instead of `find(..).is_some()`: we only need to know
            // whether a DerefMut impl exists, not which one.
            let has_deref_mut = traits.iter().any(|t| {
                t.inner_impl().trait_.def_id() == c.deref_mut_trait_did
            });
            render_deref_methods(w, cx, impl_, containing_item, has_deref_mut)?;
        }
        write!(w, "<h2 id='implementations'>Trait \
                   Implementations</h2>")?;
        for i in &traits {
            let did = i.trait_did().unwrap();
            let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods);
            render_impl(w, cx, i, assoc_link,
                        RenderMode::Normal, containing_item.stable_since())?;
        }
    }
    Ok(())
}
/// Renders the methods reachable through the given `Deref` impl: resolves
/// the impl's `Target` associated-type binding and re-runs
/// `render_assoc_items` on it in `DerefFor` mode.
fn render_deref_methods(w: &mut fmt::Formatter, cx: &Context, impl_: &Impl,
                        container_item: &clean::Item, deref_mut: bool) -> fmt::Result {
    let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
    // Find the `type Target = ..;` binding inside the impl; its absence is
    // treated as a bug (expect).
    let target = impl_.inner_impl().items.iter().filter_map(|item| {
        match item.inner {
            clean::TypedefItem(ref t, true) => Some(&t.type_),
            _ => None,
        }
    }).next().expect("Expected associated type binding");
    let what = AssocItemRender::DerefFor { trait_: deref_type, type_: target,
                                           deref_mut_: deref_mut };
    if let Some(did) = target.def_id() {
        render_assoc_items(w, cx, container_item, did, what)
    } else {
        // Deref to a primitive type: look its DefId up in the cache.
        if let Some(prim) = target.primitive_type() {
            if let Some(&did) = cache().primitive_locations.get(&prim) {
                render_assoc_items(w, cx, container_item, did, what)?;
            }
        }
        Ok(())
    }
}
/// Renders one `impl` block: the header (skipped in Deref mode), each item
/// the impl contains, and finally the documentation of any trait default
/// items the impl did not override.
fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLink,
               render_mode: RenderMode, outer_version: Option<&str>) -> fmt::Result {
    if render_mode == RenderMode::Normal {
        write!(w, "<h3 class='impl'><span class='in-band'><code>{}</code>", i.inner_impl())?;
        write!(w, "</span><span class='out-of-band'>")?;
        let since = i.impl_item.stability.as_ref().map(|s| &s.since[..]);
        if let Some(l) = (Item { item: &i.impl_item, cx: cx }).src_href() {
            write!(w, "<div class='ghost'></div>")?;
            render_stability_since_raw(w, since, outer_version)?;
            write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                   l, "goto source code")?;
        } else {
            render_stability_since_raw(w, since, outer_version)?;
        }
        write!(w, "</span>")?;
        write!(w, "</h3>\n")?;
        if let Some(ref dox) = i.impl_item.doc_value() {
            write!(w, "<div class='docblock'>{}</div>", Markdown(dox))?;
        }
    }

    // Renders a single item belonging to the impl (or, when
    // `is_default_item` is set, a trait default the impl inherits).
    fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
                     link: AssocItemLink, render_mode: RenderMode,
                     is_default_item: bool, outer_version: Option<&str>,
                     trait_: Option<&clean::Trait>) -> fmt::Result {
        let item_type = item.type_();
        let name = item.name.as_ref().unwrap();

        // In Deref mode, only forward methods that are actually reachable:
        // methods taking `&mut self` are skipped unless DerefMut is
        // implemented (`mut_`), and non-methods are skipped entirely.
        let render_method_item: bool = match render_mode {
            RenderMode::Normal => true,
            RenderMode::ForDeref { mut_: deref_mut_ } => {
                let self_type_opt = match item.inner {
                    clean::MethodItem(ref method) => method.decl.self_type(),
                    clean::TyMethodItem(ref method) => method.decl.self_type(),
                    _ => None
                };
                if let Some(self_ty) = self_type_opt {
                    let by_mut_ref = match self_ty {
                        SelfTy::SelfBorrowed(_lifetime, mutability) => {
                            mutability == Mutability::Mutable
                        },
                        SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. }) => {
                            mutability == Mutability::Mutable
                        },
                        _ => false,
                    };
                    deref_mut_ || !by_mut_ref
                } else {
                    false
                }
            },
        };

        // Emit the item's heading; `id` is the in-page anchor, `ns_id` a
        // namespaced variant used for the invisible span.
        match item.inner {
            clean::MethodItem(..) | clean::TyMethodItem(..) => {
                // Only render when the method is not static or we allow static methods
                if render_method_item {
                    let id = derive_id(format!("{}.{}", item_type, name));
                    let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
                    write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
                    write!(w, "<span id='{}' class='invisible'>", ns_id)?;
                    write!(w, "<code>")?;
                    render_assoc_item(w, item, link.anchor(&id), ItemType::Impl)?;
                    write!(w, "</code>")?;
                    render_stability_since_raw(w, item.stable_since(), outer_version)?;
                    write!(w, "</span></h4>\n")?;
                }
            }
            clean::TypedefItem(ref tydef, _) => {
                let id = derive_id(format!("{}.{}", ItemType::AssociatedType, name));
                let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
                write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?;
                write!(w, "</code></span></h4>\n")?;
            }
            clean::AssociatedConstItem(ref ty, ref default) => {
                let id = derive_id(format!("{}.{}", item_type, name));
                let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
                write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?;
                write!(w, "</code></span></h4>\n")?;
            }
            clean::ConstantItem(ref c) => {
                let id = derive_id(format!("{}.{}", item_type, name));
                let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
                write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                assoc_const(w, item, &c.type_, Some(&c.expr), link.anchor(&id))?;
                write!(w, "</code></span></h4>\n")?;
            }
            clean::AssociatedTypeItem(ref bounds, ref default) => {
                let id = derive_id(format!("{}.{}", item_type, name));
                let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
                write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?;
                write!(w, "</code></span></h4>\n")?;
            }
            clean::StrippedItem(..) => return Ok(()),
            _ => panic!("can't make docs for trait item with name {:?}", item.name)
        }

        // Attach documentation: for trait impls, fall back to the trait's
        // own docs/stability when the impl item has none.
        if render_method_item || render_mode == RenderMode::Normal {
            if !is_default_item {
                if let Some(t) = trait_ {
                    // The trait item may have been stripped so we might not
                    // find any documentation or stability for it.
                    if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
                        // We need the stability of the item from the trait
                        // because impls can't have a stability.
                        document_stability(w, cx, it)?;
                        if item.doc_value().is_some() {
                            document_full(w, item)?;
                        } else {
                            // In case the item isn't documented,
                            // provide short documentation from the trait.
                            document_short(w, it, link)?;
                        }
                    }
                } else {
                    document(w, cx, item)?;
                }
            } else {
                document_stability(w, cx, item)?;
                document_short(w, item, link)?;
            }
        }
        Ok(())
    }

    let traits = &cache().traits;
    let trait_ = i.trait_did().and_then(|did| traits.get(&did));

    write!(w, "<div class='impl-items'>")?;
    for trait_item in &i.inner_impl().items {
        doc_impl_item(w, cx, trait_item, link, render_mode,
                      false, outer_version, trait_)?;
    }

    // Documents the trait's default items that the impl does not override.
    fn render_default_items(w: &mut fmt::Formatter,
                            cx: &Context,
                            t: &clean::Trait,
                            i: &clean::Impl,
                            render_mode: RenderMode,
                            outer_version: Option<&str>) -> fmt::Result {
        for trait_item in &t.items {
            let n = trait_item.name.clone();
            // Skip items the impl provides itself.
            if i.items.iter().find(|m| m.name == n).is_some() {
                continue;
            }
            let did = i.trait_.as_ref().unwrap().def_id().unwrap();
            let assoc_link = AssocItemLink::GotoSource(did, &i.provided_trait_methods);

            doc_impl_item(w, cx, trait_item, assoc_link, render_mode, true,
                          outer_version, None)?;
        }
        Ok(())
    }

    // If we've implemented a trait, then also emit documentation for all
    // default items which weren't overridden in the implementation block.
    if let Some(t) = trait_ {
        render_default_items(w, cx, t, &i.inner_impl(), render_mode, outer_version)?;
    }
    write!(w, "</div>")?;
    Ok(())
}
/// Renders a `type` alias item: the declaration inside a `<pre>` block,
/// followed by its documentation.
fn item_typedef(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
                t: &clean::Typedef) -> fmt::Result {
    let name = it.name.as_ref().unwrap();
    // Indent the where clause so it lines up under the alias name.
    let indent = format!("type {}{:#} ", name, t.generics).len();
    write!(w, "<pre class='rust typedef'>type {}{}{where_clause} = {type_};</pre>",
           name,
           t.generics,
           where_clause = WhereClause(&t.generics, indent),
           type_ = t.type_)?;
    document(w, cx, it)
}
impl<'a> fmt::Display for Sidebar<'a> {
    /// Writes the sidebar HTML: the breadcrumb of parent modules, then a
    /// small script that points the shared per-module `sidebar-items.js`
    /// at the current item.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let cx = self.cx;
        let it = self.item;
        // For modules, the last path component is the module itself and is
        // not part of the breadcrumb.
        let parentlen = cx.current.len() - if it.is_mod() {1} else {0};

        // The sidebar is designed to display sibling functions, modules and
        // other miscellaneous information. since there are lots of sibling
        // items (and that causes quadratic growth in large modules),
        // we refactor common parts into a shared JavaScript file per module.
        // still, we don't move everything into JS because we want to preserve
        // as much HTML as possible in order to allow non-JS-enabled browsers
        // to navigate the documentation (though slightly inefficiently).

        write!(fmt, "<p class='location'>")?;
        for (i, name) in cx.current.iter().take(parentlen).enumerate() {
            if i > 0 {
                write!(fmt, "::<wbr>")?;
            }
            // Each ancestor gets a relative "../../index.html"-style link
            // (3 characters of root path per remaining level).
            write!(fmt, "<a href='{}index.html'>{}</a>",
                   &cx.root_path()[..(cx.current.len() - i - 1) * 3],
                   *name)?;
        }
        write!(fmt, "</p>")?;

        // Sidebar refers to the enclosing module, not this module.
        let relpath = if it.is_mod() { "../" } else { "" };
        write!(fmt,
               "<script>window.sidebarCurrent = {{\
                name: '{name}', \
                ty: '{ty}', \
                relpath: '{path}'\
                }};</script>",
               name = it.name.as_ref().map(|x| &x[..]).unwrap_or(""),
               ty = it.type_().css_class(),
               path = relpath)?;
        if parentlen == 0 {
            // There is no sidebar-items.js beyond the crate root path
            // FIXME maybe dynamic crate loading can be merged here
        } else {
            write!(fmt, "<script defer src=\"{path}sidebar-items.js\"></script>",
                   path = relpath)?;
        }

        Ok(())
    }
}
impl<'a> fmt::Display for Source<'a> {
    /// Renders a source listing: a right-aligned column of line numbers in
    /// a `<pre>`, followed by the syntax-highlighted code.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let Source(s) = *self;
        let lines = s.lines().count();
        // Number of decimal digits in the largest line number, used as the
        // alignment width for every number.
        let mut cols = 0;
        let mut remaining = lines;
        while remaining > 0 {
            cols += 1;
            remaining /= 10;
        }
        write!(fmt, "<pre class=\"line-numbers\">")?;
        for line_number in 1..lines + 1 {
            write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", line_number, cols)?;
        }
        write!(fmt, "</pre>")?;
        write!(fmt, "{}", highlight::render_with_highlighting(s, None, None, None))?;
        Ok(())
    }
}
/// Renders a macro item: its highlighted source followed by its docs.
fn item_macro(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
              t: &clean::Macro) -> fmt::Result {
    w.write_str(&highlight::render_with_highlighting(&t.source,
                                                     Some("macro"),
                                                     None,
                                                     None))?;
    document(w, cx, it)
}
/// Renders a primitive type's page: its docs plus all associated items
/// registered for it in the cache.
fn item_primitive(w: &mut fmt::Formatter, cx: &Context,
                  it: &clean::Item,
                  _p: &clean::PrimitiveType) -> fmt::Result {
    document(w, cx, it)?;
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
// Keywords included in every page's <meta name="keywords">.
const BASIC_KEYWORDS: &'static str = "rust, rustlang, rust-lang";

/// Builds the keyword list for an item page: the basic keywords plus the
/// item's own name.
fn make_item_keywords(it: &clean::Item) -> String {
    format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())
}
/// Extracts the search-index signature (input and output types) for
/// function-like items; returns `None` for anything without a declaration.
fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
    let decl = match item.inner {
        clean::FunctionItem(ref f) => &f.decl,
        clean::MethodItem(ref m) => &m.decl,
        clean::TyMethodItem(ref m) => &m.decl,
        _ => return None
    };

    let inputs = decl.inputs.values.iter().map(|arg| get_index_type(&arg.type_)).collect();
    let output = match decl.output {
        clean::FunctionRetTy::Return(ref return_type) => Some(get_index_type(return_type)),
        _ => None
    };

    Some(IndexItemFunctionType { inputs: inputs, output: output })
}
/// Converts a clean type into its lowercased search-index representation.
fn get_index_type(clean_type: &clean::Type) -> Type {
    Type { name: get_index_type_name(clean_type).map(|s| s.to_ascii_lowercase()) }
}
/// Best-effort name extraction for the search index: the last path
/// segment, generic name, primitive debug form, or (through references)
/// the referent's name. Unsupported type shapes yield `None`.
fn get_index_type_name(clean_type: &clean::Type) -> Option<String> {
    match *clean_type {
        clean::ResolvedPath { ref path, .. } => {
            let segments = &path.segments;
            Some(segments[segments.len() - 1].name.clone())
        },
        clean::Generic(ref s) => Some(s.clone()),
        clean::Primitive(ref p) => Some(format!("{:?}", p)),
        clean::BorrowedRef { ref type_, .. } => get_index_type_name(type_),
        // FIXME: add all from clean::Type.
        _ => None
    }
}
/// Returns a handle to the thread-local render cache.
pub fn cache() -> Arc<Cache> {
    CACHE_KEY.with(|c| c.borrow().clone())
}
#[cfg(test)]
#[test]
fn test_unique_id() {
    // derive_id must suffix repeated ids with "-1", "-2", ... and start
    // counting afresh after reset_ids.
    let input = ["foo", "examples", "examples", "method.into_iter","examples",
                 "method.into_iter", "foo", "main", "search", "methods",
                 "examples", "method.into_iter", "assoc_type.Item", "assoc_type.Item"];
    let expected = ["foo", "examples", "examples-1", "method.into_iter", "examples-2",
                    "method.into_iter-1", "foo-1", "main-1", "search-1", "methods-1",
                    "examples-3", "method.into_iter-2", "assoc_type.Item", "assoc_type.Item-1"];

    let test = || {
        let actual: Vec<String> = input.iter().map(|s| derive_id(s.to_string())).collect();
        assert_eq!(&actual[..], expected);
    };
    test();
    // Resetting the id map must make the same sequence reproducible.
    reset_ids(true);
    test();
}
| 38.793269 | 100 | 0.507349 |
b9f153a6f71316c8eef16d03ca182ce84b028df7 | 5,940 | /* Christopher Piraino
*
*
* Ferrous Threads
*
*/
//! A TaskRunner is used when a number of short-lived tasks need to be asynchronously
//! done. Here we negate the cost of thread startup by using a fixed amount of threads
//! to serve a potentially infinite series of tasks.
//!
//! # Warnings
//! Currently the mpmc channel implementation has some bugs, it is currently not recommended
//! to use the TaskRunner yet.
use std::mem;
use std::boxed::FnBox;
use std::fmt;
use std::error::Error;
use std::thread::{spawn, JoinHandle};
use canal::mpmc::{Sender, Receiver, mpmc_channel};
const QUEUE_SIZE: usize = ((0 - 1) as u8) as usize;
/// A message sent to worker threads: either a task to execute or a
/// shutdown signal.
enum Task<'a> {
    Data(TaskData<'a>),
    Stop,
}

/// The boxed closure making up one unit of work.
struct TaskData<'a> {
    task_func: Box<FnBox() + Send + 'a>,
}
impl<'a> TaskData<'a> {
    /// Consumes the task and invokes its closure (`FnBox::call_box` lets a
    /// boxed `FnOnce` be called by value).
    fn run(self) {
        self.task_func.call_box(())
    }
}
impl<'a> Task<'a> {
    /// Boxes `func` and wraps it into a `Task::Data` message.
    fn new<F>(func: F) -> Task<'a> where F: FnOnce() + Send + 'a {
        Task::Data(TaskData { task_func: box func })
    }
}
/// An error sending to a running thread.
#[derive(Debug)]
pub struct SendError;

impl Error for SendError {
    // Static description; kept in sync with the Display impl below.
    fn description(&self) -> &str {
        "Could not send to a thread"
    }
}
impl fmt::Display for SendError {
    /// Human-readable message; the same text as `Error::description`.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt::Display::fmt("Could not send to a thread", f)
    }
}
/// A TaskRunner is used to run short-lived tasks in parallel without having to
/// spin up a new thread each and every time.
///
/// The TaskRunner will immediately spin up the number of threads that was passed in
/// on creation.
///
/// Spins up a number of threads and distbutes the enqueued tasks through a
/// multi-producer/multi-consumer queue. This allows every worker to draw from the same queue,
/// ensuring that work will be efficiently distributed across the threads.
///
/// # Panics
/// The failure case of a task panicking and destroying the worker is not handled.
///
/// # Examples
/// ```
/// use ferrous_threads::task_runner::TaskRunner;
///
/// use std::sync::mpsc::channel;
///
/// let (sn, rc) = channel();
/// let taskpool = TaskRunner::new(1);
/// taskpool.enqueue(move || { sn.send(9u8).unwrap();}).ok().expect("Task not enqueued");
/// assert!(rc.recv().unwrap() == 9u8);
/// ```
pub struct TaskRunner<'a> {
    // Sending half of the shared mpmc task queue; each worker holds a
    // clone of the matching receiver.
    queue: Sender<Task<'a>>,
    // Join handles for the worker threads; joined in Drop.
    workers: Vec<JoinHandle<()>>,
}
impl<'a> TaskRunner<'a> {
    /// Create a new TaskRunner with specified number of threads.
    ///
    /// The worker threads are started immediately and all pull from the
    /// same multi-producer/multi-consumer queue.
    pub fn new(num_threads: u8) -> TaskRunner<'a> {
        let (sn, rc): (Sender<Task<'a>>, Receiver<Task<'a>>) = mpmc_channel::<Task>(QUEUE_SIZE);
        let mut guards = Vec::new();
        for _i in 0..num_threads {
            // SAFETY: spawned threads require 'static data, so the receiver
            // is transmuted to 'static. Soundness relies on Drop explicitly
            // joining every worker before `self` (and anything borrowed by
            // queued tasks) goes away.
            let rc: Receiver<Task<'static>> = unsafe { mem::transmute(rc.clone()) };
            let thr = spawn(move || { TaskRunner::worker(rc) });
            guards.push(thr);
        }
        TaskRunner { queue: sn, workers: guards }
    }

    /// Places the enqueued function on the worker queue.
    ///
    /// Returns `Err(SendError)` when the queue rejects the task.
    pub fn enqueue<F>(&self, func: F) -> Result<(), SendError> where F: 'a + FnOnce() + Send {
        let task = Task::new(func);
        // Collapse the channel's error type into the public SendError.
        self.queue.send(task).map_err(|_| SendError)
    }

    // Worker loop: run tasks until a Stop message arrives or the channel
    // is closed.
    fn worker(rc: Receiver<Task>) {
        loop {
            match rc.recv() {
                Ok(Task::Data(task)) => task.run(),
                Ok(Task::Stop) => break,
                Err(_) => break, // TODO: Do something better
            }
        }
    }
}
impl<'a> Drop for TaskRunner<'a> {
    // Joining here is what the lifetime transmute in `new` depends on:
    // no worker may outlive the TaskRunner.
    fn drop(&mut self) {
        // Send stop message without blocking.
        for _thr in self.workers.iter() {
            self.queue.send(Task::Stop).ok().expect("Could not send a stop message.");
        }
        for thr in self.workers.drain(..) {
            thr.join().expect("Thread panicked");
        }
    }
}
#[cfg(test)]
mod test {
    use super::{TaskData, TaskRunner};
    use std::sync::mpsc::{channel};

    // A TaskData runs its closure exactly once when `run` is called.
    #[test]
    fn test_task() {
        let (sn, rc) = channel::<u8>();
        let task_closure = move || {
            sn.send(0u8).unwrap();
        };
        let task = TaskData { task_func: box task_closure };
        task.run();
        assert!(rc.recv().unwrap() == 0);
    }

    // Tasks with differently-typed payloads can be stored and run from
    // the same vector.
    #[test]
    fn test_task_vector() {
        let (sn1, rc1) = channel::<isize>();
        let (sn2, rc2) = channel::<Option<u8>>();
        let task_closure = move || {
            sn1.send(10).unwrap();
        };
        let int_task = TaskData { task_func: box task_closure };
        let task_closure = move || {
            sn2.send(Some(10u8)).unwrap();
        };
        let task = TaskData { task_func: box task_closure };
        let vec = vec![int_task, task];
        for t in vec.into_iter() {
            t.run();
        }
        assert!(rc1.recv().unwrap() == 10);
        assert!(rc2.recv().unwrap().is_some());
    }

    // A single-worker pool executes an enqueued task.
    #[test]
    fn test_task_pool() {
        let (sn1, rc1) = channel::<isize>();
        let task_closure = move || {
            sn1.send(10).unwrap();
        };
        let taskpool = TaskRunner::new(1);
        taskpool.enqueue(task_closure).ok().expect("Task not enqueued");
        assert_eq!(rc1.recv().unwrap(), 10);
    }

    // Multiple workers drain multiple tasks from the shared queue.
    #[test]
    fn test_task_pool_multi_workers() {
        let (sn1, rc1) = channel::<isize>();
        let sn2 = sn1.clone();
        let task_closure = move || {
            sn1.send(10).unwrap();
        };
        let task_closure2 = move || {
            sn2.send(10).unwrap();
        };
        let taskpool = TaskRunner::new(3);
        taskpool.enqueue(task_closure).ok().expect("Task not enqueued");
        taskpool.enqueue(task_closure2).ok().expect("Task not enqueued");
        assert_eq!(rc1.recv().unwrap(), 10);
        assert_eq!(rc1.recv().unwrap(), 10);
    }
}
| 28.285714 | 96 | 0.566162 |
14bdbcd16940a829cb3a63418877b4ce31732dcb | 72,640 | use unicode_width::UnicodeWidthChar;
use std::{
cmp::Ordering,
collections::{BTreeSet, VecDeque},
fmt::{self, Debug, Formatter},
str,
};
use zellij_utils::{vte, zellij_tile};
const TABSTOP_WIDTH: usize = 8; // TODO: is this always right?
const SCROLL_BACK: usize = 10_000; // max rows retained above the viewport
use vte::{Params, Perform};
use zellij_tile::data::{Palette, PaletteColor};
use zellij_utils::{consts::VERSION, logging::debug_log_to_file, shared::version_number};
use crate::panes::terminal_character::{
CharacterStyles, CharsetIndex, Cursor, CursorShape, StandardCharset, TerminalCharacter,
EMPTY_TERMINAL_CHARACTER,
};
// this was copied verbatim from alacritty
/// Parses an unsigned decimal number from raw ASCII bytes.
///
/// Returns `None` for empty input, for any non-digit byte, or when the
/// value overflows a `u8`; otherwise `Some(value)`.
fn parse_number(input: &[u8]) -> Option<u8> {
    if input.is_empty() {
        return None;
    }
    // try_fold short-circuits on the first non-digit or checked overflow,
    // exactly like the original accumulate-and-bail loop.
    input.iter().try_fold(0u8, |num, &byte| {
        let digit = (byte as char).to_digit(10)? as u8;
        num.checked_mul(10)?.checked_add(digit)
    })
}
/// Removes and returns the run of wrapped (non-canonical) rows at the top
/// of `rows`; they belong to a canonical line that lives above this
/// buffer and need to be reattached to it.
fn get_top_non_canonical_rows(rows: &mut Vec<Row>) -> Vec<Row> {
    let wrap_count = rows.iter().take_while(|row| !row.is_canonical).count();
    rows.drain(..wrap_count).collect()
}
/// Detaches the bottom-most canonical row together with every wrapped row
/// following it. When no canonical row exists, the whole buffer is taken
/// (an empty buffer yields an empty Vec).
fn get_bottom_canonical_row_and_wraps(rows: &mut VecDeque<Row>) -> Vec<Row> {
    let start = rows.iter().rposition(|row| row.is_canonical).unwrap_or(0);
    rows.drain(start..).collect()
}
/// Moves up to `count` rows from the bottom of `source` (scrollback) onto
/// the top of `destination` (viewport), re-wrapping canonical lines to
/// `max_dst_width`. Rows pulled but not placed are returned to `source`,
/// re-wrapped to `max_src_width`; a width of `None` means "do not
/// re-wrap".
fn transfer_rows_down(
    source: &mut VecDeque<Row>,
    destination: &mut Vec<Row>,
    count: usize,
    max_src_width: Option<usize>,
    max_dst_width: Option<usize>,
) {
    let mut next_lines: Vec<Row> = vec![];
    // isize: reclaiming wrapped tops of `destination` can make this
    // temporarily negative.
    let mut lines_added_to_destination: isize = 0;
    loop {
        if lines_added_to_destination as usize == count {
            break;
        }
        if next_lines.is_empty() {
            match source.pop_back() {
                Some(next_line) => {
                    // Wrapped rows at the top of the destination belong to
                    // the canonical line we just popped; pull them back and
                    // re-wrap the whole line to the destination width.
                    let mut top_non_canonical_rows_in_dst = get_top_non_canonical_rows(destination);
                    lines_added_to_destination -= top_non_canonical_rows_in_dst.len() as isize;
                    next_lines.push(next_line);
                    next_lines.append(&mut top_non_canonical_rows_in_dst);
                    next_lines = match max_dst_width {
                        Some(max_row_width) => {
                            Row::from_rows(next_lines).split_to_rows_of_length(max_row_width)
                        }
                        None => vec![Row::from_rows(next_lines)],
                    };
                    if next_lines.is_empty() {
                        // no more lines at source, the line we popped was probably empty
                        break;
                    }
                }
                None => break, // no more rows
            }
        }
        destination.insert(0, next_lines.pop().unwrap());
        lines_added_to_destination += 1;
    }
    if !next_lines.is_empty() {
        // Return the rows we pulled but did not place, re-wrapped to the
        // source's width.
        match max_src_width {
            Some(max_row_width) => {
                let excess_rows = Row::from_rows(next_lines).split_to_rows_of_length(max_row_width);
                source.extend(excess_rows);
            }
            None => {
                let excess_row = Row::from_rows(next_lines);
                bounded_push(source, excess_row);
            }
        }
    }
}
/// Moves up to `count` rows from the top of `source` (viewport) onto the
/// bottom of `destination` (scrollback), the inverse of
/// `transfer_rows_down`: wrapped rows are merged with the canonical row
/// they continue before re-wrapping to `max_dst_width`. Leftover rows go
/// back to the front of `source`, re-wrapped to `max_src_width`; `None`
/// means "do not re-wrap".
fn transfer_rows_up(
    source: &mut Vec<Row>,
    destination: &mut VecDeque<Row>,
    count: usize,
    max_src_width: Option<usize>,
    max_dst_width: Option<usize>,
) {
    let mut next_lines: Vec<Row> = vec![];
    for _ in 0..count {
        if next_lines.is_empty() {
            if !source.is_empty() {
                let next_line = source.remove(0);
                // A wrapped row continues the destination's bottom
                // canonical line; retrieve that line (and its wraps) so
                // they can be re-wrapped together.
                if !next_line.is_canonical {
                    let mut bottom_canonical_row_and_wraps_in_dst =
                        get_bottom_canonical_row_and_wraps(destination);
                    next_lines.append(&mut bottom_canonical_row_and_wraps_in_dst);
                }
                next_lines.push(next_line);
                next_lines = match max_dst_width {
                    Some(max_row_width) => {
                        Row::from_rows(next_lines).split_to_rows_of_length(max_row_width)
                    }
                    None => vec![Row::from_rows(next_lines)],
                };
            } else {
                break; // no more rows
            }
        }
        bounded_push(destination, next_lines.remove(0));
    }
    if !next_lines.is_empty() {
        // Return unplaced rows to the front of the source, re-wrapped.
        match max_src_width {
            Some(max_row_width) => {
                let excess_rows = Row::from_rows(next_lines).split_to_rows_of_length(max_row_width);
                for row in excess_rows {
                    source.insert(0, row);
                }
            }
            None => {
                let excess_row = Row::from_rows(next_lines);
                source.insert(0, excess_row);
            }
        }
    }
}
/// Appends `value` to the back of `vec`, evicting the oldest row first
/// when the scrollback limit has been reached.
fn bounded_push(vec: &mut VecDeque<Row>, value: Row) {
    let at_capacity = vec.len() >= SCROLL_BACK;
    if at_capacity {
        vec.pop_front();
    }
    vec.push_back(value)
}
/// Builds the default tabstop set: every `TABSTOP_WIDTH`-th column, up to
/// and including `columns` (empty when `columns < TABSTOP_WIDTH`).
pub fn create_horizontal_tabstops(columns: usize) -> BTreeSet<usize> {
    // Equivalent to the manual "insert, advance, bail when past the end"
    // loop, expressed as a stepped range.
    (TABSTOP_WIDTH..=columns).step_by(TABSTOP_WIDTH).collect()
}
#[derive(Clone)]
pub struct Grid {
    // scrollback rows above the visible area (bounded by SCROLL_BACK)
    lines_above: VecDeque<Row>,
    // the rows currently visible on screen
    viewport: Vec<Row>,
    // rows below the visible area
    lines_below: Vec<Row>,
    // columns at which a TAB stops
    horizontal_tabstops: BTreeSet<usize>,
    // presumably the saved primary-screen state while an alternate screen
    // is active -- TODO confirm against the methods using it
    alternative_lines_above_viewport_and_cursor: Option<(VecDeque<Row>, Vec<Row>, Cursor)>,
    cursor: Cursor,
    saved_cursor_position: Option<Cursor>,
    // (top, bottom) of the scroll region, when one is set
    scroll_region: Option<(usize, usize)>,
    active_charset: CharsetIndex,
    // most recently handled character, if any
    preceding_char: Option<TerminalCharacter>,
    colors: Palette,
    pub should_render: bool,
    pub cursor_key_mode: bool, // DECCKM - when set, cursor keys should send ANSI direction codes (eg. "OD") instead of the arrow keys (eg. "[D")
    pub erasure_mode: bool,    // ERM
    pub insert_mode: bool,
    pub disable_linewrap: bool,
    pub clear_viewport_before_rendering: bool,
    pub width: usize,
    pub height: usize,
    // raw byte replies (eg. to status queries) waiting to be written to the pty
    pub pending_messages_to_pty: Vec<Vec<u8>>,
}
impl Debug for Grid {
    /// Dumps the viewport one row per line, tagging each row as canonical
    /// ("C") or wrapped ("W").
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        for (i, row) in self.viewport.iter().enumerate() {
            let kind = if row.is_canonical { "C" } else { "W" };
            writeln!(f, "{:02?} ({}): {:?}", i, kind, row)?;
        }
        Ok(())
    }
}
impl Grid {
    /// Creates an empty `rows` x `columns` grid with default tabstops and
    /// a single empty canonical line in the viewport.
    pub fn new(rows: usize, columns: usize, colors: Palette) -> Self {
        Grid {
            lines_above: VecDeque::with_capacity(SCROLL_BACK),
            viewport: vec![Row::new().canonical()],
            lines_below: vec![],
            horizontal_tabstops: create_horizontal_tabstops(columns),
            cursor: Cursor::new(0, 0),
            saved_cursor_position: None,
            scroll_region: None,
            preceding_char: None,
            width: columns,
            height: rows,
            should_render: true,
            cursor_key_mode: false,
            erasure_mode: false,
            insert_mode: false,
            disable_linewrap: false,
            alternative_lines_above_viewport_and_cursor: None,
            clear_viewport_before_rendering: false,
            active_charset: Default::default(),
            pending_messages_to_pty: vec![],
            colors,
        }
    }
pub fn advance_to_next_tabstop(&mut self, styles: CharacterStyles) {
let mut next_tabstop = None;
for tabstop in self.horizontal_tabstops.iter() {
if *tabstop > self.cursor.x {
next_tabstop = Some(tabstop);
break;
}
}
match next_tabstop {
Some(tabstop) => {
self.cursor.x = *tabstop;
}
None => {
self.cursor.x = self.width.saturating_sub(1);
}
}
let mut empty_character = EMPTY_TERMINAL_CHARACTER;
empty_character.styles = styles;
self.pad_current_line_until(self.cursor.x);
}
pub fn move_to_previous_tabstop(&mut self) {
let mut previous_tabstop = None;
for tabstop in self.horizontal_tabstops.iter() {
if *tabstop >= self.cursor.x {
break;
}
previous_tabstop = Some(tabstop);
}
match previous_tabstop {
Some(tabstop) => {
self.cursor.x = *tabstop;
}
None => {
self.cursor.x = 0;
}
}
}
    /// The shape the cursor should currently be drawn with.
    pub fn cursor_shape(&self) -> CursorShape {
        self.cursor.get_shape()
    }
    /// Registers a tabstop at the cursor's current column.
    fn set_horizontal_tabstop(&mut self) {
        self.horizontal_tabstops.insert(self.cursor.x);
    }
    /// Removes the tabstop at `position`, if one exists.
    fn clear_tabstop(&mut self, position: usize) {
        self.horizontal_tabstops.remove(&position);
    }
    /// Removes every tabstop.
    fn clear_all_tabstops(&mut self) {
        self.horizontal_tabstops.clear();
    }
    /// Snapshots the current cursor state for a later restore.
    fn save_cursor_position(&mut self) {
        self.saved_cursor_position = Some(self.cursor.clone());
    }
    /// Restores the last saved cursor; no-op when nothing was saved.
    fn restore_cursor_position(&mut self) {
        if let Some(saved_cursor_position) = self.saved_cursor_position.as_ref() {
            self.cursor = saved_cursor_position.clone();
        }
    }
    /// Assigns `charset` to slot `index` of the cursor's charset table.
    fn configure_charset(&mut self, charset: StandardCharset, index: CharsetIndex) {
        self.cursor.charsets[index] = charset;
    }
    /// Records `index` as the active charset slot.
    fn set_active_charset(&mut self, index: CharsetIndex) {
        self.active_charset = index;
    }
fn cursor_canonical_line_index(&self) -> usize {
let mut cursor_canonical_line_index = 0;
let mut canonical_lines_traversed = 0;
for (i, line) in self.viewport.iter().enumerate() {
if line.is_canonical {
cursor_canonical_line_index = canonical_lines_traversed;
canonical_lines_traversed += 1;
}
if i == self.cursor.y {
break;
}
}
cursor_canonical_line_index
}
// TODO: merge these two funtions
fn cursor_index_in_canonical_line(&self) -> usize {
let mut cursor_canonical_line_index = 0;
let mut cursor_index_in_canonical_line = 0;
for (i, line) in self.viewport.iter().enumerate() {
if line.is_canonical {
cursor_canonical_line_index = i;
}
if i == self.cursor.y {
let line_wrap_position_in_line = self.cursor.y - cursor_canonical_line_index;
cursor_index_in_canonical_line = line_wrap_position_in_line + self.cursor.x;
break;
}
}
cursor_index_in_canonical_line
}
fn canonical_line_y_coordinates(&self, canonical_line_index: usize) -> usize {
let mut canonical_lines_traversed = 0;
let mut y_coordinates = 0;
for (i, line) in self.viewport.iter().enumerate() {
if line.is_canonical {
canonical_lines_traversed += 1;
if canonical_lines_traversed == canonical_line_index + 1 {
y_coordinates = i;
break;
}
}
}
y_coordinates
}
    /// Scrolls the view up one line: the bottom viewport row moves to
    /// `lines_below` and the newest scrollback row is re-inserted at the
    /// top. No-op when there is no scrollback or the viewport is not full.
    pub fn scroll_up_one_line(&mut self) {
        if !self.lines_above.is_empty() && self.viewport.len() == self.height {
            let line_to_push_down = self.viewport.pop().unwrap();
            self.lines_below.insert(0, line_to_push_down);
            let line_to_insert_at_viewport_top = self.lines_above.pop_back().unwrap();
            self.viewport.insert(0, line_to_insert_at_viewport_top);
        }
    }
    /// Scrolls the view down one line, the inverse of
    /// `scroll_up_one_line`: the top viewport row moves into scrollback
    /// and the first `lines_below` row is appended at the bottom.
    pub fn scroll_down_one_line(&mut self) {
        if !self.lines_below.is_empty() && self.viewport.len() == self.height {
            let mut line_to_push_up = self.viewport.remove(0);
            if line_to_push_up.is_canonical {
                bounded_push(&mut self.lines_above, line_to_push_up);
            } else {
                // A wrapped row continues the last scrollback line; merge
                // it back instead of storing it as its own row.
                let mut last_line_above = self.lines_above.pop_back().unwrap();
                last_line_above.append(&mut line_to_push_up.columns);
                bounded_push(&mut self.lines_above, last_line_above);
            }
            let line_to_insert_at_viewport_bottom = self.lines_below.remove(0);
            self.viewport.push(line_to_insert_at_viewport_bottom);
        }
    }
    /// Resizes the grid to `new_rows` x `new_columns`: re-wraps every
    /// canonical line in the viewport to the new width, transfers rows
    /// between the scrollback and the viewport so the viewport stays
    /// exactly `new_rows` tall, and recomputes the cursor position.
    pub fn change_size(&mut self, new_rows: usize, new_columns: usize) {
        if new_columns != self.width {
            let mut cursor_canonical_line_index = self.cursor_canonical_line_index();
            let cursor_index_in_canonical_line = self.cursor_index_in_canonical_line();
            // Step 1: collapse the viewport into whole canonical lines,
            // merging each wrapped row into the line it continues.
            let mut viewport_canonical_lines = vec![];
            for mut row in self.viewport.drain(..) {
                if !row.is_canonical
                    && viewport_canonical_lines.is_empty()
                    && !self.lines_above.is_empty()
                {
                    // The first viewport row is a wrap of the newest
                    // scrollback line; pull that line down to complete it.
                    let mut first_line_above = self.lines_above.pop_back().unwrap();
                    first_line_above.append(&mut row.columns);
                    viewport_canonical_lines.push(first_line_above);
                    cursor_canonical_line_index += 1;
                } else if row.is_canonical {
                    viewport_canonical_lines.push(row);
                } else {
                    match viewport_canonical_lines.last_mut() {
                        Some(last_line) => {
                            last_line.append(&mut row.columns);
                        }
                        None => {
                            // the state is corrupted somehow
                            // this is a bug and I'm not yet sure why it happens
                            // usually it fixes itself and is a result of some race
                            // TODO: investigate why this happens and solve it
                            return;
                        }
                    }
                }
            }
            // Step 2: re-wrap each canonical line to the new width.
            let mut new_viewport_rows = vec![];
            for mut canonical_line in viewport_canonical_lines {
                let mut canonical_line_parts: Vec<Row> = vec![];
                if canonical_line.columns.is_empty() {
                    canonical_line_parts.push(Row::new().canonical());
                }
                while !canonical_line.columns.is_empty() {
                    let next_wrap = if canonical_line.width() > new_columns {
                        canonical_line.drain_until(new_columns)
                    } else {
                        canonical_line.columns.drain(..).collect()
                    };
                    let row = Row::from_columns(next_wrap);
                    // if there are no more parts, this row is canonical as long as it originally
                    // was canonical (it might not have been for example if it's the first row in
                    // the viewport, and the actual canonical row is above it in the scrollback)
                    let row = if canonical_line_parts.is_empty() && canonical_line.is_canonical {
                        row.canonical()
                    } else {
                        row
                    };
                    canonical_line_parts.push(row);
                }
                new_viewport_rows.append(&mut canonical_line_parts);
            }
            self.viewport = new_viewport_rows;

            // Step 3: recompute the cursor from its canonical-line
            // coordinates.
            // NOTE(review): `(idx / new_columns) + (idx % new_columns)`
            // mirrors the arithmetic in cursor_index_in_canonical_line but
            // looks suspicious (sum of quotient and remainder rather than
            // row/column split) -- confirm it is intended.
            let mut new_cursor_y = self.canonical_line_y_coordinates(cursor_canonical_line_index);
            let new_cursor_x = (cursor_index_in_canonical_line / new_columns)
                + (cursor_index_in_canonical_line % new_columns);

            // Step 4: pull rows from / push rows to the scrollback so the
            // viewport is exactly `self.height` tall again.
            let current_viewport_row_count = self.viewport.len();
            match current_viewport_row_count.cmp(&self.height) {
                Ordering::Less => {
                    let row_count_to_transfer = self.height - current_viewport_row_count;
                    transfer_rows_down(
                        &mut self.lines_above,
                        &mut self.viewport,
                        row_count_to_transfer,
                        None,
                        Some(new_columns),
                    );
                    let rows_pulled = self.viewport.len() - current_viewport_row_count;
                    new_cursor_y += rows_pulled;
                }
                Ordering::Greater => {
                    let row_count_to_transfer = current_viewport_row_count - self.height;
                    if row_count_to_transfer > new_cursor_y {
                        new_cursor_y = 0;
                    } else {
                        new_cursor_y -= row_count_to_transfer;
                    }
                    transfer_rows_up(
                        &mut self.viewport,
                        &mut self.lines_above,
                        row_count_to_transfer,
                        Some(new_columns),
                        None,
                    );
                }
                Ordering::Equal => {}
            }
            self.cursor.y = new_cursor_y;
            self.cursor.x = new_cursor_x;
        }
        if new_rows != self.height {
            // Height change only: move rows between scrollback and
            // viewport, keeping the cursor on the same row.
            let current_viewport_row_count = self.viewport.len();
            match current_viewport_row_count.cmp(&new_rows) {
                Ordering::Less => {
                    let row_count_to_transfer = new_rows - current_viewport_row_count;
                    transfer_rows_down(
                        &mut self.lines_above,
                        &mut self.viewport,
                        row_count_to_transfer,
                        None,
                        Some(new_columns),
                    );
                    let rows_pulled = self.viewport.len() - current_viewport_row_count;
                    self.cursor.y += rows_pulled;
                }
                Ordering::Greater => {
                    let row_count_to_transfer = current_viewport_row_count - new_rows;
                    if row_count_to_transfer > self.cursor.y {
                        self.cursor.y = 0;
                    } else {
                        self.cursor.y -= row_count_to_transfer;
                    }
                    transfer_rows_up(
                        &mut self.viewport,
                        &mut self.lines_above,
                        row_count_to_transfer,
                        Some(new_columns),
                        None,
                    );
                }
                Ordering::Equal => {}
            }
        }
        self.height = new_rows;
        self.width = new_columns;
        if self.scroll_region.is_some() {
            self.set_scroll_region_to_viewport_size();
        }
    }
/// Renders the viewport as a rectangle of terminal characters: one line per
/// row, each padded (or truncated) to the viewport width, with blank lines
/// appended below until the full height is covered.
pub fn as_character_lines(&self) -> Vec<Vec<TerminalCharacter>> {
    let mut rendered: Vec<Vec<TerminalCharacter>> = Vec::with_capacity(self.height);
    for row in &self.viewport {
        // wide characters occupy extra display columns, so the cell count is
        // reduced by the row's excess width
        let target_len = self.width.saturating_sub(row.excess_width());
        let mut line: Vec<TerminalCharacter> = row.columns.to_vec();
        line.resize(target_len, EMPTY_TERMINAL_CHARACTER);
        rendered.push(line);
    }
    // fill the remainder of the screen with blank lines
    let blank_line = vec![EMPTY_TERMINAL_CHARACTER; self.width];
    while rendered.len() < self.height {
        rendered.push(blank_line.clone());
    }
    rendered
}
/// Returns the cursor's (x, y) position, or `None` while the cursor is hidden.
pub fn cursor_coordinates(&self) -> Option<(usize, usize)> {
    match self.cursor.is_hidden {
        true => None,
        false => Some((self.cursor.x, self.cursor.y)),
    }
}
/// Scrolls `count` lines of scrollback history back into view.
pub fn move_viewport_up(&mut self, count: usize) {
    (0..count).for_each(|_| self.scroll_up_one_line());
}
/// Scrolls the viewport `count` lines back down towards the live content.
pub fn move_viewport_down(&mut self, count: usize) {
    (0..count).for_each(|_| self.scroll_down_one_line());
}
/// Scrolls all the way back down so that no lines remain below the viewport.
pub fn reset_viewport(&mut self) {
    let lines_below_count = self.lines_below.len();
    (0..lines_below_count).for_each(|_| self.scroll_down_one_line());
}
/// Rotates the scroll region's content down by `count` lines: the region's
/// bottom line is dropped and a blank canonical line is inserted at its top.
/// Does nothing when no scroll region is set.
pub fn rotate_scroll_region_up(&mut self, count: usize) {
    if let Some((region_top, region_bottom)) = self.scroll_region {
        for _ in 0..count {
            let blank_columns = vec![EMPTY_TERMINAL_CHARACTER; self.width];
            // only touch indices that actually exist in the viewport
            if region_bottom < self.viewport.len() {
                self.viewport.remove(region_bottom);
            }
            if region_top < self.viewport.len() {
                self.viewport
                    .insert(region_top, Row::from_columns(blank_columns).canonical());
            }
        }
    }
}
/// Rotates the scroll region's content up by `count` lines: the region's top
/// line is dropped and a blank canonical line is appended at its bottom.
/// Does nothing when no scroll region is set.
pub fn rotate_scroll_region_down(&mut self, count: usize) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        for _ in 0..count {
            let columns = vec![EMPTY_TERMINAL_CHARACTER; self.width];
            // guard the removal: the previous unconditional
            // `self.viewport.remove(scroll_region_top)` panicked when the
            // viewport was shorter than the region top (rotate_scroll_region_up
            // already guards the symmetric case)
            if scroll_region_top < self.viewport.len() {
                self.viewport.remove(scroll_region_top);
            }
            // insert at the region bottom when that index is in range
            // (index == len is an append), otherwise fall back to push
            if scroll_region_bottom <= self.viewport.len() {
                self.viewport
                    .insert(scroll_region_bottom, Row::from_columns(columns).canonical());
            } else {
                self.viewport.push(Row::from_columns(columns).canonical());
            }
        }
    }
}
/// Replaces the entire viewport with full-width canonical rows made solely of
/// `character`.
pub fn fill_viewport(&mut self, character: TerminalCharacter) {
    let (height, width) = (self.height, self.width);
    self.viewport.clear();
    self.viewport.extend(
        (0..height).map(|_| Row::from_columns(vec![character; width]).canonical()),
    );
}
/// Moves to a new canonical (unwrapped) line: inside a scroll region at its
/// bottom this scrolls the region; at the bottom of the viewport it pushes
/// the top line into the scrollback; otherwise it simply moves the cursor
/// down, creating the next row if needed.
pub fn add_canonical_line(&mut self) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        if self.cursor.y == scroll_region_bottom {
            // end of scroll region
            // when we have a scroll region set and we're at its bottom
            // we need to delete its first line, thus shifting all lines in it upwards
            // then we add an empty line at its end which will be filled by the application
            // controlling the scroll region (presumably filled by whatever comes next in the
            // scroll buffer, but that's not something we control)
            if scroll_region_top >= self.viewport.len() {
                // the state is corrupted
                return;
            }
            self.viewport.remove(scroll_region_top);
            let columns = vec![EMPTY_TERMINAL_CHARACTER; self.width];
            if self.viewport.len() >= scroll_region_bottom {
                self.viewport
                    .insert(scroll_region_bottom, Row::from_columns(columns).canonical());
            } else {
                self.viewport.push(Row::from_columns(columns).canonical());
            }
            // the cursor stays on the region's bottom line
            return;
        }
    }
    if self.viewport.len() <= self.cursor.y + 1 {
        // FIXME: this should add an empty line with the pad_character
        // but for some reason this breaks rendering in various situations
        // it needs to be investigated and fixed
        let new_row = Row::new().canonical();
        self.viewport.push(new_row);
    }
    if self.cursor.y == self.height - 1 {
        // bottom of the viewport: scroll one line into the scrollback instead
        // of moving the cursor
        let row_count_to_transfer = 1;
        transfer_rows_up(
            &mut self.viewport,
            &mut self.lines_above,
            row_count_to_transfer,
            Some(self.width),
            None,
        );
    } else {
        self.cursor.y += 1;
    }
}
/// Carriage return: moves the cursor to column 0 of the current line.
pub fn move_cursor_to_beginning_of_line(&mut self) {
    self.cursor.x = 0;
}
/// Inserts `terminal_character` at the cursor, shifting the rest of the line
/// right and truncating anything pushed past the viewport width; if the
/// cursor row does not exist yet, pads with empty rows and creates it.
pub fn insert_character_at_cursor_position(&mut self, terminal_character: TerminalCharacter) {
    if let Some(row) = self.viewport.get_mut(self.cursor.y) {
        row.insert_character_at(terminal_character, self.cursor.x);
        if row.len() > self.width {
            row.truncate(self.width);
        }
    } else {
        // cursor is below the last existing row: pad lines until cursor if
        // they do not exist, then open a new row holding the character
        while self.viewport.len() < self.cursor.y {
            self.viewport.push(Row::new().canonical());
        }
        self.viewport
            .push(Row::new().with_character(terminal_character).canonical());
    }
}
/// Writes `terminal_character` at the cursor: in insert mode the rest of the
/// line shifts right, otherwise the cell is overwritten in place; the row is
/// then truncated to `max_width`. A missing cursor row is created, padding
/// with empty rows as needed.
pub fn add_character_at_cursor_position(
    &mut self,
    terminal_character: TerminalCharacter,
    max_width: usize,
) {
    if let Some(row) = self.viewport.get_mut(self.cursor.y) {
        match self.insert_mode {
            true => row.insert_character_at(terminal_character, self.cursor.x),
            false => row.add_character_at(terminal_character, self.cursor.x),
        }
        row.truncate(max_width);
    } else {
        // pad lines until cursor if they do not exist
        while self.viewport.len() < self.cursor.y {
            self.viewport.push(Row::new().canonical());
        }
        self.viewport
            .push(Row::new().with_character(terminal_character).canonical());
    }
}
/// Adds a printable character at the cursor, line-wrapping first when the
/// cursor sits past the right edge (unless linewrap is disabled), then
/// advances the cursor by the character's display width.
pub fn add_character(&mut self, terminal_character: TerminalCharacter) {
    // TODO: try to separate adding characters from moving the cursors in this function
    let character_width = terminal_character.width;
    if self.cursor.x >= self.width {
        if self.disable_linewrap {
            return;
        }
        // line wrap
        self.cursor.x = 0;
        if self.cursor.y == self.height - 1 {
            // bottom line: push the top of the viewport into the scrollback
            // and open a new non-canonical (wrapped) row at the bottom
            let row_count_to_transfer = 1;
            transfer_rows_up(
                &mut self.viewport,
                &mut self.lines_above,
                row_count_to_transfer,
                Some(self.width),
                None,
            );
            let wrapped_row = Row::new();
            self.viewport.push(wrapped_row);
        } else {
            self.cursor.y += 1;
            if self.viewport.len() <= self.cursor.y {
                let line_wrapped_row = Row::new();
                self.viewport.push(line_wrapped_row);
            }
        }
    }
    self.add_character_at_cursor_position(terminal_character, self.width);
    self.move_cursor_forward_until_edge(character_width);
}
/// Advances the cursor `count` columns, clamping at the right edge of the
/// viewport (the cursor may legitimately rest at x == width; see
/// move_cursor_back).
pub fn move_cursor_forward_until_edge(&mut self, count: usize) {
    let step = count.min(self.width - self.cursor.x);
    self.cursor.x += step;
}
/// Erases from the cursor to the end of the line, padding with `replace_with`
/// up to the viewport width.
/// NOTE(review): unwraps the cursor row — assumes callers keep the cursor on
/// an existing viewport row; panics otherwise.
pub fn replace_characters_in_line_after_cursor(&mut self, replace_with: TerminalCharacter) {
    self.viewport
        .get_mut(self.cursor.y)
        .unwrap()
        .replace_and_pad_end(self.cursor.x, self.width, replace_with);
}
/// Erases from the beginning of the line up to the cursor, padding with
/// `replace_with`.
/// NOTE(review): unwraps the cursor row — panics if it does not exist.
pub fn replace_characters_in_line_before_cursor(&mut self, replace_with: TerminalCharacter) {
    let row = self.viewport.get_mut(self.cursor.y).unwrap();
    row.replace_and_pad_beginning(self.cursor.x, replace_with);
}
/// Erases from the cursor to the end of the screen: the rest of the cursor
/// line plus every line below it, all filled with `replace_with`.
pub fn clear_all_after_cursor(&mut self, replace_with: TerminalCharacter) {
    if let Some(cursor_row) = self.viewport.get_mut(self.cursor.y) {
        cursor_row.truncate(self.cursor.x);
        let replace_with_columns = vec![replace_with; self.width];
        // refill the tail of the cursor line with the styled blank
        self.replace_characters_in_line_after_cursor(replace_with);
        for row in self.viewport.iter_mut().skip(self.cursor.y + 1) {
            row.replace_columns(replace_with_columns.clone());
        }
    }
}
/// Erases from the top of the screen up to the cursor: every line above the
/// cursor plus the cursor line up to the cursor column, filled with
/// `replace_with`. Does nothing if the cursor row does not exist.
pub fn clear_all_before_cursor(&mut self, replace_with: TerminalCharacter) {
    if self.viewport.get(self.cursor.y).is_none() {
        return;
    }
    self.replace_characters_in_line_before_cursor(replace_with);
    let blank_columns = vec![replace_with; self.width];
    for row in self.viewport.iter_mut().take(self.cursor.y) {
        row.replace_columns(blank_columns.clone());
    }
}
/// Erases the entire cursor line.
/// NOTE(review): unwraps the cursor row — panics if it does not exist.
pub fn clear_cursor_line(&mut self) {
    self.viewport.get_mut(self.cursor.y).unwrap().truncate(0);
}
/// Wipes the whole viewport, filling every row with `replace_with`.
pub fn clear_all(&mut self, replace_with: TerminalCharacter) {
    self.replace_characters_in_line_after_cursor(replace_with);
    let blank_columns = vec![replace_with; self.width];
    self.viewport
        .iter_mut()
        .for_each(|row| row.replace_columns(blank_columns.clone()));
}
/// Extends the cursor's row with empty cells so it is at least `position`
/// cells long.
/// NOTE(review): unwraps the cursor row — panics if it does not exist.
fn pad_current_line_until(&mut self, position: usize) {
    let current_row = self.viewport.get_mut(self.cursor.y).unwrap();
    while current_row.len() < position {
        current_row.push(EMPTY_TERMINAL_CHARACTER);
    }
}
/// Appends full-width canonical rows of `pad_character` until the row at
/// index `position` exists.
fn pad_lines_until(&mut self, position: usize, pad_character: TerminalCharacter) {
    while self.viewport.len() <= position {
        let columns = vec![pad_character; self.width];
        self.viewport.push(Row::from_columns(columns).canonical());
    }
}
/// Moves the cursor to (x, y), clamped to the viewport — or, when a scroll
/// region is set, clamped to the region bottom and (in origin mode) offset by
/// the region top. Pads rows and columns so the target cell exists.
pub fn move_cursor_to(&mut self, x: usize, y: usize, pad_character: TerminalCharacter) {
    match self.scroll_region {
        Some((scroll_region_top, scroll_region_bottom)) => {
            self.cursor.x = std::cmp::min(self.width - 1, x);
            // in origin mode (mode 6, tracked here as erasure_mode — see
            // csi_dispatch) row coordinates are relative to the region top
            let y_offset = if self.erasure_mode {
                scroll_region_top
            } else {
                0
            };
            self.cursor.y = std::cmp::min(scroll_region_bottom, y + y_offset);
            self.pad_lines_until(self.cursor.y, pad_character);
            self.pad_current_line_until(self.cursor.x);
        }
        None => {
            self.cursor.x = std::cmp::min(self.width - 1, x);
            self.cursor.y = std::cmp::min(self.height - 1, y);
            self.pad_lines_until(self.cursor.y, pad_character);
            self.pad_current_line_until(self.cursor.x);
        }
    }
}
/// Moves the cursor up `count` lines; inside a scroll region the cursor stops
/// at the region's top line, otherwise at the top of the viewport.
pub fn move_cursor_up(&mut self, count: usize) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        if (scroll_region_top..=scroll_region_bottom).contains(&self.cursor.y) {
            self.cursor.y =
                std::cmp::max(self.cursor.y.saturating_sub(count), scroll_region_top);
            return;
        }
    }
    self.cursor.y = self.cursor.y.saturating_sub(count);
}
/// Reverse index, repeated `count` times: moves the cursor up one line per
/// step; when the cursor is at the top of the scroll region (or viewport,
/// when no region is set) the content scrolls down instead.
pub fn move_cursor_up_with_scrolling(&mut self, count: usize) {
    let (scroll_region_top, scroll_region_bottom) =
        self.scroll_region.unwrap_or((0, self.height - 1));
    for _ in 0..count {
        let current_line_index = self.cursor.y;
        if current_line_index == scroll_region_top {
            // at the region top: scroll its content down by dropping the
            // region's bottom line and inserting a fresh line at the top
            if scroll_region_bottom < self.viewport.len() {
                self.viewport.remove(scroll_region_bottom);
            }
            self.viewport.insert(current_line_index, Row::new()); // TODO: .canonical() ?
        } else if current_line_index > scroll_region_top
            && current_line_index <= scroll_region_bottom
        {
            // each loop iteration is a single reverse-index step, so move one
            // line at a time (previously this called move_cursor_up(count)
            // inside the loop, moving count * count lines in total)
            self.move_cursor_up(1);
        }
    }
}
/// Moves the cursor down `count` lines. Inside a scroll region it clamps at
/// the region's bottom; otherwise any lines that would go past the bottom of
/// the viewport are produced by scrolling (add_canonical_line), and the
/// viewport is padded so the cursor lands on an existing row.
pub fn move_cursor_down(&mut self, count: usize, pad_character: TerminalCharacter) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        if self.cursor.y >= scroll_region_top && self.cursor.y <= scroll_region_bottom {
            self.cursor.y = std::cmp::min(self.cursor.y + count, scroll_region_bottom);
            return;
        }
    }
    // how many of the requested lines exceed the last viewport line
    let lines_to_add = if self.cursor.y + count > self.height - 1 {
        (self.cursor.y + count) - (self.height - 1)
    } else {
        0
    };
    self.cursor.y = if self.cursor.y + count > self.height - 1 {
        self.height - 1
    } else {
        self.cursor.y + count
    };
    for _ in 0..lines_to_add {
        self.add_canonical_line();
    }
    self.pad_lines_until(self.cursor.y, pad_character);
}
/// Moves the cursor back `count` columns, stopping at column 0.
pub fn move_cursor_back(&mut self, count: usize) {
    if self.cursor.x == self.width {
        // on the rightmost screen edge, backspace skips one character
        self.cursor.x -= 1;
    }
    self.cursor.x = self.cursor.x.saturating_sub(count);
}
/// Hides the cursor (it is still tracked, just not reported/rendered).
pub fn hide_cursor(&mut self) {
    self.cursor.is_hidden = true;
}
/// Makes the cursor visible again.
pub fn show_cursor(&mut self) {
    self.cursor.is_hidden = false;
}
/// Sets the scroll region to the given inclusive, 0-indexed top and bottom
/// line indices. A missing bottom margin defaults to the last viewport line.
pub fn set_scroll_region(&mut self, top_line_index: usize, bottom_line_index: Option<usize>) {
    // the region bottom is an inclusive 0-indexed line index (see
    // set_scroll_region_to_viewport_size, which uses height - 1), so the
    // default must be the last line; `self.height` was off by one
    let bottom_line_index = bottom_line_index.unwrap_or_else(|| self.height.saturating_sub(1));
    self.scroll_region = Some((top_line_index, bottom_line_index));
}
/// Removes the active scroll region, restoring full-viewport scrolling.
pub fn clear_scroll_region(&mut self) {
    self.scroll_region = None;
}
/// Sets the scroll region to span the whole viewport (inclusive 0-indexed
/// top and bottom lines).
pub fn set_scroll_region_to_viewport_size(&mut self) {
    self.scroll_region = Some((0, self.height - 1));
}
/// Deletes `count` lines at the cursor inside the scroll region, shifting the
/// lines below the cursor up and appending blank lines at the region's bottom
/// so the region keeps its size. No-op when the cursor is outside the region
/// or no region is set.
pub fn delete_lines_in_scroll_region(
    &mut self,
    count: usize,
    pad_character: TerminalCharacter,
) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        let current_line_index = self.cursor.y;
        if current_line_index >= scroll_region_top && current_line_index <= scroll_region_bottom
        {
            // when deleting lines inside the scroll region, we must make sure it stays the
            // same size (and that other lines below it aren't shifted inside it)
            // so we delete the current line(s) and add an empty line at the end of the scroll
            // region
            for _ in 0..count {
                // NOTE(review): assumes the cursor line exists in the viewport;
                // Vec::remove panics otherwise — confirm callers uphold this
                self.viewport.remove(current_line_index);
                let columns = vec![pad_character; self.width];
                if self.viewport.len() > scroll_region_bottom {
                    self.viewport
                        .insert(scroll_region_bottom, Row::from_columns(columns).canonical());
                } else {
                    self.viewport.push(Row::from_columns(columns).canonical());
                }
            }
        }
    }
}
/// Inserts `count` blank lines at the cursor inside the scroll region,
/// dropping lines at the region's bottom so the region keeps its size. No-op
/// when the cursor is outside the region or no region is set.
pub fn add_empty_lines_in_scroll_region(
    &mut self,
    count: usize,
    pad_character: TerminalCharacter,
) {
    if let Some((scroll_region_top, scroll_region_bottom)) = self.scroll_region {
        let current_line_index = self.cursor.y;
        if current_line_index >= scroll_region_top && current_line_index <= scroll_region_bottom
        {
            // when adding empty lines inside the scroll region, we must make sure it stays the
            // same size and that lines don't "leak" outside of it
            // so we add an empty line where the cursor currently is, and delete the last line
            // of the scroll region
            for _ in 0..count {
                if scroll_region_bottom < self.viewport.len() {
                    self.viewport.remove(scroll_region_bottom);
                }
                let columns = vec![pad_character; self.width];
                self.viewport
                    .insert(current_line_index, Row::from_columns(columns).canonical());
            }
        }
    }
}
/// Moves the cursor to an absolute column on the current line, padding the
/// line so the target cell exists.
/// NOTE(review): `column` is not clamped to the viewport width here — confirm
/// callers pass in-range values.
pub fn move_cursor_to_column(&mut self, column: usize) {
    self.cursor.x = column;
    self.pad_current_line_until(self.cursor.x);
}
/// Moves the cursor to an absolute viewport line (clamped to the last line),
/// padding rows and the current line so the cursor lands on an existing cell.
pub fn move_cursor_to_line(&mut self, line: usize, pad_character: TerminalCharacter) {
    self.cursor.y = line.min(self.height - 1);
    self.pad_lines_until(self.cursor.y, pad_character);
    self.pad_current_line_until(self.cursor.x);
}
/// Overwrites `count` cells starting at the cursor with blanks carrying
/// `empty_char_style`, padding the line first so the targets exist (clamped
/// to the viewport width).
pub fn replace_with_empty_chars(&mut self, count: usize, empty_char_style: CharacterStyles) {
    let mut styled_blank = EMPTY_TERMINAL_CHARACTER;
    styled_blank.styles = empty_char_style;
    let pad_until = (self.cursor.x + count).min(self.width);
    self.pad_current_line_until(pad_until);
    let cursor_x = self.cursor.x;
    let current_row = self.viewport.get_mut(self.cursor.y).unwrap();
    for offset in 0..count {
        current_row.replace_character_at(styled_blank, cursor_x + offset);
    }
}
/// Deletes `count` characters at the cursor, shifting the rest of the line
/// left; deleted wide characters are backfilled with styled blanks so the
/// remaining cells keep their alignment.
/// NOTE(review): unwraps the cursor row — panics if it does not exist.
pub fn erase_characters(&mut self, count: usize, empty_char_style: CharacterStyles) {
    let mut empty_character = EMPTY_TERMINAL_CHARACTER;
    empty_character.styles = empty_char_style;
    let current_row = self.viewport.get_mut(self.cursor.y).unwrap();
    for _ in 0..count {
        let deleted_character = current_row.delete_and_return_character(self.cursor.x);
        // a deleted wide character frees (width - 1) extra display columns;
        // insert that many blanks so the rest of the line does not shift
        let excess_width = deleted_character
            .map(|terminal_character| terminal_character.width)
            .unwrap_or(0)
            .saturating_sub(1);
        for _ in 0..excess_width {
            current_row.insert_character_at(empty_character, self.cursor.x);
        }
    }
}
/// Line feed: opens a new canonical line and flags the grid for re-render.
fn add_newline(&mut self) {
    self.add_canonical_line();
    self.mark_for_rerender();
}
/// Flags the grid so the renderer repaints it on the next pass.
pub fn mark_for_rerender(&mut self) {
    self.should_render = true;
}
/// Full terminal reset: empties the scrollback and viewport, clears modes,
/// charsets and the scroll region, and restores the default cursor.
fn reset_terminal_state(&mut self) {
    self.lines_above = VecDeque::with_capacity(SCROLL_BACK);
    self.lines_below = vec![];
    self.viewport = vec![Row::new().canonical()];
    self.alternative_lines_above_viewport_and_cursor = None;
    self.cursor_key_mode = false;
    self.scroll_region = None;
    // force a full repaint on the next render
    self.clear_viewport_before_rendering = true;
    self.cursor = Cursor::new(0, 0);
    self.saved_cursor_position = None;
    self.active_charset = Default::default();
    self.erasure_mode = false;
    self.disable_linewrap = false;
    self.cursor.change_shape(CursorShape::Block);
}
/// Remembers the last printed character (used by CSI 'b' to repeat it).
fn set_preceding_character(&mut self, terminal_character: TerminalCharacter) {
    self.preceding_char = Some(terminal_character);
}
}
/// vte::Perform implementation: receives the parsed terminal byte stream and
/// applies it to the grid (printed characters, C0 control bytes, OSC, CSI and
/// ESC sequences).
impl Perform for Grid {
    // Print a single visible character at the cursor position, mapped through
    // the active charset and carrying the cursor's pending styles.
    fn print(&mut self, c: char) {
        let c = self.cursor.charsets[self.active_charset].map(c);
        // apparently, building TerminalCharacter like this without a "new" method
        // is a little faster
        let terminal_character = TerminalCharacter {
            character: c,
            width: c.width().unwrap_or(0),
            styles: self.cursor.pending_styles,
        };
        self.set_preceding_character(terminal_character);
        self.add_character(terminal_character);
    }
    // Handle C0 control bytes.
    fn execute(&mut self, byte: u8) {
        match byte {
            8 => {
                // backspace
                self.move_cursor_back(1);
            }
            9 => {
                // tab
                self.advance_to_next_tabstop(self.cursor.pending_styles);
            }
            10 | 11 | 12 => {
                // 0a, newline
                // 0b, vertical tabulation
                // 0c, form feed
                self.add_newline();
            }
            13 => {
                // 0d, carriage return
                self.move_cursor_to_beginning_of_line();
            }
            14 => {
                // 0e, shift out: switch to the G1 charset
                self.set_active_charset(CharsetIndex::G1);
            }
            15 => {
                // 0f, shift in: switch to the G0 charset
                self.set_active_charset(CharsetIndex::G0);
            }
            _ => {}
        }
    }
    fn hook(&mut self, _params: &Params, _intermediates: &[u8], _ignore: bool, _c: char) {
        // TBD
    }
    fn put(&mut self, _byte: u8) {
        // TBD
    }
    fn unhook(&mut self) {
        // TBD
    }
    // Handle OSC (Operating System Command) sequences; params[0] selects the
    // command, the rest are its arguments.
    fn osc_dispatch(&mut self, params: &[&[u8]], bell_terminated: bool) {
        // replies must be terminated the same way the request was (BEL or ST)
        let terminator = if bell_terminated { "\x07" } else { "\x1b\\" };
        if params.is_empty() || params[0].is_empty() {
            return;
        }
        match params[0] {
            // Set window title.
            b"0" | b"2" => {
                if params.len() >= 2 {
                    let _title = params[1..]
                        .iter()
                        .flat_map(|x| str::from_utf8(x))
                        .collect::<Vec<&str>>()
                        .join(";")
                        .trim()
                        .to_owned();
                    // TBD: do something with title?
                }
            }
            // Set color index.
            b"4" => {
                // TBD: set color index - currently unsupported
                //
                // this changes a terminal color index to something else
                // meaning anything set to that index will be changed
                // during rendering
            }
            // Get/set Foreground, Background, Cursor colors.
            b"10" | b"11" | b"12" => {
                if params.len() >= 2 {
                    if let Some(mut dynamic_code) = parse_number(params[0]) {
                        for param in &params[1..] {
                            // currently only getting the color sequence is supported,
                            // setting still isn't
                            if param == b"?" {
                                // reply with the background color (only the
                                // grid's bg is tracked here), repeating each
                                // 8-bit channel to produce 16-bit X11 syntax
                                let color_response_message = match self.colors.bg {
                                    PaletteColor::Rgb((r, g, b)) => {
                                        format!(
                                            "\u{1b}]{};rgb:{1:02x}{1:02x}/{2:02x}{2:02x}/{3:02x}{3:02x}{4}",
                                            // dynamic_code, color.r, color.g, color.b, terminator
                                            dynamic_code, r, g, b, terminator
                                        )
                                    }
                                    _ => {
                                        format!(
                                            "\u{1b}]{};rgb:{1:02x}{1:02x}/{2:02x}{2:02x}/{3:02x}{3:02x}{4}",
                                            // dynamic_code, color.r, color.g, color.b, terminator
                                            dynamic_code, 0, 0, 0, terminator
                                        )
                                    }
                                };
                                self.pending_messages_to_pty
                                    .push(color_response_message.as_bytes().to_vec());
                            }
                            dynamic_code += 1;
                        }
                        return;
                    }
                }
            }
            // Set cursor style.
            b"50" => {
                if params.len() >= 2
                    && params[1].len() >= 13
                    && params[1][0..12] == *b"CursorShape="
                {
                    let shape = match params[1][12] as char {
                        '0' => Some(CursorShape::Block),
                        '1' => Some(CursorShape::Beam),
                        '2' => Some(CursorShape::Underline),
                        _ => None,
                    };
                    if let Some(cursor_shape) = shape {
                        self.cursor.change_shape(cursor_shape);
                    }
                }
            }
            // Set clipboard.
            b"52" => {
                if params.len() < 3 {
                    return;
                }
                let _clipboard = params[1].get(0).unwrap_or(&b'c');
                match params[2] {
                    b"?" => {
                        // TBD: paste from own clipboard - currently unsupported
                    }
                    _base64 => {
                        // TBD: copy to own clipboard - currently unsupported
                    }
                }
            }
            // Reset color index.
            b"104" => {
                // Reset all color indexes when no parameters are given.
                if params.len() == 1 {
                    // TBD - reset all color changes - currently unsupported
                    return;
                }
                // Reset color indexes given as parameters.
                for param in &params[1..] {
                    if let Some(_index) = parse_number(param) {
                        // TBD - reset color index - currently unimplemented
                    }
                }
            }
            // Reset foreground color.
            b"110" => {
                // TBD - reset foreground color - currently unimplemented
            }
            // Reset background color.
            b"111" => {
                // TBD - reset background color - currently unimplemented
            }
            // Reset text cursor color.
            b"112" => {
                // TBD - reset text cursor color - currently unimplemented
            }
            _ => {}
        }
    }
    // Handle CSI (Control Sequence Introducer) sequences; `c` is the final
    // byte selecting the action, `params` its numeric arguments.
    fn csi_dispatch(&mut self, params: &Params, intermediates: &[u8], _ignore: bool, c: char) {
        let mut params_iter = params.iter();
        // next numeric parameter, falling back to `default` when it is
        // missing or zero (per the VT convention that 0 means "default")
        let mut next_param_or = |default: u16| {
            params_iter
                .next()
                .map(|param| param[0])
                .filter(|&param| param != 0)
                .unwrap_or(default) as usize
        };
        if c == 'm' {
            // SGR - select graphic rendition (colors and text attributes)
            self.cursor
                .pending_styles
                .add_style_from_ansi_params(&mut params_iter);
        } else if c == 'C' || c == 'a' {
            // move cursor forward
            let move_by = next_param_or(1);
            self.move_cursor_forward_until_edge(move_by);
        } else if c == 'K' {
            // clear line (0 => right, 1 => left, 2 => all)
            if let Some(clear_type) = params_iter.next().map(|param| param[0]) {
                if clear_type == 0 {
                    let mut char_to_replace = EMPTY_TERMINAL_CHARACTER;
                    char_to_replace.styles = self.cursor.pending_styles;
                    self.replace_characters_in_line_after_cursor(char_to_replace);
                } else if clear_type == 1 {
                    let mut char_to_replace = EMPTY_TERMINAL_CHARACTER;
                    char_to_replace.styles = self.cursor.pending_styles;
                    self.replace_characters_in_line_before_cursor(char_to_replace);
                } else if clear_type == 2 {
                    self.clear_cursor_line();
                }
            };
        } else if c == 'J' {
            // clear all (0 => below, 1 => above, 2 => all, 3 => saved)
            let mut char_to_replace = EMPTY_TERMINAL_CHARACTER;
            char_to_replace.styles = self.cursor.pending_styles;
            if let Some(clear_type) = params_iter.next().map(|param| param[0]) {
                if clear_type == 0 {
                    self.clear_all_after_cursor(char_to_replace);
                } else if clear_type == 1 {
                    self.clear_all_before_cursor(char_to_replace);
                } else if clear_type == 2 {
                    self.fill_viewport(char_to_replace);
                }
            };
        } else if c == 'H' || c == 'f' {
            // goto row/col
            // we subtract 1 from the row/column because these are 1 indexed
            let row = next_param_or(1).saturating_sub(1);
            let col = next_param_or(1).saturating_sub(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.move_cursor_to(col, row, pad_character);
        } else if c == 'A' {
            // move cursor up until edge of screen
            let move_up_count = next_param_or(1);
            self.move_cursor_up(move_up_count as usize);
        } else if c == 'B' || c == 'e' {
            // move cursor down until edge of screen
            let move_down_count = next_param_or(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.move_cursor_down(move_down_count as usize, pad_character);
        } else if c == 'D' {
            // move cursor back
            let move_back_count = next_param_or(1);
            self.move_cursor_back(move_back_count);
        } else if c == 'l' {
            // RM / DECRST - reset (disable) terminal modes; a '?' intermediate
            // selects the private (DEC) mode space
            let first_intermediate_is_questionmark = match intermediates.get(0) {
                Some(b'?') => true,
                None => false,
                _ => false,
            };
            if first_intermediate_is_questionmark {
                match params_iter.next().map(|param| param[0]) {
                    Some(1049) => {
                        // leave the alternate screen: swap the saved primary
                        // screen state back in
                        if let Some((
                            alternative_lines_above,
                            alternative_viewport,
                            alternative_cursor,
                        )) = self.alternative_lines_above_viewport_and_cursor.as_mut()
                        {
                            std::mem::swap(&mut self.lines_above, alternative_lines_above);
                            std::mem::swap(&mut self.viewport, alternative_viewport);
                            std::mem::swap(&mut self.cursor, alternative_cursor);
                        }
                        self.alternative_lines_above_viewport_and_cursor = None;
                        self.clear_viewport_before_rendering = true;
                        self.change_size(self.height, self.width); // the alternative_viewport might have been of a different size...
                        self.mark_for_rerender();
                    }
                    Some(25) => {
                        self.hide_cursor();
                        self.mark_for_rerender();
                    }
                    Some(1) => {
                        self.cursor_key_mode = false;
                    }
                    Some(3) => {
                        // DECCOLM - only side effects
                        self.scroll_region = None;
                        self.clear_all(EMPTY_TERMINAL_CHARACTER);
                        self.cursor.x = 0;
                        self.cursor.y = 0;
                    }
                    Some(6) => {
                        self.erasure_mode = false;
                    }
                    Some(7) => {
                        self.disable_linewrap = true;
                    }
                    _ => {}
                };
            } else if let Some(4) = params_iter.next().map(|param| param[0]) {
                // IRM off: overwrite characters instead of inserting
                self.insert_mode = false;
            }
        } else if c == 'h' {
            // SM / DECSET - set (enable) terminal modes
            let first_intermediate_is_questionmark = match intermediates.get(0) {
                Some(b'?') => true,
                None => false,
                _ => false,
            };
            if first_intermediate_is_questionmark {
                match params_iter.next().map(|param| param[0]) {
                    Some(25) => {
                        self.show_cursor();
                        self.mark_for_rerender();
                    }
                    Some(1049) => {
                        // enter the alternate screen: stash the primary screen
                        // state and start from a fresh viewport/cursor
                        let current_lines_above = std::mem::replace(
                            &mut self.lines_above,
                            VecDeque::with_capacity(SCROLL_BACK),
                        );
                        let current_viewport =
                            std::mem::replace(&mut self.viewport, vec![Row::new().canonical()]);
                        let current_cursor = std::mem::replace(&mut self.cursor, Cursor::new(0, 0));
                        self.alternative_lines_above_viewport_and_cursor =
                            Some((current_lines_above, current_viewport, current_cursor));
                        self.clear_viewport_before_rendering = true;
                    }
                    Some(1) => {
                        self.cursor_key_mode = true;
                    }
                    Some(3) => {
                        // DECCOLM - only side effects
                        self.scroll_region = None;
                        self.clear_all(EMPTY_TERMINAL_CHARACTER);
                        self.cursor.x = 0;
                        self.cursor.y = 0;
                    }
                    Some(6) => {
                        self.erasure_mode = true;
                    }
                    Some(7) => {
                        self.disable_linewrap = false;
                    }
                    _ => {}
                };
            } else if let Some(4) = params_iter.next().map(|param| param[0]) {
                // IRM on: insert characters, shifting the line right
                self.insert_mode = true;
            }
        } else if c == 'r' {
            // DECSTBM - set the scroll region (1-indexed top;bottom margins)
            if params.len() > 1 {
                let top = (next_param_or(1) as usize).saturating_sub(1);
                let bottom = params_iter
                    .next()
                    .map(|param| param[0] as usize)
                    .filter(|&param| param != 0)
                    .map(|bottom| bottom.saturating_sub(1));
                self.set_scroll_region(top, bottom);
                if self.erasure_mode {
                    self.move_cursor_to_line(top, EMPTY_TERMINAL_CHARACTER);
                    self.move_cursor_to_beginning_of_line();
                }
            } else {
                self.clear_scroll_region();
            }
        } else if c == 'M' {
            // delete lines if currently inside scroll region
            let line_count_to_delete = next_param_or(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.delete_lines_in_scroll_region(line_count_to_delete, pad_character);
        } else if c == 'L' {
            // insert blank lines if inside scroll region
            let line_count_to_add = next_param_or(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.add_empty_lines_in_scroll_region(line_count_to_add, pad_character);
        } else if c == 'G' || c == '`' {
            // CHA - move cursor to an absolute (1-indexed) column
            let column = next_param_or(1).saturating_sub(1);
            self.move_cursor_to_column(column);
        } else if c == 'g' {
            // TBC - clear tab stops (0 => at cursor, 3 => all)
            let clear_type = next_param_or(0);
            if clear_type == 0 {
                self.clear_tabstop(self.cursor.x);
            } else if clear_type == 3 {
                self.clear_all_tabstops();
            }
        } else if c == 'd' {
            // goto line
            let line = next_param_or(1).saturating_sub(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.move_cursor_to_line(line, pad_character);
        } else if c == 'P' {
            // erase characters
            let count = next_param_or(1);
            self.erase_characters(count, self.cursor.pending_styles);
        } else if c == 'X' {
            // erase characters and replace with empty characters of current style
            let count = next_param_or(1);
            self.replace_with_empty_chars(count, self.cursor.pending_styles);
        } else if c == 'T' {
            /*
             * 124  54  T   SD
             * Scroll down, new lines inserted at top of screen
             * [4T = Scroll down 4, bring previous lines back into view
             */
            let line_count = next_param_or(1);
            self.rotate_scroll_region_up(line_count as usize);
        } else if c == 'S' {
            // move scroll up
            let count = next_param_or(1);
            self.rotate_scroll_region_down(count);
        } else if c == 's' {
            // save the cursor position
            self.save_cursor_position();
        } else if c == 'u' {
            // restore the saved cursor position
            self.restore_cursor_position();
        } else if c == '@' {
            // ICH - insert blank characters at the cursor
            let count = next_param_or(1);
            for _ in 0..count {
                // TODO: should this be styled?
                self.insert_character_at_cursor_position(EMPTY_TERMINAL_CHARACTER);
            }
        } else if c == 'b' {
            // REP - repeat the last printed character
            if let Some(c) = self.preceding_char {
                for _ in 0..next_param_or(1) {
                    self.add_character(c);
                }
            }
        } else if c == 'E' {
            // CNL - cursor to beginning of line, `count` lines down
            let count = next_param_or(1);
            let pad_character = EMPTY_TERMINAL_CHARACTER;
            self.move_cursor_down(count, pad_character);
        } else if c == 'F' {
            // CPL - cursor to beginning of line, `count` lines up
            let count = next_param_or(1);
            self.move_cursor_up(count);
            self.move_cursor_to_beginning_of_line();
        } else if c == 'I' {
            // CHT - advance the cursor by tab stops
            for _ in 0..next_param_or(1) {
                self.advance_to_next_tabstop(self.cursor.pending_styles);
            }
        } else if c == 'q' {
            let first_intermediate_is_space = matches!(intermediates.get(0), Some(b' '));
            if first_intermediate_is_space {
                // DECSCUSR (CSI Ps SP q) -- Set Cursor Style.
                let cursor_style_id = next_param_or(0);
                let shape = match cursor_style_id {
                    0 | 2 => Some(CursorShape::Block),
                    1 => Some(CursorShape::BlinkingBlock),
                    3 => Some(CursorShape::BlinkingUnderline),
                    4 => Some(CursorShape::Underline),
                    5 => Some(CursorShape::BlinkingBeam),
                    6 => Some(CursorShape::Beam),
                    _ => None,
                };
                if let Some(cursor_shape) = shape {
                    self.cursor.change_shape(cursor_shape);
                }
            }
        } else if c == 'Z' {
            // CBT - move the cursor back by tab stops
            for _ in 0..next_param_or(1) {
                self.move_to_previous_tabstop();
            }
        } else if c == 'c' {
            // identify terminal
            // https://vt100.net/docs/vt510-rm/DA1.html
            match intermediates.get(0) {
                None | Some(0) => {
                    // primary device attributes
                    let terminal_capabilities = "\u{1b}[?6c";
                    self.pending_messages_to_pty
                        .push(terminal_capabilities.as_bytes().to_vec());
                }
                Some(b'>') => {
                    // secondary device attributes
                    let version = version_number(VERSION);
                    let text = format!("\u{1b}[>0;{};1c", version);
                    self.pending_messages_to_pty.push(text.as_bytes().to_vec());
                }
                _ => {}
            }
        } else if c == 'n' {
            // DSR - device status report
            // https://vt100.net/docs/vt510-rm/DSR.html
            match next_param_or(0) {
                5 => {
                    // report terminal status
                    let all_good = "\u{1b}[0n";
                    self.pending_messages_to_pty
                        .push(all_good.as_bytes().to_vec());
                }
                6 => {
                    // CPR - cursor position report
                    let position_report =
                        format!("\x1b[{};{}R", self.cursor.y + 1, self.cursor.x + 1);
                    self.pending_messages_to_pty
                        .push(position_report.as_bytes().to_vec());
                }
                _ => {}
            }
        } else if c == 't' {
            // XTWINOPS - window manipulation / reports
            match next_param_or(1) as usize {
                14 => {
                    // TODO: report text area size in pixels, currently unimplemented
                    // to solve this we probably need to query the user's terminal for the cursor
                    // size and then use it as a multiplier
                }
                18 => {
                    // report text area
                    let text_area_report = format!("\x1b[8;{};{}t", self.height, self.width);
                    self.pending_messages_to_pty
                        .push(text_area_report.as_bytes().to_vec());
                }
                22 => {
                    // TODO: push title
                }
                23 => {
                    // TODO: pop title
                }
                _ => {}
            }
        } else {
            // log unhandled sequences so they can be implemented later
            let result = debug_log_to_file(format!("Unhandled csi: {}->{:?}", c, params));
            #[cfg(not(any(feature = "test", test)))]
            result.unwrap();
        }
    }
    // Handle plain ESC sequences (no CSI/OSC); the intermediate byte selects
    // the charset slot for designation sequences.
    fn esc_dispatch(&mut self, intermediates: &[u8], _ignore: bool, byte: u8) {
        match (byte, intermediates.get(0)) {
            (b'B', charset_index_symbol) => {
                // designate the ASCII charset into G0..G3
                let charset_index: CharsetIndex = match charset_index_symbol {
                    Some(b'(') => CharsetIndex::G0,
                    Some(b')') => CharsetIndex::G1,
                    Some(b'*') => CharsetIndex::G2,
                    Some(b'+') => CharsetIndex::G3,
                    _ => {
                        // invalid, silently do nothing
                        return;
                    }
                };
                self.configure_charset(StandardCharset::Ascii, charset_index);
            }
            (b'0', charset_index_symbol) => {
                // designate the line-drawing charset into G0..G3
                let charset_index: CharsetIndex = match charset_index_symbol {
                    Some(b'(') => CharsetIndex::G0,
                    Some(b')') => CharsetIndex::G1,
                    Some(b'*') => CharsetIndex::G2,
                    Some(b'+') => CharsetIndex::G3,
                    _ => {
                        // invalid, silently do nothing
                        return;
                    }
                };
                self.configure_charset(
                    StandardCharset::SpecialCharacterAndLineDrawing,
                    charset_index,
                );
            }
            (b'D', None) => {
                // IND - index (line feed)
                self.add_newline();
            }
            (b'E', None) => {
                // NEL - next line
                self.add_newline();
                self.move_cursor_to_beginning_of_line();
            }
            (b'M', None) => {
                // RI - reverse index
                // TODO: if cursor is at the top, it should go down one
                self.move_cursor_up_with_scrolling(1);
            }
            (b'c', None) => {
                // RIS - full reset
                self.reset_terminal_state();
            }
            (b'H', None) => {
                // HTS - set a tab stop at the cursor column
                self.set_horizontal_tabstop();
            }
            (b'7', None) => {
                // DECSC - save cursor
                self.save_cursor_position();
            }
            (b'Z', None) => {
                // DECID - identify terminal
                let terminal_capabilities = "\u{1b}[?6c";
                self.pending_messages_to_pty
                    .push(terminal_capabilities.as_bytes().to_vec());
            }
            (b'8', None) => {
                // DECRC - restore cursor
                self.restore_cursor_position();
            }
            (b'8', Some(b'#')) => {
                // DECALN - fill the screen with 'E' (alignment test)
                let mut fill_character = EMPTY_TERMINAL_CHARACTER;
                fill_character.character = 'E';
                self.fill_viewport(fill_character);
            }
            _ => {}
        }
    }
}
/// A single terminal row: its cells plus whether it starts a logical
/// (unwrapped) line.
#[derive(Clone)]
pub struct Row {
    // the terminal characters (cells) making up this row
    pub columns: Vec<TerminalCharacter>,
    // true when this row starts a logical line; false when it is the wrapped
    // continuation of the previous row
    pub is_canonical: bool,
}
impl Debug for Row {
    /// Renders the row as the concatenated debug output of its cells.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.columns
            .iter()
            .try_for_each(|character| write!(f, "{:?}", character))
    }
}
impl Default for Row {
    /// An empty, non-canonical (wrapped) row.
    fn default() -> Self {
        Row {
            columns: Vec::new(),
            is_canonical: false,
        }
    }
}
impl Row {
    /// Creates an empty, non-canonical row.
    pub fn new() -> Self {
        Self::default()
    }
    /// Wraps an existing character list in a non-canonical row.
    pub fn from_columns(columns: Vec<TerminalCharacter>) -> Self {
        Row {
            columns,
            is_canonical: false,
        }
    }
    /// Joins several rows into one by draining every row's characters
    /// into the first; the first row's canonicality is preserved.
    /// Returns an empty row for empty input.
    pub fn from_rows(mut rows: Vec<Row>) -> Self {
        if rows.is_empty() {
            Row::new()
        } else {
            let mut first_row = rows.remove(0);
            for row in rows.iter_mut() {
                first_row.append(&mut row.columns);
            }
            first_row
        }
    }
    /// Builder-style helper: appends one character and returns the row.
    pub fn with_character(mut self, terminal_character: TerminalCharacter) -> Self {
        self.columns.push(terminal_character);
        self
    }
    /// Builder-style helper: marks the row as canonical (the start of a
    /// logical, unwrapped line).
    pub fn canonical(mut self) -> Self {
        self.is_canonical = true;
        self
    }
    /// Display width of the row: the sum of its characters' widths
    /// (wide characters occupy more than one column).
    pub fn width(&self) -> usize {
        let mut width = 0;
        for terminal_character in self.columns.iter() {
            width += terminal_character.width;
        }
        width
    }
    /// Extra display columns contributed by wide characters across the
    /// whole row (each character contributes `width - 1`).
    pub fn excess_width(&self) -> usize {
        let mut acc = 0;
        for terminal_character in self.columns.iter() {
            if terminal_character.width > 1 {
                acc += terminal_character.width - 1;
            }
        }
        acc
    }
    /// Same as `excess_width`, but only over the first `x` characters.
    pub fn excess_width_until(&self, x: usize) -> usize {
        let mut acc = 0;
        for terminal_character in self.columns.iter().take(x) {
            if terminal_character.width > 1 {
                acc += terminal_character.width - 1;
            }
        }
        acc
    }
    /// Places `terminal_character` at position `x`, padding with empty
    /// characters when the row is shorter, or overwriting in place when
    /// it is longer.
    // NOTE(review): the Less/Greater arms discount the excess width of
    // preceding wide characters, which suggests `x` is a display column
    // rather than a character index — confirm against callers.
    pub fn add_character_at(&mut self, terminal_character: TerminalCharacter, x: usize) {
        match self.width().cmp(&x) {
            Ordering::Equal => {
                self.columns.push(terminal_character);
            }
            Ordering::Less => {
                let width_offset = self.excess_width_until(x);
                self.columns
                    .resize(x.saturating_sub(width_offset), EMPTY_TERMINAL_CHARACTER);
                self.columns.push(terminal_character);
            }
            Ordering::Greater => {
                let width_offset = self.excess_width_until(x);
                // this is much more performant than remove/insert
                self.columns.push(terminal_character);
                self.columns.swap_remove(x.saturating_sub(width_offset));
            }
        }
    }
    /// Inserts `terminal_character` at character index `x`, shifting
    /// the rest of the row right; pads with empty characters when `x`
    /// is past the end.
    pub fn insert_character_at(&mut self, terminal_character: TerminalCharacter, x: usize) {
        match self.columns.len().cmp(&x) {
            Ordering::Equal => self.columns.push(terminal_character),
            Ordering::Less => {
                self.columns.resize(x, EMPTY_TERMINAL_CHARACTER);
                self.columns.push(terminal_character);
            }
            Ordering::Greater => {
                self.columns.insert(x, terminal_character);
            }
        }
    }
    /// Overwrites the character at index `x` (no-op when out of bounds).
    // NOTE(review): when the replaced character was wide, extra copies
    // of the replacement are inserted to preserve total display width —
    // confirm this is the intended rendering behavior.
    pub fn replace_character_at(&mut self, terminal_character: TerminalCharacter, x: usize) {
        // this is much more performant than remove/insert
        if x < self.columns.len() {
            self.columns.push(terminal_character);
            let character = self.columns.swap_remove(x);
            let excess_width = character.width.saturating_sub(1);
            for _ in 0..excess_width {
                self.columns.insert(x, terminal_character);
            }
        }
    }
    /// Replaces the entire character list.
    pub fn replace_columns(&mut self, columns: Vec<TerminalCharacter>) {
        self.columns = columns;
    }
    /// Appends a single character at the end of the row.
    pub fn push(&mut self, terminal_character: TerminalCharacter) {
        self.columns.push(terminal_character);
    }
    /// Truncates the row at display column `x`, discounting the extra
    /// columns of wide characters before that point.
    pub fn truncate(&mut self, x: usize) {
        let width_offset = self.excess_width_until(x);
        let truncate_position = x.saturating_sub(width_offset);
        if truncate_position < self.columns.len() {
            self.columns.truncate(truncate_position);
        }
    }
    /// Converts display column `x` into a character index by walking the
    /// row and subtracting the excess width of wide characters seen on
    /// the way.
    pub fn position_accounting_for_widechars(&self, x: usize) -> usize {
        let mut position = x;
        for (index, terminal_character) in self.columns.iter().enumerate() {
            if index == position {
                break;
            }
            if terminal_character.width > 1 {
                position = position.saturating_sub(terminal_character.width.saturating_sub(1));
            }
        }
        position
    }
    /// Replaces the `from..to` range with copies of `terminal_character`
    /// and drops everything after `from` first (i.e. pads to `to` at the
    /// end of the row).
    pub fn replace_and_pad_end(
        &mut self,
        from: usize,
        to: usize,
        terminal_character: TerminalCharacter,
    ) {
        let from_position_accounting_for_widechars = self.position_accounting_for_widechars(from);
        let to_position_accounting_for_widechars = self.position_accounting_for_widechars(to);
        let replacement_length = to_position_accounting_for_widechars
            .saturating_sub(from_position_accounting_for_widechars);
        let mut replace_with = vec![terminal_character; replacement_length];
        self.columns
            .truncate(from_position_accounting_for_widechars);
        self.columns.append(&mut replace_with);
    }
    /// Moves all characters of `to_append` to the end of this row.
    pub fn append(&mut self, to_append: &mut Vec<TerminalCharacter>) {
        self.columns.append(to_append);
    }
    /// Removes and returns the leading characters that fit within `x`
    /// display columns; a wide character that would cross the boundary
    /// is left in the row.
    // NOTE(review): repeated `remove(0)` is O(n^2) in the row length —
    // a `drain`-based rewrite would be cheaper, left as-is here.
    pub fn drain_until(&mut self, x: usize) -> Vec<TerminalCharacter> {
        let mut drained_part: Vec<TerminalCharacter> = vec![];
        let mut drained_part_len = 0;
        loop {
            if self.columns.is_empty() {
                break;
            }
            let next_character_len = self.columns.get(0).unwrap().width;
            if drained_part_len + next_character_len <= x {
                drained_part.push(self.columns.remove(0));
                drained_part_len += next_character_len;
            } else {
                break;
            }
        }
        drained_part
    }
    /// Replaces the beginning of the row (up to and including the
    /// character at display column `to`) with copies of
    /// `terminal_character`, keeping the tail.
    pub fn replace_and_pad_beginning(&mut self, to: usize, terminal_character: TerminalCharacter) {
        let to_position_accounting_for_widechars = self.position_accounting_for_widechars(to);
        // Width of the character sitting at the boundary (1 if absent),
        // so the padding also covers the column(s) it occupied.
        let width_of_current_character = self
            .columns
            .get(to_position_accounting_for_widechars)
            .map(|character| character.width)
            .unwrap_or(1);
        let mut replace_with = vec![terminal_character; to + width_of_current_character];
        if to_position_accounting_for_widechars > self.columns.len() {
            self.columns.clear();
        } else {
            drop(self.columns.drain(0..=to_position_accounting_for_widechars));
        }
        replace_with.append(&mut self.columns);
        self.columns = replace_with;
    }
    /// Replaces the first `line_part.len()` characters with `line_part`.
    pub fn replace_beginning_with(&mut self, mut line_part: Vec<TerminalCharacter>) {
        // this assumes line_part has no wide characters
        if line_part.len() > self.columns.len() {
            self.columns.clear();
        } else {
            drop(self.columns.drain(0..line_part.len()));
        }
        line_part.append(&mut self.columns);
        self.columns = line_part;
    }
    /// Number of characters (not display columns) in the row.
    pub fn len(&self) -> usize {
        self.columns.len()
    }
    /// True when the row holds no characters.
    pub fn is_empty(&self) -> bool {
        self.columns.is_empty()
    }
    /// Removes and returns the character at index `x`, or `None` when
    /// out of bounds.
    pub fn delete_and_return_character(&mut self, x: usize) -> Option<TerminalCharacter> {
        if x < self.columns.len() {
            Some(self.columns.remove(x))
        } else {
            None
        }
    }
    /// Splits the row into wrapped parts of at most `max_row_length`
    /// display columns, consuming this row's characters. Only the first
    /// part inherits this row's canonicality.
    pub fn split_to_rows_of_length(&mut self, max_row_length: usize) -> Vec<Row> {
        let mut parts: Vec<Row> = vec![];
        let mut current_part: Vec<TerminalCharacter> = vec![];
        let mut current_part_len = 0;
        for character in self.columns.drain(..) {
            if current_part_len + character.width > max_row_length {
                parts.push(Row::from_columns(current_part));
                current_part = vec![];
                current_part_len = 0;
            }
            current_part.push(character);
            current_part_len += character.width;
        }
        if !current_part.is_empty() {
            parts.push(Row::from_columns(current_part))
        };
        if !parts.is_empty() && self.is_canonical {
            parts.get_mut(0).unwrap().is_canonical = true;
        }
        parts
    }
}
#[cfg(test)]
#[path = "./unit/grid_tests.rs"]
mod grid_tests;
| 39.759168 | 147 | 0.515474 |
617015bcfd93447c3197b5481d55883560977111 | 5,943 | #[doc = "Register `PUBLISH_FIELDDETECTED` reader"]
// svd2rust-generated reader/writer proxies for the PUBLISH_FIELDDETECTED
// register: R and W wrap the generic register reader/writer and forward
// to them through Deref/DerefMut. Hand edits are lost on regeneration.
pub struct R(crate::R<PUBLISH_FIELDDETECTED_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<PUBLISH_FIELDDETECTED_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<PUBLISH_FIELDDETECTED_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<PUBLISH_FIELDDETECTED_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `PUBLISH_FIELDDETECTED` writer"]
pub struct W(crate::W<PUBLISH_FIELDDETECTED_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<PUBLISH_FIELDDETECTED_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<PUBLISH_FIELDDETECTED_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<PUBLISH_FIELDDETECTED_SPEC>) -> Self {
        W(writer)
    }
}
// svd2rust-generated proxies for the CHIDX field (bits 0..=7).
#[doc = "Field `CHIDX` reader - DPPI channel that event FIELDDETECTED will publish to."]
pub struct CHIDX_R(crate::FieldReader<u8, u8>);
impl CHIDX_R {
    pub(crate) fn new(bits: u8) -> Self {
        CHIDX_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for CHIDX_R {
    type Target = crate::FieldReader<u8, u8>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CHIDX` writer - DPPI channel that event FIELDDETECTED will publish to."]
pub struct CHIDX_W<'a> {
    w: &'a mut W,
}
impl<'a> CHIDX_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // svd2rust marks raw field writes `unsafe`; the value is masked
        // into the low 8 bits of the register word.
        self.w.bits = (self.w.bits & !0xff) | (value as u32 & 0xff);
        self.w
    }
}
// svd2rust-generated enumerated values and proxies for the EN field
// (bit 31): EN_A models the two legal values, EN_R reads, EN_W writes.
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EN_A {
    #[doc = "0: Disable publishing"]
    DISABLED = 0,
    #[doc = "1: Enable publishing"]
    ENABLED = 1,
}
impl From<EN_A> for bool {
    #[inline(always)]
    fn from(variant: EN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EN` reader - "]
pub struct EN_R(crate::FieldReader<bool, EN_A>);
impl EN_R {
    pub(crate) fn new(bits: bool) -> Self {
        EN_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EN_A {
        match self.bits {
            false => EN_A::DISABLED,
            true => EN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        **self == EN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        **self == EN_A::ENABLED
    }
}
impl core::ops::Deref for EN_R {
    type Target = crate::FieldReader<bool, EN_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `EN` writer - "]
pub struct EN_W<'a> {
    w: &'a mut W,
}
impl<'a> EN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Disable publishing"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(EN_A::DISABLED)
    }
    #[doc = "Enable publishing"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(EN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Sets/clears bit 31 of the register word.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
        self.w
    }
}
// svd2rust-generated field accessors on the register reader/writer.
impl R {
    #[doc = "Bits 0:7 - DPPI channel that event FIELDDETECTED will publish to."]
    #[inline(always)]
    pub fn chidx(&self) -> CHIDX_R {
        CHIDX_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bit 31"]
    #[inline(always)]
    pub fn en(&self) -> EN_R {
        EN_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:7 - DPPI channel that event FIELDDETECTED will publish to."]
    #[inline(always)]
    pub fn chidx(&mut self) -> CHIDX_W {
        CHIDX_W { w: self }
    }
    #[doc = "Bit 31"]
    #[inline(always)]
    pub fn en(&mut self) -> EN_W {
        EN_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
// svd2rust-generated marker type tying the generic Reg API to this
// register's reader/writer types and reset value.
#[doc = "Publish configuration for event FIELDDETECTED\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [publish_fielddetected](index.html) module"]
pub struct PUBLISH_FIELDDETECTED_SPEC;
impl crate::RegisterSpec for PUBLISH_FIELDDETECTED_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [publish_fielddetected::R](R) reader structure"]
impl crate::Readable for PUBLISH_FIELDDETECTED_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [publish_fielddetected::W](W) writer structure"]
impl crate::Writable for PUBLISH_FIELDDETECTED_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets PUBLISH_FIELDDETECTED to value 0"]
impl crate::Resettable for PUBLISH_FIELDDETECTED_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 30.321429 | 447 | 0.594144 |
f8314eba5ca367380035ec1191229e0619a92ad8 | 1,474 | use cadence::{BufferedUdpMetricSink, QueuingMetricSink, StatsdClient, UdpMetricSink, DEFAULT_PORT};
use std::net::UdpSocket;
mod utils;
use utils::run_arc_threaded_test;
/// Address of the statsd server the integration-test clients send to.
const TARGET_HOST: (&str, u16) = ("127.0.0.1", DEFAULT_PORT);
/// Builds a statsd client over a plain (unbuffered) UDP sink.
fn new_udp_client(prefix: &str) -> StatsdClient {
    // Bind an ephemeral local socket for sending.
    let local_socket = UdpSocket::bind("0.0.0.0:0").unwrap();
    let udp_sink = UdpMetricSink::from(TARGET_HOST, local_socket).unwrap();
    StatsdClient::from_sink(prefix, udp_sink)
}
/// Builds a statsd client whose metrics are batched in a buffer before
/// hitting the UDP socket.
fn new_buffered_udp_client(prefix: &str) -> StatsdClient {
    let local_socket = UdpSocket::bind("0.0.0.0:0").unwrap();
    let buffered_sink = BufferedUdpMetricSink::from(TARGET_HOST, local_socket).unwrap();
    StatsdClient::from_sink(prefix, buffered_sink)
}
/// Builds a statsd client with a buffered UDP sink wrapped in a queuing
/// sink, decoupling metric submission from the caller (see the cadence
/// docs for the queuing behavior).
fn new_queuing_buffered_udp_client(prefix: &str) -> StatsdClient {
    let local_socket = UdpSocket::bind("0.0.0.0:0").unwrap();
    let buffered_sink = BufferedUdpMetricSink::from(TARGET_HOST, local_socket).unwrap();
    let queuing_sink = QueuingMetricSink::from(buffered_sink);
    StatsdClient::from_sink(prefix, queuing_sink)
}
// Smoke tests: each sink flavor is driven through the shared harness
// with arguments (1, 1) — presumably thread/iteration counts; see the
// `utils` module for `run_arc_threaded_test`'s contract.
#[test]
fn test_statsd_client_udp_sink_single_threaded() {
    let client = new_udp_client("cadence");
    run_arc_threaded_test(client, 1, 1);
}
#[test]
fn test_statsd_client_buffered_udp_sink_single_threaded() {
    let client = new_buffered_udp_client("cadence");
    run_arc_threaded_test(client, 1, 1);
}
#[test]
fn test_statsd_client_queuing_buffered_udp_sink_single_threaded() {
    let client = new_queuing_buffered_udp_client("cadence");
    run_arc_threaded_test(client, 1, 1);
}
| 32.755556 | 99 | 0.734057 |
0e5fa0c0bfdcf6aa19496f2e8465de9ebd7bbbf2 | 10,930 | extern crate tokenizers as tk;
use crate::extraction::*;
use neon::prelude::*;
use serde::{ser::SerializeStruct, Serialize, Serializer};
use std::sync::Arc;
use tk::normalizers::NormalizerWrapper;
use tk::NormalizedString;
// `untagged` lets serde deserialize either shape without a discriminant;
// the matching serialization is hand-written in `impl Serialize` below.
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum JsNormalizerWrapper {
    /// A flattened list of normalizers applied in order.
    Sequence(Vec<Arc<NormalizerWrapper>>),
    /// A single wrapped normalizer.
    Wrapped(Arc<NormalizerWrapper>),
}
impl Serialize for JsNormalizerWrapper {
    // Serializes `Sequence` as {"type": "Sequence", "normalizers": [..]}
    // (matching tokenizers' own Sequence normalizer — see the test at
    // the bottom of this file); a wrapped normalizer serializes as the
    // inner value itself.
    fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
    where
        S: Serializer,
    {
        match self {
            JsNormalizerWrapper::Sequence(seq) => {
                let mut ser = serializer.serialize_struct("Sequence", 2)?;
                ser.serialize_field("type", "Sequence")?;
                ser.serialize_field("normalizers", seq)?;
                ser.end()
            }
            JsNormalizerWrapper::Wrapped(inner) => inner.serialize(serializer),
        }
    }
}
impl<I> From<I> for JsNormalizerWrapper
where
    I: Into<NormalizerWrapper>,
{
    /// Wraps anything convertible into a `NormalizerWrapper` as a
    /// single (non-sequence) normalizer behind an `Arc`.
    fn from(norm: I) -> Self {
        let wrapped: NormalizerWrapper = norm.into();
        Self::Wrapped(Arc::new(wrapped))
    }
}
/// Normalizer
// JS-facing wrapper; `normalizer` stays `None` until one of the
// constructor functions below installs an implementation. `flatten`
// makes this serialize exactly like the inner wrapper.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Normalizer {
    #[serde(flatten)]
    pub normalizer: Option<JsNormalizerWrapper>,
}
impl tk::Normalizer for Normalizer {
    /// Applies the wrapped normalizer(s) to `normalized` in place;
    /// errors when the wrapper was never initialized.
    fn normalize(&self, normalized: &mut NormalizedString) -> tk::Result<()> {
        match self.normalizer.as_ref().ok_or("Uninitialized Normalizer")? {
            JsNormalizerWrapper::Sequence(seq) => {
                // Sequence members run in declaration order; the first
                // error aborts the rest.
                for norm in seq {
                    norm.normalize(normalized)?;
                }
            }
            JsNormalizerWrapper::Wrapped(norm) => norm.normalize(normalized)?,
        };
        Ok(())
    }
}
// Neon class definition backing the JS `Normalizer` object.
declare_types! {
    pub class JsNormalizer for Normalizer {
        init(_) {
            // This should not be called from JS
            Ok(Normalizer { normalizer: None })
        }
        method normalizeString(mut cx) {
            use tk::Normalizer;
            // Normalizes the JS string argument and returns the result
            // as a new JS string.
            let sequence = cx.extract::<String>(0)?;
            let mut normalized = NormalizedString::from(sequence);
            let this = cx.this();
            let guard = cx.lock();
            this.borrow(&guard)
                .normalize(&mut normalized)
                .map_err(|e| Error(format!("{}", e)))?;
            Ok(cx.string(normalized.get()).upcast())
        }
    }
}
// Options object accepted by `bert_normalizer` (camelCase keys on the
// JS side); the Default impl supplies the values used when the caller
// omits the options object.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct BertNormalizerOptions {
    clean_text: bool,
    handle_chinese_chars: bool,
    // `None` defers the choice to the BertNormalizer itself.
    strip_accents: Option<bool>,
    lowercase: bool,
}
impl Default for BertNormalizerOptions {
    fn default() -> Self {
        Self {
            clean_text: true,
            handle_chinese_chars: true,
            strip_accents: None,
            lowercase: true,
        }
    }
}
/// bert_normalizer(options?: {
///   cleanText?: bool = true,
///   handleChineseChars?: bool = true,
///   stripAccents?: bool = true,
///   lowercase?: bool = true
/// })
// JS constructor: builds a BertNormalizer from an optional options
// object, falling back to the defaults documented above.
fn bert_normalizer(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let options = cx
        .extract_opt::<BertNormalizerOptions>(0)?
        .unwrap_or_default();
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    // Mutating the neon-managed value requires holding the VM lock.
    normalizer.borrow_mut(&guard).normalizer = Some(
        tk::normalizers::bert::BertNormalizer::new(
            options.clean_text,
            options.handle_chinese_chars,
            options.strip_accents,
            options.lowercase,
        )
        .into(),
    );
    Ok(normalizer)
}
// The four constructors below share one pattern: allocate an empty
// JS-backed Normalizer, then install the chosen Unicode normalization
// form under the VM lock.
/// nfd()
fn nfd(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::unicode::NFD.into());
    Ok(normalizer)
}
/// nfkd()
fn nfkd(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::unicode::NFKD.into());
    Ok(normalizer)
}
/// nfc()
fn nfc(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::unicode::NFC.into());
    Ok(normalizer)
}
/// nfkc()
fn nfkc(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::unicode::NFKC.into());
    Ok(normalizer)
}
/// strip(left?: boolean, right?: boolean)
// Whitespace stripping; both sides default to true when omitted.
fn strip(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let left = cx.extract_opt::<bool>(0)?.unwrap_or(true);
    let right = cx.extract_opt::<bool>(1)?.unwrap_or(true);
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer =
        Some(tk::normalizers::strip::Strip::new(left, right).into());
    Ok(normalizer)
}
/// strip_accents()
fn strip_accents(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::strip::StripAccents.into());
    Ok(normalizer)
}
/// sequence(normalizers: Normalizer[])
// Builds one sequence normalizer from a JS array of normalizers.
// Nested sequences are flattened into the new one; an element that was
// never initialized raises a JS error.
fn sequence(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let normalizers = cx.argument::<JsArray>(0)?.to_vec(&mut cx)?;
    let mut sequence = Vec::with_capacity(normalizers.len());
    normalizers.into_iter().try_for_each(|normalizer| {
        match normalizer.downcast::<JsNormalizer>().or_throw(&mut cx) {
            Ok(normalizer) => {
                let guard = cx.lock();
                // Clone the inner wrapper out so the borrow ends before
                // we touch `cx` again.
                let normalizer = normalizer.borrow(&guard).normalizer.clone();
                if let Some(normalizer) = normalizer {
                    match normalizer {
                        JsNormalizerWrapper::Sequence(seq) => sequence.extend(seq),
                        JsNormalizerWrapper::Wrapped(inner) => sequence.push(inner),
                    }
                    Ok(())
                } else {
                    cx.throw_error("Uninitialized Normalizer")
                }
            }
            Err(e) => Err(e),
        }
    })?;
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(JsNormalizerWrapper::Sequence(sequence));
    Ok(normalizer)
}
/// lowercase()
fn lowercase(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::utils::Lowercase.into());
    Ok(normalizer)
}
/// replace()
// Pattern/content replacement normalizer; an invalid pattern is
// surfaced as a JS error.
fn replace(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let pattern: String = cx.extract::<String>(0)?;
    let content: String = cx.extract::<String>(1)?;
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(
        tk::normalizers::replace::Replace::new(pattern, content)
            .map_err(|e| Error(e.to_string()))?
            .into(),
    );
    Ok(normalizer)
}
/// nmt()
fn nmt(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(tk::normalizers::unicode::Nmt.into());
    Ok(normalizer)
}
/// precompiled()
// Builds a normalizer from a serialized (SentencePiece-style) blob
// passed as bytes from JS; a malformed blob is surfaced as a JS error.
fn precompiled(mut cx: FunctionContext) -> JsResult<JsNormalizer> {
    let bytes = cx.extract::<Vec<u8>>(0)?;
    let mut normalizer = JsNormalizer::new::<_, JsNormalizer, _>(&mut cx, vec![])?;
    let guard = cx.lock();
    normalizer.borrow_mut(&guard).normalizer = Some(
        tk::normalizers::precompiled::Precompiled::from(&bytes)
            .map_err(|e| Error(e.to_string()))?
            .into(),
    );
    Ok(normalizer)
}
/// Register everything here
// Exposes every normalizer constructor to JS as `<prefix>_<Name>`.
pub fn register(m: &mut ModuleContext, prefix: &str) -> NeonResult<()> {
    m.export_function(&format!("{}_BertNormalizer", prefix), bert_normalizer)?;
    m.export_function(&format!("{}_NFD", prefix), nfd)?;
    m.export_function(&format!("{}_NFKD", prefix), nfkd)?;
    m.export_function(&format!("{}_NFC", prefix), nfc)?;
    m.export_function(&format!("{}_NFKC", prefix), nfkc)?;
    m.export_function(&format!("{}_Sequence", prefix), sequence)?;
    m.export_function(&format!("{}_Lowercase", prefix), lowercase)?;
    m.export_function(&format!("{}_Strip", prefix), strip)?;
    m.export_function(&format!("{}_StripAccents", prefix), strip_accents)?;
    m.export_function(&format!("{}_Nmt", prefix), nmt)?;
    m.export_function(&format!("{}_Precompiled", prefix), precompiled)?;
    m.export_function(&format!("{}_Replace", prefix), replace)?;
    Ok(())
}
#[cfg(test)]
mod test {
    use super::*;
    use tk::normalizers::unicode::{NFC, NFKC};
    use tk::normalizers::utils::Sequence;
    use tk::normalizers::NormalizerWrapper;
    // Checks that the JS-side wrapper serializes byte-for-byte like the
    // corresponding native tokenizers types, and deserializes back.
    #[test]
    fn serialize() {
        let js_wrapped: JsNormalizerWrapper = NFKC.into();
        let js_ser = serde_json::to_string(&js_wrapped).unwrap();
        let rs_wrapped = NormalizerWrapper::NFKC(NFKC);
        let rs_ser = serde_json::to_string(&rs_wrapped).unwrap();
        assert_eq!(js_ser, rs_ser);
        let js_norm: Normalizer = serde_json::from_str(&rs_ser).unwrap();
        match js_norm.normalizer.unwrap() {
            JsNormalizerWrapper::Wrapped(nfc) => match nfc.as_ref() {
                NormalizerWrapper::NFKC(_) => {}
                _ => panic!("Expected NFKC"),
            },
            _ => panic!("Expected wrapped, not sequence."),
        }
        // NOTE(review): `.into()` here goes through the blanket From
        // impl, producing the `Wrapped` variant holding a native
        // Sequence (not the `Sequence` variant) — the serialized forms
        // still match, which is what the assertions check.
        let js_seq: JsNormalizerWrapper = Sequence::new(vec![NFC.into(), NFKC.into()]).into();
        let js_wrapper_ser = serde_json::to_string(&js_seq).unwrap();
        let rs_wrapped = NormalizerWrapper::Sequence(Sequence::new(vec![NFC.into(), NFKC.into()]));
        let rs_ser = serde_json::to_string(&rs_wrapped).unwrap();
        assert_eq!(js_wrapper_ser, rs_ser);
        let js_seq = Normalizer {
            normalizer: Some(js_seq),
        };
        let js_ser = serde_json::to_string(&js_seq).unwrap();
        assert_eq!(js_wrapper_ser, js_ser);
        let rs_seq = Sequence::new(vec![NFC.into(), NFKC.into()]);
        let rs_ser = serde_json::to_string(&rs_seq).unwrap();
        assert_eq!(js_wrapper_ser, rs_ser);
    }
}
| 34.263323 | 100 | 0.611619 |
0939fe14d646bdf24c01bba06272daa6debc7515 | 4,548 | #[cfg(test)]
mod file_editor {
    use sdl2::pixels::*;
    use sdl2::rect::*;
    use std::sync::*;
    // End-to-end check: opening a file and inserting text moves the
    // caret and prepends to the buffer.
    // NOTE(review): `Renderer`, `FileEditor` and `EditorFile` are not
    // imported here — presumably reachable via the glob imports or an
    // outer scope; confirm this module actually compiles standalone.
    #[test]
    fn add_text() {
        use crate::tests::support;
        let config = support::build_config();
        let canvas = support::build_canvas();
        let font_context = sdl2::ttf::init().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut renderer = Renderer::new(Arc::clone(&config), &font_context, &texture_creator);
        let mut editor = FileEditor::new(Arc::clone(&config));
        let mut file = EditorFile::new("./foo.txt".to_string(), "foo".to_string(), config.clone());
        file.prepare_ui(&mut renderer);
        // A fresh editor accepts the file (no previous file returned)
        // with the caret at position 0 and one section for "foo".
        assert_eq!(editor.open_file(file).is_none(), true);
        assert_eq!(editor.caret().position().text_position(), 0);
        assert_eq!(editor.file().is_some(), true);
        assert_eq!(editor.file().unwrap().sections().len(), 1);
        assert_eq!(editor.file().unwrap().get_character_at(0).is_some(), true);
        // Inserting at the caret prepends and advances the caret by one.
        editor.insert_text("z".to_string(), &mut renderer);
        assert_eq!(editor.caret().position().text_position(), 1);
        assert_eq!(editor.file().is_some(), true);
        assert_eq!(editor.file().unwrap().buffer(), "zfoo".to_string());
    }
}
#[cfg(test)]
mod text_character {
    use sdl2::pixels::*;
    use sdl2::rect::*;
    use std::sync::*;
    // Each test below builds a minimal SDL renderer and a newline
    // TextCharacter, then checks one accessor after `prepare_ui`.
    // NOTE(review): `support`, `Renderer` and `TextCharacter` are used
    // without a visible import in this module — confirm they come from
    // an enclosing scope.
    #[test]
    fn must_return_valid_source() {
        let config = support::build_config();
        let canvas = support::build_canvas();
        let font_context = sdl2::ttf::init().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut renderer = Renderer::new(Arc::clone(&config), &font_context, &texture_creator);
        let mut widget =
            TextCharacter::new('\n', 0, 0, true, Color::RGB(1, 12, 123), Arc::clone(&config));
        widget.prepare_ui(&mut renderer);
        // A newline has no glyph, so its texture source is empty.
        assert_eq!(widget.source(), &Rect::new(0, 0, 0, 0));
    }
    #[test]
    fn must_return_valid_dest() {
        let config = support::build_config();
        let canvas = support::build_canvas();
        let font_context = sdl2::ttf::init().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut renderer = Renderer::new(Arc::clone(&config), &font_context, &texture_creator);
        let mut widget =
            TextCharacter::new('\n', 0, 0, true, Color::RGB(1, 12, 123), Arc::clone(&config));
        widget.prepare_ui(&mut renderer);
        assert_eq!(widget.dest(), &Rect::new(0, 0, 0, 0));
    }
    #[test]
    fn must_return_valid_color() {
        let config = support::build_config();
        let canvas = support::build_canvas();
        let font_context = sdl2::ttf::init().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut renderer = Renderer::new(Arc::clone(&config), &font_context, &texture_creator);
        let mut widget =
            TextCharacter::new('\n', 0, 0, true, Color::RGB(1, 12, 123), Arc::clone(&config));
        widget.prepare_ui(&mut renderer);
        // The color passed at construction is preserved.
        assert_eq!(widget.color(), &Color::RGB(1, 12, 123));
    }
    #[test]
    fn must_update_position_of_new_line() {
        let config = support::build_config();
        // This test builds its own window/canvas instead of using the
        // support helper.
        let sdl_context = sdl2::init().unwrap();
        let video_subsystem = sdl_context.video().unwrap();
        let window = video_subsystem
            .window("Test", 1, 1)
            .borderless()
            .opengl()
            .build()
            .unwrap();
        let canvas = window.into_canvas().accelerated().build().unwrap();
        let font_context = sdl2::ttf::init().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut renderer = Renderer::new(Arc::clone(&config), &font_context, &texture_creator);
        let mut widget =
            TextCharacter::new('\n', 0, 0, true, Color::RGB(0, 0, 0), Arc::clone(&config));
        widget.prepare_ui(&mut renderer);
        let mut current = Rect::new(0, 0, 0, 0);
        // A newline does not advance the layout rectangle.
        widget.update_position(&mut current);
        assert_eq!(current, Rect::new(0, 0, 0, 0));
        assert_eq!(widget.dest(), &Rect::new(0, 0, 0, 0));
    }
}
#[cfg(test)]
// NOTE(review): a `fn main` gated behind `#[cfg(test)]` is unusual and
// looks like leftover scaffolding — `config` and `canvas` are built but
// never used. Confirm before removing.
fn main() {
    let config = support::build_config();
    let sdl_context = sdl2::init().unwrap();
    let video_subsystem = sdl_context.video().unwrap();
    let window = video_subsystem
        .window("Test", 1, 1)
        .borderless()
        .opengl()
        .build()
        .unwrap();
    let canvas = window.into_canvas().accelerated().build().unwrap();
}
09174828689ee29f35e40b3bcba6d2794cfc4d6f | 15,204 | use crate::unicorn::NodeRef;
//
// Public Interface
//
#[allow(dead_code)]
#[derive(Debug, Eq, PartialEq)]
/// Outcome of a solver query.
pub enum Solution {
    /// The queried formula is satisfiable.
    Sat,
    /// The queried formula is unsatisfiable.
    Unsat,
    /// The solver could not decide within its limits.
    Timeout,
}
/// Common interface over the SMT backends implemented below
/// (none / Boolector / Z3).
pub trait Solver {
    /// Creates a fresh solver instance.
    fn new() -> Self;
    /// Human-readable backend name.
    fn name() -> &'static str;
    /// Checks satisfiability of `root` interpreted as a boolean.
    fn solve(&mut self, root: &NodeRef) -> Solution;
    /// True when `node` can be proven to always hold.
    fn is_always_true(&mut self, node: &NodeRef) -> bool;
    /// True when `node` can be proven to never hold.
    fn is_always_false(&mut self, node: &NodeRef) -> bool;
    /// True when `left` and `right` can be proven always equal.
    fn is_always_equal(&mut self, left: &NodeRef, right: &NodeRef) -> bool;
}
//
// Private Implementation
//
// TODO: Move this module into separate file.
pub mod none_impl {
    use crate::unicorn::solver::{Solution, Solver};
    use crate::unicorn::NodeRef;
    /// Fallback solver used when no SMT backend feature is enabled:
    /// it never proves anything and never decides satisfiability.
    pub struct NoneSolver {}
    impl Solver for NoneSolver {
        fn name() -> &'static str {
            "None"
        }
        fn new() -> Self {
            Self {}
        }
        // All proof queries conservatively answer "cannot prove".
        fn is_always_true(&mut self, _node: &NodeRef) -> bool {
            false
        }
        fn is_always_false(&mut self, _node: &NodeRef) -> bool {
            false
        }
        fn is_always_equal(&mut self, _left: &NodeRef, _right: &NodeRef) -> bool {
            false
        }
        fn solve(&mut self, _root: &NodeRef) -> Solution {
            Solution::Timeout
        }
    }
}
// TODO: Move this module into separate file.
#[cfg(feature = "boolector")]
pub mod boolector_impl {
    use crate::unicorn::solver::{Solution, Solver};
    use crate::unicorn::{HashableNodeRef, Node, NodeRef};
    use boolector_solver::{
        option::{BtorOption, ModelGen, OutputFileFormat},
        Btor, SolverResult, BV,
    };
    use std::collections::HashMap;
    use std::rc::Rc;
    type BVRef = BV<Rc<Btor>>;
    /// Solver backend based on the Boolector SMT solver. Nodes of the
    /// model graph are translated to bitvectors on demand and memoized.
    pub struct BoolectorSolver {
        solver: Rc<Btor>,
        // Cache of already-translated nodes (graph sharing).
        mapping: HashMap<HashableNodeRef, BVRef>,
    }
    impl Solver for BoolectorSolver {
        fn name() -> &'static str {
            "Boolector"
        }
        fn new() -> Self {
            let solver = Rc::new(Btor::new());
            // TODO: Properly configure the below options.
            solver.set_opt(BtorOption::ModelGen(ModelGen::All));
            solver.set_opt(BtorOption::Incremental(true));
            solver.set_opt(BtorOption::OutputFileFormat(OutputFileFormat::SMTLIBv2));
            Self {
                solver,
                mapping: HashMap::new(),
            }
        }
        // `node` always holds iff its negation is unsatisfiable.
        fn is_always_true(&mut self, node: &NodeRef) -> bool {
            let bv = self.visit(node).not();
            self.solve_impl(bv) == Solution::Unsat
        }
        // `node` never holds iff the node itself is unsatisfiable.
        fn is_always_false(&mut self, node: &NodeRef) -> bool {
            let bv = self.visit(node);
            self.solve_impl(bv) == Solution::Unsat
        }
        // Equality holds everywhere iff inequality is unsatisfiable.
        fn is_always_equal(&mut self, left: &NodeRef, right: &NodeRef) -> bool {
            let bv_left = self.visit(left);
            let bv_right = self.visit(right);
            let bv = bv_left._ne(&bv_right);
            self.solve_impl(bv) == Solution::Unsat
        }
        fn solve(&mut self, root: &NodeRef) -> Solution {
            let bv = self.visit(root);
            // Only the lowest bit is relevant for the boolean root.
            self.solve_impl(bv.slice(0, 0))
        }
    }
    impl BoolectorSolver {
        // Runs one scoped query: push/pop keeps the assertion local so
        // the incremental solver state stays reusable.
        fn solve_impl(&mut self, bv: BVRef) -> Solution {
            self.solver.push(1);
            bv.assert();
            let solution = match self.solver.sat() {
                SolverResult::Sat => Solution::Sat,
                SolverResult::Unsat => Solution::Unsat,
                SolverResult::Unknown => Solution::Timeout,
            };
            self.solver.pop(1);
            solution
        }
        // Memoized translation: shared subgraphs are translated once.
        fn visit(&mut self, node: &NodeRef) -> BVRef {
            let key = HashableNodeRef::from(node.clone());
            self.mapping.get(&key).cloned().unwrap_or_else(|| {
                let value = self.translate(node);
                assert!(!self.mapping.contains_key(&key));
                self.mapping.insert(key, value.clone());
                value
            })
        }
        // Structural translation of one model node into a Boolector
        // bitvector; recursion goes through `visit` for memoization.
        #[rustfmt::skip]
        fn translate(&mut self, node: &NodeRef) -> BVRef {
            match &*node.borrow() {
                Node::Const { sort, imm, .. } => {
                    let width = sort.bitsize() as u32;
                    BV::from_u64(self.solver.clone(), *imm, width)
                }
                Node::Read { .. } => panic!("missing array logic"),
                Node::Write { .. } => panic!("missing array logic"),
                Node::Add { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.add(&bv_right)
                }
                Node::Sub { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.sub(&bv_right)
                }
                Node::Mul { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.mul(&bv_right)
                }
                Node::Div { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.udiv(&bv_right)
                },
                Node::Rem { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.urem(&bv_right)
                }
                Node::Ult { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.ult(&bv_right)
                }
                Node::Ext { from, value, .. } => {
                    // Zero-extends `value` from its source width to 64 bits.
                    let width = from.bitsize() as u32;
                    let bv_value = self.visit(value);
                    assert_eq!(bv_value.get_width(), width);
                    bv_value.uext(64 - width)
                }
                Node::Ite { sort, cond, left, right, .. } => {
                    let width = sort.bitsize() as u32;
                    let bv_cond = self.visit(cond);
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    assert_eq!(bv_left.get_width(), width);
                    assert_eq!(bv_right.get_width(), width);
                    bv_cond.cond_bv(&bv_left, &bv_right)
                }
                Node::Eq { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left._eq(&bv_right)
                }
                Node::And { left, right, .. } => {
                    let bv_left = self.visit(left);
                    let bv_right = self.visit(right);
                    bv_left.and(&bv_right)
                }
                Node::Not { value, .. } => {
                    let bv_value = self.visit(value);
                    bv_value.not()
                }
                Node::State { sort, name, .. } => {
                    let width = sort.bitsize() as u32;
                    BV::new(self.solver.clone(), width, name.as_deref())
                }
                Node::Input { sort, name, .. } => {
                    let width = sort.bitsize() as u32;
                    BV::new(self.solver.clone(), width, Some(name))
                }
                Node::Next { .. } => panic!("should be unreachable"),
                Node::Bad { .. } => panic!("should be unreachable"),
                Node::Comment(_) => panic!("cannot translate"),
            }
        }
    }
}
// TODO: Move this module into separate file.
#[cfg(feature = "z3")]
pub mod z3solver_impl {
    use crate::unicorn::solver::{Solution, Solver};
    use crate::unicorn::{HashableNodeRef, Node, NodeRef, NodeType};
    use std::collections::HashMap;
    use z3_solver::{
        ast::{Ast, Bool, Dynamic, BV},
        Config, Context, SatResult, Solver as Z3Solver,
    };
    /// Backend that lowers model nodes into Z3 ASTs and answers
    /// satisfiability queries through a single long-lived Z3 solver.
    pub struct Z3SolverWrapper<'ctx> {
        context: &'ctx Context,
        solver: Z3Solver<'ctx>,
        // Memoizes the Z3 AST built for each node so shared sub-graphs are
        // translated exactly once (consulted and filled by `visit`).
        mapping: HashMap<HashableNodeRef, Dynamic<'ctx>>,
        // Pre-built 64-bit constants 0 and 1, reused when widening booleans.
        zero: BV<'ctx>,
        one: BV<'ctx>,
    }
    impl<'ctx> Solver for Z3SolverWrapper<'ctx> {
        fn name() -> &'static str {
            "Z3"
        }
        fn new() -> Self {
            let config = Config::new();
            let context = Context::new(&config);
            // TODO: This is very funky, avoid leaking context.
            // Box::leak gives the context a 'static-like lifetime so the
            // solver and cached ASTs can borrow it; the allocation is never
            // reclaimed for the life of the process.
            let leak: &'ctx Context = Box::leak(Box::new(context));
            Self {
                context: leak,
                solver: Z3Solver::new(leak),
                mapping: HashMap::new(),
                zero: BV::from_u64(leak, 0, 64),
                one: BV::from_u64(leak, 1, 64),
            }
        }
        fn is_always_true(&mut self, node: &NodeRef) -> bool {
            // `node` is valid iff its negation is unsatisfiable.
            let z3_bool = self.visit(node).as_bool().expect("bool").not();
            self.solve_impl(&z3_bool) == Solution::Unsat
        }
        fn is_always_false(&mut self, node: &NodeRef) -> bool {
            // `node` is unsatisfiable iff asserting it yields Unsat.
            let z3_bool = self.visit(node).as_bool().expect("bool");
            self.solve_impl(&z3_bool) == Solution::Unsat
        }
        fn is_always_equal(&mut self, left: &NodeRef, right: &NodeRef) -> bool {
            // Equivalent iff `left != right` has no model.
            let z3_left = Dynamic::from_ast(self.visit(left));
            let z3_right = Dynamic::from_ast(self.visit(right));
            let z3_bool = z3_left._eq(&z3_right).not();
            self.solve_impl(&z3_bool) == Solution::Unsat
        }
        fn solve(&mut self, root: &NodeRef) -> Solution {
            let z3_bool = self.visit(root).as_bool().expect("bool");
            self.solve_impl(&z3_bool)
        }
    }
    impl<'ctx> Z3SolverWrapper<'ctx> {
        // Checks one boolean under a push/pop scope so the solver's base
        // assertion stack is left untouched between queries.
        fn solve_impl(&mut self, z3_bool: &Bool<'ctx>) -> Solution {
            self.solver.push();
            self.solver.assert(z3_bool);
            let solution = match self.solver.check() {
                SatResult::Sat => Solution::Sat,
                SatResult::Unsat => Solution::Unsat,
                SatResult::Unknown => Solution::Timeout,
            };
            self.solver.pop(1);
            solution
        }
        // Returns the cached translation of `node`, translating on first use.
        fn visit(&mut self, node: &NodeRef) -> &Dynamic<'ctx> {
            let key = HashableNodeRef::from(node.clone());
            if !self.mapping.contains_key(&key) {
                let value = self.translate(node);
                // `translate` must not have inserted this node recursively.
                assert!(!self.mapping.contains_key(&key));
                self.mapping.insert(key.clone(), value);
            }
            &self.mapping[&key]
        }
        // Structural lowering of one node; children are resolved via `visit`
        // so the memoization cache is shared across the whole graph.
        #[rustfmt::skip]
        fn translate(&mut self, node: &NodeRef) -> Dynamic<'ctx> {
            match &*node.borrow() {
                Node::Const { sort: NodeType::Bit, imm, .. } => {
                    Bool::from_bool(self.context, *imm != 0).into()
                }
                Node::Const { sort, imm, .. } => {
                    let width = sort.bitsize() as u32;
                    BV::from_u64(self.context, *imm, width).into()
                }
                Node::Read { .. } => panic!("missing array logic"),
                Node::Write { .. } => panic!("missing array logic"),
                Node::Add { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    z3_left.bvadd(&z3_right).into()
                }
                Node::Sub { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    z3_left.bvsub(&z3_right).into()
                }
                Node::Mul { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    z3_left.bvmul(&z3_right).into()
                }
                Node::Div { .. } => panic!("implement DIV"),
                Node::Rem { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    // Unsigned remainder, matching `urem` in the other backend.
                    z3_left.bvurem(&z3_right).into()
                }
                Node::Ult { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    z3_left.bvult(&z3_right).into()
                }
                Node::Ext { from: NodeType::Bit, value, .. } => {
                    let z3_value = self.visit(value).as_bool().expect("bool");
                    // NOTE(review): `cond.ite(a, b)` yields `a` when the
                    // condition holds, so this maps true -> 0 and false -> 1.
                    // If a set bit should extend to the value 1, the two
                    // branches look swapped - confirm intended semantics.
                    z3_value.ite(&self.zero, &self.one).into()
                }
                Node::Ext { from, value, .. } => {
                    let width = from.bitsize() as u32;
                    let z3_value = self.visit(value).as_bv().expect("bv");
                    // Zero-extend the operand up to the 64-bit machine width.
                    z3_value.zero_ext(64 - width).into()
                }
                Node::Ite { cond, left, right, .. } => {
                    let z3_cond = self.visit(cond).as_bool().expect("bool");
                    let z3_left = Dynamic::from_ast(self.visit(left));
                    let z3_right = Dynamic::from_ast(self.visit(right));
                    z3_cond.ite(&z3_left, &z3_right)
                }
                Node::Eq { left, right, .. } => {
                    let z3_left = self.visit(left).as_bv().expect("bv");
                    let z3_right = self.visit(right).as_bv().expect("bv");
                    z3_left._eq(&z3_right).into()
                }
                Node::And { left, right, .. } => {
                    let z3_left = self.visit(left).as_bool().expect("bool");
                    let z3_right = self.visit(right).as_bool().expect("bool");
                    Bool::and(self.context, &[&z3_left, &z3_right]).into()
                }
                Node::Not { value, .. } => {
                    let z3_value = self.visit(value).as_bool().expect("bool");
                    z3_value.not().into()
                }
                Node::State { sort: NodeType::Bit, name, .. } => {
                    // Bit-sorted states become named boolean constants.
                    let name = name.as_deref().expect("name");
                    Bool::new_const(self.context, name).into()
                }
                Node::State { sort, name, .. } => {
                    let width = sort.bitsize() as u32;
                    let name = name.as_deref().expect("name");
                    BV::new_const(self.context, name, width).into()
                }
                Node::Input { sort, name, .. } => {
                    let width = sort.bitsize() as u32;
                    BV::new_const(self.context, name.to_owned(), width).into()
                }
                Node::Next { .. } => panic!("should be unreachable"),
                Node::Bad { .. } => panic!("should be unreachable"),
                Node::Comment(_) => panic!("cannot translate"),
            }
        }
    }
}
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Slice management and manipulation
//!
//! For more details `std::slice`.
#![stable(feature = "rust1", since = "1.0.0")]
// How this module is organized.
//
// The library infrastructure for slices is fairly messy. There's
// a lot of stuff defined here. Let's keep it clean.
//
// Since slices don't support inherent methods; all operations
// on them are defined on traits, which are then reexported from
// the prelude for convenience. So there are a lot of traits here.
//
// The layout of this file is thus:
//
// * Slice-specific 'extension' traits and their implementations. This
// is where most of the slice API resides.
// * Implementations of a few common traits with important slice ops.
// * Definitions of a bunch of iterators.
// * Free functions.
// * The `raw` and `bytes` submodules.
// * Boilerplate trait implementations.
use borrow::Borrow;
use cmp::Ordering::{self, Less, Equal, Greater};
use cmp;
use fmt;
use intrinsics::assume;
use iter::*;
use ops::{FnMut, self};
use option::Option;
use option::Option::{None, Some};
use result::Result;
use result::Result::{Ok, Err};
use ptr;
use mem;
use marker::{Copy, Send, Sync, Sized, self};
use iter_private::TrustedRandomAccess;
// Mirrors the in-memory layout of a slice fat pointer: data pointer plus
// element count. `#[repr(C)]` pins the field order so `SliceExt::len`
// below can `mem::transmute` a `&[T]` into this shape and read the length.
#[repr(C)]
struct Repr<T> {
    pub data: *const T,
    pub len: usize,
}
//
// Extension traits
//
/// Extension methods for slices.
///
/// Because slices have no inherent methods at this layer, the whole slice
/// API is declared on this trait and implemented for `[T]` below; later
/// crates re-export it as inherent `impl [T]` methods.
#[unstable(feature = "core_slice_ext",
           reason = "stable interface provided by `impl [T]` in later crates",
           issue = "32110")]
#[allow(missing_docs)] // documented elsewhere
pub trait SliceExt {
    type Item;
    // --- shared (immutable) accessors and iterators ---
    #[stable(feature = "core", since = "1.6.0")]
    fn split_at(&self, mid: usize) -> (&[Self::Item], &[Self::Item]);
    #[stable(feature = "core", since = "1.6.0")]
    fn iter(&self) -> Iter<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split<P>(&self, pred: P) -> Split<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn splitn<P>(&self, n: usize, pred: P) -> SplitN<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn windows(&self, size: usize) -> Windows<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn chunks(&self, size: usize) -> Chunks<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn get<I>(&self, index: I) -> Option<&I::Output>
        where I: SliceIndex<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn first(&self) -> Option<&Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split_first(&self) -> Option<(&Self::Item, &[Self::Item])>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split_last(&self) -> Option<(&Self::Item, &[Self::Item])>;
    #[stable(feature = "core", since = "1.6.0")]
    fn last(&self) -> Option<&Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
        where I: SliceIndex<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn as_ptr(&self) -> *const Self::Item;
    // --- searching ---
    #[stable(feature = "core", since = "1.6.0")]
    fn binary_search<Q: ?Sized>(&self, x: &Q) -> Result<usize, usize>
        where Self::Item: Borrow<Q>,
              Q: Ord;
    #[stable(feature = "core", since = "1.6.0")]
    fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
        where F: FnMut(&'a Self::Item) -> Ordering;
    #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
    fn binary_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, f: F) -> Result<usize, usize>
        where F: FnMut(&'a Self::Item) -> B,
              B: Borrow<Q>,
              Q: Ord;
    #[stable(feature = "core", since = "1.6.0")]
    fn len(&self) -> usize;
    #[stable(feature = "core", since = "1.6.0")]
    fn is_empty(&self) -> bool { self.len() == 0 }
    // --- mutable counterparts of the accessors above ---
    #[stable(feature = "core", since = "1.6.0")]
    fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
        where I: SliceIndex<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn iter_mut(&mut self) -> IterMut<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn first_mut(&mut self) -> Option<&mut Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split_first_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split_last_mut(&mut self) -> Option<(&mut Self::Item, &mut [Self::Item])>;
    #[stable(feature = "core", since = "1.6.0")]
    fn last_mut(&mut self) -> Option<&mut Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn split_mut<P>(&mut self, pred: P) -> SplitMut<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
        where P: FnMut(&Self::Item) -> bool;
    #[stable(feature = "core", since = "1.6.0")]
    fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn swap(&mut self, a: usize, b: usize);
    #[stable(feature = "core", since = "1.6.0")]
    fn split_at_mut(&mut self, mid: usize) -> (&mut [Self::Item], &mut [Self::Item]);
    #[stable(feature = "core", since = "1.6.0")]
    fn reverse(&mut self);
    #[stable(feature = "core", since = "1.6.0")]
    unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
        where I: SliceIndex<Self::Item>;
    #[stable(feature = "core", since = "1.6.0")]
    fn as_mut_ptr(&mut self) -> *mut Self::Item;
    // --- comparisons and bulk copies ---
    #[stable(feature = "core", since = "1.6.0")]
    fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
    #[stable(feature = "core", since = "1.6.0")]
    fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
    #[stable(feature = "core", since = "1.6.0")]
    fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
    #[stable(feature = "clone_from_slice", since = "1.7.0")]
    fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
    #[stable(feature = "copy_from_slice", since = "1.9.0")]
    fn copy_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Copy;
}
// Use macros to be generic over const/mut
// Advances a raw pointer by `$by` elements. For zero-sized element types
// (`size_from_ptr` returns 0) an ordinary `offset` would not move the
// address, so the count is instead tracked in the pointer value itself by
// stepping a byte pointer with `wrapping_offset`.
macro_rules! slice_offset {
    ($ptr:expr, $by:expr) => {{
        let ptr = $ptr;
        if size_from_ptr(ptr) == 0 {
            (ptr as *mut i8).wrapping_offset($by) as _
        } else {
            ptr.offset($by)
        }
    }};
}
// make a &T from a *const T
macro_rules! make_ref {
($ptr:expr) => {{
let ptr = $ptr;
if size_from_ptr(ptr) == 0 {
// Use a non-null pointer value
&*(1 as *mut _)
} else {
&*ptr
}
}};
}
// make a &mut T from a *mut T
macro_rules! make_ref_mut {
($ptr:expr) => {{
let ptr = $ptr;
if size_from_ptr(ptr) == 0 {
// Use a non-null pointer value
&mut *(1 as *mut _)
} else {
&mut *ptr
}
}};
}
#[unstable(feature = "core_slice_ext",
           reason = "stable interface provided by `impl [T]` in later crates",
           issue = "32110")]
// The single implementation of the slice API for `[T]`.
impl<T> SliceExt for [T] {
    type Item = T;
    #[inline]
    fn split_at(&self, mid: usize) -> (&[T], &[T]) {
        // Both halves are bounds-checked by the indexing operations.
        (&self[..mid], &self[mid..])
    }
    #[inline]
    fn iter(&self) -> Iter<T> {
        unsafe {
            let p = if mem::size_of::<T>() == 0 {
                // Zero-sized elements: use address 1 as a non-null sentinel.
                1 as *const _
            } else {
                let p = self.as_ptr();
                // Tell the optimizer the data pointer is never null.
                assume(!p.is_null());
                p
            };
            Iter {
                ptr: p,
                end: slice_offset!(p, self.len() as isize),
                _marker: marker::PhantomData
            }
        }
    }
    #[inline]
    fn split<P>(&self, pred: P) -> Split<T, P> where P: FnMut(&T) -> bool {
        Split {
            v: self,
            pred: pred,
            finished: false
        }
    }
    #[inline]
    fn splitn<P>(&self, n: usize, pred: P) -> SplitN<T, P> where
        P: FnMut(&T) -> bool,
    {
        SplitN {
            inner: GenericSplitN {
                iter: self.split(pred),
                count: n,
                invert: false
            }
        }
    }
    #[inline]
    fn rsplitn<P>(&self, n: usize, pred: P) -> RSplitN<T, P> where
        P: FnMut(&T) -> bool,
    {
        RSplitN {
            inner: GenericSplitN {
                iter: self.split(pred),
                count: n,
                invert: true
            }
        }
    }
    #[inline]
    fn windows(&self, size: usize) -> Windows<T> {
        // A zero-length window is meaningless and would never advance.
        assert!(size != 0);
        Windows { v: self, size: size }
    }
    #[inline]
    fn chunks(&self, size: usize) -> Chunks<T> {
        assert!(size != 0);
        Chunks { v: self, size: size }
    }
    #[inline]
    fn get<I>(&self, index: I) -> Option<&I::Output>
        where I: SliceIndex<T>
    {
        // Checked indexing is delegated to the `SliceIndex` implementations.
        index.get(self)
    }
    #[inline]
    fn first(&self) -> Option<&T> {
        if self.is_empty() { None } else { Some(&self[0]) }
    }
    #[inline]
    fn split_first(&self) -> Option<(&T, &[T])> {
        if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
    }
    #[inline]
    fn split_last(&self) -> Option<(&T, &[T])> {
        let len = self.len();
        if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
    }
    #[inline]
    fn last(&self) -> Option<&T> {
        if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
    }
    #[inline]
    unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
        where I: SliceIndex<T>
    {
        index.get_unchecked(self)
    }
    #[inline]
    fn as_ptr(&self) -> *const T {
        self as *const [T] as *const T
    }
    // Classic binary search expressed with `split_at`: each round halves
    // the candidate range `s`; `base` tracks how many elements were
    // discarded on the left so indices stay absolute.
    fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
        where F: FnMut(&'a T) -> Ordering
    {
        let mut base = 0usize;
        let mut s = self;
        loop {
            let (head, tail) = s.split_at(s.len() >> 1);
            if tail.is_empty() {
                // Not found; `base` is the insertion point.
                return Err(base)
            }
            match f(&tail[0]) {
                Less => {
                    base += head.len() + 1;
                    s = &tail[1..];
                }
                Greater => s = head,
                Equal => return Ok(base + head.len()),
            }
        }
    }
    #[inline]
    fn len(&self) -> usize {
        // Reads the length word of the fat pointer via the `#[repr(C)]`
        // `Repr` mirror defined above.
        unsafe {
            mem::transmute::<&[T], Repr<T>>(self).len
        }
    }
    #[inline]
    fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
        where I: SliceIndex<T>
    {
        index.get_mut(self)
    }
    #[inline]
    fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();
        unsafe {
            assert!(mid <= len);
            // The two raw-parts slices are disjoint, so handing out two
            // `&mut` views is sound.
            (from_raw_parts_mut(ptr, mid),
             from_raw_parts_mut(ptr.offset(mid as isize), len - mid))
        }
    }
    #[inline]
    fn iter_mut(&mut self) -> IterMut<T> {
        unsafe {
            let p = if mem::size_of::<T>() == 0 {
                // Zero-sized elements: non-null sentinel, as in `iter`.
                1 as *mut _
            } else {
                let p = self.as_mut_ptr();
                assume(!p.is_null());
                p
            };
            IterMut {
                ptr: p,
                end: slice_offset!(p, self.len() as isize),
                _marker: marker::PhantomData
            }
        }
    }
    #[inline]
    fn last_mut(&mut self) -> Option<&mut T> {
        let len = self.len();
        if len == 0 { return None; }
        Some(&mut self[len - 1])
    }
    #[inline]
    fn first_mut(&mut self) -> Option<&mut T> {
        if self.is_empty() { None } else { Some(&mut self[0]) }
    }
    #[inline]
    fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
        if self.is_empty() { None } else {
            // Borrow-split via `split_at_mut` to get two disjoint `&mut`s.
            let split = self.split_at_mut(1);
            Some((&mut split.0[0], split.1))
        }
    }
    #[inline]
    fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
        let len = self.len();
        if len == 0 { None } else {
            let split = self.split_at_mut(len - 1);
            Some((&mut split.1[0], split.0))
        }
    }
    #[inline]
    fn split_mut<P>(&mut self, pred: P) -> SplitMut<T, P> where P: FnMut(&T) -> bool {
        SplitMut { v: self, pred: pred, finished: false }
    }
    #[inline]
    fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<T, P> where
        P: FnMut(&T) -> bool
    {
        SplitNMut {
            inner: GenericSplitN {
                iter: self.split_mut(pred),
                count: n,
                invert: false
            }
        }
    }
    #[inline]
    fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<T, P> where
        P: FnMut(&T) -> bool,
    {
        RSplitNMut {
            inner: GenericSplitN {
                iter: self.split_mut(pred),
                count: n,
                invert: true
            }
        }
    }
    #[inline]
    fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
        assert!(chunk_size > 0);
        ChunksMut { v: self, chunk_size: chunk_size }
    }
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        unsafe {
            // Can't take two mutable loans from one vector, so instead just cast
            // them to their raw pointers to do the swap
            let pa: *mut T = &mut self[a];
            let pb: *mut T = &mut self[b];
            ptr::swap(pa, pb);
        }
    }
    fn reverse(&mut self) {
        // Swap symmetric pairs walking inward from both ends.
        let mut i: usize = 0;
        let ln = self.len();
        while i < ln / 2 {
            // Unsafe swap to avoid the bounds check in safe swap.
            unsafe {
                let pa: *mut T = self.get_unchecked_mut(i);
                let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
                ptr::swap(pa, pb);
            }
            i += 1;
        }
    }
    #[inline]
    unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
        where I: SliceIndex<T>
    {
        index.get_unchecked_mut(self)
    }
    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        self as *mut [T] as *mut T
    }
    #[inline]
    fn contains(&self, x: &T) -> bool where T: PartialEq {
        self.iter().any(|elt| *x == *elt)
    }
    #[inline]
    fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
        let n = needle.len();
        self.len() >= n && needle == &self[..n]
    }
    #[inline]
    fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
        let (m, n) = (self.len(), needle.len())
        ;
        m >= n && needle == &self[m-n..]
    }
    fn binary_search<Q: ?Sized>(&self, x: &Q) -> Result<usize, usize> where T: Borrow<Q>, Q: Ord {
        self.binary_search_by(|p| p.borrow().cmp(x))
    }
    #[inline]
    fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
        assert!(self.len() == src.len(),
                "destination and source slices have different lengths");
        // NOTE: We need to explicitly slice them to the same length
        // for bounds checking to be elided, and the optimizer will
        // generate memcpy for simple cases (for example T = u8).
        let len = self.len();
        let src = &src[..len];
        for i in 0..len {
            self[i].clone_from(&src[i]);
        }
    }
    #[inline]
    fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
        assert!(self.len() == src.len(),
                "destination and source slices have different lengths");
        unsafe {
            // `T: Copy` makes a raw byte copy sufficient and sound.
            ptr::copy_nonoverlapping(
                src.as_ptr(), self.as_mut_ptr(), self.len());
        }
    }
    #[inline]
    fn binary_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, mut f: F) -> Result<usize, usize>
        where F: FnMut(&'a Self::Item) -> B,
              B: Borrow<Q>,
              Q: Ord
    {
        self.binary_search_by(|k| f(k).borrow().cmp(b))
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
// `slice[i]` / `slice[a..b]` delegate to the matching `SliceIndex` impl,
// which panics on out-of-bounds access.
impl<T, I> ops::Index<I> for [T]
    where I: SliceIndex<T>
{
    type Output = I::Output;
    #[inline]
    fn index(&self, index: I) -> &I::Output {
        index.index(self)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
// Mutable counterpart of the `Index` impl above.
impl<T, I> ops::IndexMut<I> for [T]
    where I: SliceIndex<T>
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut I::Output {
        index.index_mut(self)
    }
}
// Out-of-line, cold panic helper: keeps the panic machinery out of the
// hot indexing paths that call it.
#[inline(never)]
#[cold]
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len);
}
// Out-of-line, cold panic helper for inverted ranges (start > end).
#[inline(never)]
#[cold]
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end);
}
/// A helper trait used for indexing operations.
///
/// Implemented for `usize` (yielding a single element) and for every
/// range type over `usize` (yielding a subslice). The `Index`/`IndexMut`
/// impls for `[T]` above dispatch through it.
#[unstable(feature = "slice_get_slice", issue = "35729")]
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
pub trait SliceIndex<T> {
    /// The output type returned by methods.
    type Output: ?Sized;
    /// Returns a shared reference to the output at this location, if in
    /// bounds.
    fn get(self, slice: &[T]) -> Option<&Self::Output>;
    /// Returns a mutable reference to the output at this location, if in
    /// bounds.
    fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output>;
    /// Returns a shared reference to the output at this location, without
    /// performing any bounds checking.
    ///
    /// The caller must guarantee the index is in bounds.
    unsafe fn get_unchecked(self, slice: &[T]) -> &Self::Output;
    /// Returns a mutable reference to the output at this location, without
    /// performing any bounds checking.
    ///
    /// The caller must guarantee the index is in bounds.
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut Self::Output;
    /// Returns a shared reference to the output at this location, panicking
    /// if out of bounds.
    fn index(self, slice: &[T]) -> &Self::Output;
    /// Returns a mutable reference to the output at this location, panicking
    /// if out of bounds.
    fn index_mut(self, slice: &mut [T]) -> &mut Self::Output;
}
#[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
// A single `usize` index selects one element.
impl<T> SliceIndex<T> for usize {
    type Output = T;
    #[inline]
    fn get(self, slice: &[T]) -> Option<&T> {
        if self < slice.len() {
            unsafe {
                // In bounds by the check above.
                Some(self.get_unchecked(slice))
            }
        } else {
            None
        }
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
        if self < slice.len() {
            unsafe {
                // In bounds by the check above.
                Some(self.get_unchecked_mut(slice))
            }
        } else {
            None
        }
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &T {
        &*slice.as_ptr().offset(self as isize)
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
        &mut *slice.as_mut_ptr().offset(self as isize)
    }
    #[inline]
    fn index(self, slice: &[T]) -> &T {
        // NB: use intrinsic indexing
        &(*slice)[self]
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut T {
        // NB: use intrinsic indexing
        &mut (*slice)[self]
    }
}
#[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
// `a..b` selects the subslice [a, b). This is the primary range impl;
// the other range types below are expressed in terms of it.
impl<T> SliceIndex<T> for ops::Range<usize> {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        // Reject inverted ranges and ranges past the end.
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
            unsafe {
                Some(self.get_unchecked(slice))
            }
        }
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
            unsafe {
                Some(self.get_unchecked_mut(slice))
            }
        }
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start)
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start)
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        // Panicking twins of the checks in `get`, with dedicated cold
        // helpers so the error paths stay out of line.
        if self.start > self.end {
            slice_index_order_fail(self.start, self.end);
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
        unsafe {
            self.get_unchecked(slice)
        }
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        if self.start > self.end {
            slice_index_order_fail(self.start, self.end);
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
        unsafe {
            self.get_unchecked_mut(slice)
        }
    }
}
#[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
// `..end` is exactly `0..end`; every method materializes that full range
// and forwards to the `ops::Range<usize>` implementation above.
impl<T> SliceIndex<T> for ops::RangeTo<usize> {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        let full = 0..self.end;
        full.get(slice)
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        let full = 0..self.end;
        full.get_mut(slice)
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        let full = 0..self.end;
        full.get_unchecked(slice)
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        let full = 0..self.end;
        full.get_unchecked_mut(slice)
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        let full = 0..self.end;
        full.index(slice)
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        let full = 0..self.end;
        full.index_mut(slice)
    }
}
#[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
// `start..` is `start..slice.len()`; every method materializes that full
// range and forwards to the `ops::Range<usize>` implementation above.
impl<T> SliceIndex<T> for ops::RangeFrom<usize> {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        let full = self.start..slice.len();
        full.get(slice)
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        let full = self.start..slice.len();
        full.get_mut(slice)
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        let full = self.start..slice.len();
        full.get_unchecked(slice)
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        let full = self.start..slice.len();
        full.get_unchecked_mut(slice)
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        let full = self.start..slice.len();
        full.index(slice)
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        let full = self.start..slice.len();
        full.index_mut(slice)
    }
}
#[stable(feature = "slice-get-slice-impls", since = "1.15.0")]
// `..` selects the whole slice; no bounds checks are ever needed, so
// every method just returns its argument.
impl<T> SliceIndex<T> for ops::RangeFull {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        Some(slice)
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        Some(slice)
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        slice
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        slice
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        slice
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        slice
    }
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
// Pre-2018 inclusive ranges are an enum (`Empty` / `NonEmpty`). A
// non-empty `start..=end` is rewritten as `start..end + 1`; `end ==
// usize::MAX` is rejected (or panics in `index`) because `end + 1` would
// overflow.
impl<T> SliceIndex<T> for ops::RangeInclusive<usize> {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        match self {
            ops::RangeInclusive::Empty { .. } => Some(&[]),
            ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
            ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get(slice),
        }
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        match self {
            ops::RangeInclusive::Empty { .. } => Some(&mut []),
            ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
            ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_mut(slice),
        }
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        // Caller guarantees the range is in bounds, which rules out the
        // overflowing `end == usize::MAX` case here.
        match self {
            ops::RangeInclusive::Empty { .. } => &[],
            ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_unchecked(slice),
        }
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        match self {
            ops::RangeInclusive::Empty { .. } => &mut [],
            ops::RangeInclusive::NonEmpty { start, end } => {
                (start..end + 1).get_unchecked_mut(slice)
            }
        }
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        match self {
            ops::RangeInclusive::Empty { .. } => &[],
            ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
                panic!("attempted to index slice up to maximum usize");
            },
            ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index(slice),
        }
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        match self {
            ops::RangeInclusive::Empty { .. } => &mut [],
            ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
                panic!("attempted to index slice up to maximum usize");
            },
            ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index_mut(slice),
        }
    }
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
// `..=end` delegates to the `RangeInclusive` impl above via the legacy
// `0...end` inclusive-range literal.
impl<T> SliceIndex<T> for ops::RangeToInclusive<usize> {
    type Output = [T];
    #[inline]
    fn get(self, slice: &[T]) -> Option<&[T]> {
        (0...self.end).get(slice)
    }
    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
        (0...self.end).get_mut(slice)
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
        (0...self.end).get_unchecked(slice)
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
        (0...self.end).get_unchecked_mut(slice)
    }
    #[inline]
    fn index(self, slice: &[T]) -> &[T] {
        (0...self.end).index(slice)
    }
    #[inline]
    fn index_mut(self, slice: &mut [T]) -> &mut [T] {
        (0...self.end).index_mut(slice)
    }
}
////////////////////////////////////////////////////////////////////////////////
// Common traits
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Default for &'a [T] {
    /// Creates an empty slice.
    fn default() -> &'a [T] { &[] }
}
#[stable(feature = "mut_slice_default", since = "1.5.0")]
impl<'a, T> Default for &'a mut [T] {
    /// Creates a mutable empty slice.
    fn default() -> &'a mut [T] { &mut [] }
}
//
// Iterators
//
#[stable(feature = "rust1", since = "1.0.0")]
// Lets `for x in &slice` iterate by shared reference via `iter()`.
impl<'a, T> IntoIterator for &'a [T] {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
// Lets `for x in &mut slice` iterate by mutable reference via `iter_mut()`.
impl<'a, T> IntoIterator for &'a mut [T] {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;
    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
// Reports `size_of::<T>()` given only a pointer, letting the macros above
// query the element size without naming `T` explicitly.
#[inline(always)]
fn size_from_ptr<T>(_: *const T) -> usize {
    mem::size_of::<T>()
}
// The shared definition of the `Iter` and `IterMut` iterators
macro_rules! iterator {
(struct $name:ident -> $ptr:ty, $elem:ty, $mkref:ident) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for $name<'a, T> {
type Item = $elem;
#[inline]
fn next(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
if mem::size_of::<T>() != 0 {
assume(!self.ptr.is_null());
assume(!self.end.is_null());
}
if self.ptr == self.end {
None
} else {
Some($mkref!(self.ptr.post_inc()))
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = ptrdistance(self.ptr, self.end);
(exact, Some(exact))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<$elem> {
// Call helper method. Can't put the definition here because mut versus const.
self.iter_nth(n)
}
#[inline]
fn last(mut self) -> Option<$elem> {
self.next_back()
}
fn all<F>(&mut self, mut predicate: F) -> bool
where F: FnMut(Self::Item) -> bool,
{
self.search_while(true, move |elt| {
if predicate(elt) {
SearchWhile::Continue
} else {
SearchWhile::Done(false)
}
})
}
fn any<F>(&mut self, mut predicate: F) -> bool
where F: FnMut(Self::Item) -> bool,
{
!self.all(move |elt| !predicate(elt))
}
fn find<F>(&mut self, mut predicate: F) -> Option<Self::Item>
where F: FnMut(&Self::Item) -> bool,
{
self.search_while(None, move |elt| {
if predicate(&elt) {
SearchWhile::Done(Some(elt))
} else {
SearchWhile::Continue
}
})
}
fn position<F>(&mut self, mut predicate: F) -> Option<usize>
where F: FnMut(Self::Item) -> bool,
{
let mut index = 0;
self.search_while(None, move |elt| {
if predicate(elt) {
SearchWhile::Done(Some(index))
} else {
index += 1;
SearchWhile::Continue
}
})
}
fn rposition<F>(&mut self, mut predicate: F) -> Option<usize>
where F: FnMut(Self::Item) -> bool,
{
let mut index = self.len();
self.rsearch_while(None, move |elt| {
index -= 1;
if predicate(elt) {
SearchWhile::Done(Some(index))
} else {
SearchWhile::Continue
}
})
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for $name<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
if mem::size_of::<T>() != 0 {
assume(!self.ptr.is_null());
assume(!self.end.is_null());
}
if self.end == self.ptr {
None
} else {
Some($mkref!(self.end.pre_dec()))
}
}
}
}
// search_while is a generalization of the internal iteration methods.
impl<'a, T> $name<'a, T> {
// search through the iterator's element using the closure `g`.
// if no element was found, return `default`.
fn search_while<Acc, G>(&mut self, default: Acc, mut g: G) -> Acc
where Self: Sized,
G: FnMut($elem) -> SearchWhile<Acc>
{
// manual unrolling is needed when there are conditional exits from the loop
unsafe {
while ptrdistance(self.ptr, self.end) >= 4 {
search_while!(g($mkref!(self.ptr.post_inc())));
search_while!(g($mkref!(self.ptr.post_inc())));
search_while!(g($mkref!(self.ptr.post_inc())));
search_while!(g($mkref!(self.ptr.post_inc())));
}
while self.ptr != self.end {
search_while!(g($mkref!(self.ptr.post_inc())));
}
}
default
}
fn rsearch_while<Acc, G>(&mut self, default: Acc, mut g: G) -> Acc
where Self: Sized,
G: FnMut($elem) -> SearchWhile<Acc>
{
unsafe {
while ptrdistance(self.ptr, self.end) >= 4 {
search_while!(g($mkref!(self.end.pre_dec())));
search_while!(g($mkref!(self.end.pre_dec())));
search_while!(g($mkref!(self.end.pre_dec())));
search_while!(g($mkref!(self.end.pre_dec())));
}
while self.ptr != self.end {
search_while!(g($mkref!(self.end.pre_dec())));
}
}
default
}
}
}
}
// Build a shared slice `&[T]` from a raw `[start, end)` pointer pair.
// For zero-sized `T` the raw address difference *is* the element count
// (the iterators encode length arithmetically in that case), and a
// dangling non-null pointer (`1`) is substituted because a slice's data
// pointer must never be null.
macro_rules! make_slice {
    ($start: expr, $end: expr) => {{
        let start = $start;
        let diff = ($end as usize).wrapping_sub(start as usize);
        if size_from_ptr(start) == 0 {
            // use a non-null pointer value
            unsafe { from_raw_parts(1 as *const _, diff) }
        } else {
            let len = diff / size_from_ptr(start);
            unsafe { from_raw_parts(start, len) }
        }
    }}
}
// Mutable twin of `make_slice!`: identical length computation, but goes
// through `from_raw_parts_mut` so the expansion yields `&mut [T]`.
macro_rules! make_mut_slice {
    ($start: expr, $end: expr) => {{
        let start = $start;
        let diff = ($end as usize).wrapping_sub(start as usize);
        if size_from_ptr(start) == 0 {
            // use a non-null pointer value
            unsafe { from_raw_parts_mut(1 as *mut _, diff) }
        } else {
            let len = diff / size_from_ptr(start);
            unsafe { from_raw_parts_mut(start, len) }
        }
    }}
}
// An enum used for controlling the execution of `.search_while()`.
enum SearchWhile<T> {
    // Continue searching
    Continue,
    // Fold is complete and will return this value
    Done(T),
}
// helper macro for search while's control flow
// Expands to an early `return` from the *enclosing function* as soon as
// the closure reports `Done`; on `Continue` it simply falls through.
macro_rules! search_while {
    ($e:expr) => {
        match $e {
            SearchWhile::Continue => { }
            SearchWhile::Done(done) => return done,
        }
    }
}
/// Immutable slice iterator
///
/// This struct is created by the [`iter`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
/// let slice = &[1, 2, 3];
///
/// // Then, we iterate over it:
/// for element in slice.iter() {
///     println!("{}", element);
/// }
/// ```
///
/// [`iter`]: ../../std/primitive.slice.html#method.iter
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // First element not yet yielded.
    ptr: *const T,
    // One-past-the-end bound; for zero-sized `T` this presumably encodes
    // the remaining length as a raw address (see `make_slice!`/`ptrdistance`).
    end: *const T,
    // Ties the raw pointers to the borrowed slice's lifetime.
    _marker: marker::PhantomData<&'a T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
    /// Formats as `Iter(<remaining elements>)`, showing only the
    /// not-yet-yielded portion of the slice.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = f.debug_tuple("Iter");
        builder.field(&self.as_slice());
        builder.finish()
    }
}
// SAFETY (informal): `Iter` only ever hands out `&T`, so sharing it
// across threads is sound exactly when `&T` is sendable, i.e. `T: Sync`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
// `Send` is also bounded by `T: Sync` (not `T: Send`) because moving the
// iterator moves only shared references to `T`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
impl<'a, T> Iter<'a, T> {
    /// View the underlying data as a subslice of the original data.
    ///
    /// This has the same lifetime as the original slice, and so the
    /// iterator can continue to be used while this exists.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // First, we declare a type which has the `iter` method to get the `Iter`
    /// // struct (&[usize here]):
    /// let slice = &[1, 2, 3];
    ///
    /// // Then, we get the iterator:
    /// let mut iter = slice.iter();
    /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
    /// println!("{:?}", iter.as_slice());
    ///
    /// // Next, we move to the second element of the slice:
    /// iter.next();
    /// // Now `as_slice` returns "[2, 3]":
    /// println!("{:?}", iter.as_slice());
    /// ```
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn as_slice(&self) -> &'a [T] {
        make_slice!(self.ptr, self.end)
    }
    // Helper function for Iter::nth
    // Skips `n` elements in O(1): bounds-checks via `get(n)`, then bumps
    // `ptr` past the returned element. On out-of-range `n` the iterator
    // is exhausted (`ptr = end`), matching `Iterator::nth`'s contract.
    fn iter_nth(&mut self, n: usize) -> Option<&'a T> {
        match self.as_slice().get(n) {
            Some(elem_ref) => unsafe {
                self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
                Some(elem_ref)
            },
            None => {
                self.ptr = self.end;
                None
            }
        }
    }
}
// Generates the shared Iterator/DoubleEndedIterator machinery (next,
// search_while, etc.) for `Iter` via the `iterator!` macro defined above.
iterator!{struct Iter -> *const T, &'a T, make_ref}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<'a, T> TrustedLen for Iter<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Iter<'a, T> {
    /// Duplicates the iterator; both copies independently walk the same
    /// underlying slice from the current position.
    fn clone(&self) -> Iter<'a, T> {
        Iter {
            ptr: self.ptr,
            end: self.end,
            _marker: self._marker,
        }
    }
}
#[stable(feature = "slice_iter_as_ref", since = "1.12.0")]
impl<'a, T> AsRef<[T]> for Iter<'a, T> {
    /// Borrows the not-yet-yielded remainder of the slice.
    fn as_ref(&self) -> &[T] {
        let remaining = self.as_slice();
        remaining
    }
}
/// Mutable slice iterator.
///
/// This struct is created by the [`iter_mut`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has `iter_mut` method to get the `IterMut`
/// // struct (&[usize here]):
/// let mut slice = &mut [1, 2, 3];
///
/// // Then, we iterate over it and increment each element value:
/// for element in slice.iter_mut() {
///     *element += 1;
/// }
///
/// // We now have "[2, 3, 4]":
/// println!("{:?}", slice);
/// ```
///
/// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    // First element not yet yielded.
    ptr: *mut T,
    // One-past-the-end bound (length encoded arithmetically for ZSTs).
    end: *mut T,
    // Ties the raw pointers to the unique borrow's lifetime.
    _marker: marker::PhantomData<&'a mut T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
    // Formats the remaining elements; builds a *shared* slice view to
    // avoid needing `&mut self`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("IterMut")
            .field(&make_slice!(self.ptr, self.end))
            .finish()
    }
}
// SAFETY (informal): `IterMut` behaves like `&mut [T]`, so it is `Sync`
// when `T: Sync` and `Send` when `T: Send`, mirroring `&mut T`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
impl<'a, T> IterMut<'a, T> {
    /// View the underlying data as a subslice of the original data.
    ///
    /// To avoid creating `&mut` references that alias, this is forced
    /// to consume the iterator. Consider using the `Slice` and
    /// `SliceMut` implementations for obtaining slices with more
    /// restricted lifetimes that do not consume the iterator.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
    /// // struct (&[usize here]):
    /// let mut slice = &mut [1, 2, 3];
    ///
    /// {
    ///     // Then, we get the iterator:
    ///     let mut iter = slice.iter_mut();
    ///     // We move to next element:
    ///     iter.next();
    ///     // So if we print what `into_slice` method returns here, we have "[2, 3]":
    ///     println!("{:?}", iter.into_slice());
    /// }
    ///
    /// // Now let's modify a value of the slice:
    /// {
    ///     // First we get back the iterator:
    ///     let mut iter = slice.iter_mut();
    ///     // We change the value of the first element of the slice returned by the `next` method:
    ///     *iter.next().unwrap() += 1;
    /// }
    /// // Now slice is "[2, 2, 3]":
    /// println!("{:?}", slice);
    /// ```
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn into_slice(self) -> &'a mut [T] {
        make_mut_slice!(self.ptr, self.end)
    }
    // Helper function for IterMut::nth
    // Same O(1) skip as `Iter::iter_nth`, but bounds-checks through a
    // freshly built mutable view so the returned `&'a mut T` is unique.
    fn iter_nth(&mut self, n: usize) -> Option<&'a mut T> {
        match make_mut_slice!(self.ptr, self.end).get_mut(n) {
            Some(elem_ref) => unsafe {
                self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1));
                Some(elem_ref)
            },
            None => {
                self.ptr = self.end;
                None
            }
        }
    }
}
// Generates next/next_back/search_while etc. for `IterMut`.
iterator!{struct IterMut -> *mut T, &'a mut T, make_ref_mut}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<'a, T> TrustedLen for IterMut<'a, T> {}
// Return the number of elements of `T` from `start` to `end`.
// Return the arithmetic difference if `T` is zero size.
// (The zero-size case matches the ZST length encoding used by the
// iterators, where `end` is a raw address rather than a real pointer.)
#[inline(always)]
fn ptrdistance<T>(start: *const T, end: *const T) -> usize {
    let byte_span = (end as usize).wrapping_sub(start as usize);
    match mem::size_of::<T>() {
        0 => byte_span,
        elem_size => byte_span / elem_size,
    }
}
// Extension methods for raw pointers, used by the iterators
// Abstracts over `*const T` / `*mut T` so the `iterator!` macro can use
// the same bump/decrement helpers for both `Iter` and `IterMut`.
trait PointerExt : Copy {
    // ZST-aware offset; delegates to the `slice_offset!` macro.
    unsafe fn slice_offset(self, i: isize) -> Self;
    /// Increment self by 1, but return the old value
    #[inline(always)]
    unsafe fn post_inc(&mut self) -> Self {
        let current = *self;
        *self = self.slice_offset(1);
        current
    }
    /// Decrement self by 1, and return the new value
    #[inline(always)]
    unsafe fn pre_dec(&mut self) -> Self {
        *self = self.slice_offset(-1);
        *self
    }
}
impl<T> PointerExt for *const T {
    #[inline(always)]
    unsafe fn slice_offset(self, i: isize) -> Self {
        slice_offset!(self, i)
    }
}
impl<T> PointerExt for *mut T {
    #[inline(always)]
    unsafe fn slice_offset(self, i: isize) -> Self {
        slice_offset!(self, i)
    }
}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
#[doc(hidden)]
trait SplitIter: DoubleEndedIterator {
    /// Mark the underlying iterator as complete, extracting the remaining
    /// portion of the slice.
    fn finish(&mut self) -> Option<Self::Item>;
}
/// An iterator over subslices separated by elements that match a predicate
/// function.
///
/// This struct is created by the [`split`] method on [slices].
///
/// [`split`]: ../../std/primitive.slice.html#method.split
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
    // Remaining, not-yet-split portion of the slice.
    v: &'a [T],
    // Separator predicate; matching elements are consumed, not yielded.
    pred: P,
    // Set once the final piece has been handed out.
    finished: bool
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for Split<'a, T, P> where P: FnMut(&T) -> bool {
    // `pred` is an arbitrary closure and cannot be printed, so only the
    // data fields appear in the debug output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Split")
            .field("v", &self.v)
            .field("finished", &self.finished)
            .finish()
    }
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
    fn clone(&self) -> Split<'a, T, P> {
        Split {
            v: self.v,
            pred: self.pred.clone(),
            finished: self.finished,
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a [T];
    /// Yields the subslice up to (excluding) the next separator element;
    /// once no separator remains, `finish` hands out the final piece.
    #[inline]
    fn next(&mut self) -> Option<&'a [T]> {
        if self.finished {
            return None;
        }
        let split_at = self.v.iter().position(|x| (self.pred)(x));
        match split_at {
            Some(idx) => {
                // Yield everything before the separator; drop the separator itself.
                let head = Some(&self.v[..idx]);
                self.v = &self.v[idx + 1..];
                head
            }
            None => self.finish(),
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.finished {
            // No match ever => one piece; every element matches => len + 1 empty pieces.
            false => (1, Some(self.v.len() + 1)),
            true => (0, Some(0)),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
    // Mirror of `next`: splits at the *last* separator and yields the tail.
    #[inline]
    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.finished { return None; }
        match self.v.iter().rposition(|x| (self.pred)(x)) {
            None => self.finish(),
            Some(idx) => {
                let ret = Some(&self.v[idx + 1..]);
                self.v = &self.v[..idx];
                ret
            }
        }
    }
}
impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
    // Hands out the remaining piece exactly once, then stays exhausted.
    #[inline]
    fn finish(&mut self) -> Option<&'a [T]> {
        if self.finished { None } else { self.finished = true; Some(self.v) }
    }
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T, P> FusedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`.
///
/// This struct is created by the [`split_mut`] method on [slices].
///
/// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
    // Remaining, not-yet-split portion; taken out with `mem::replace`
    // when yielding so each returned `&mut [T]` is unique.
    v: &'a mut [T],
    pred: P,
    finished: bool
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    // `pred` is an un-printable closure, so only data fields are shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SplitMut")
            .field("v", &self.v)
            .field("finished", &self.finished)
            .finish()
    }
}
impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    #[inline]
    fn finish(&mut self) -> Option<&'a mut [T]> {
        if self.finished {
            None
        } else {
            self.finished = true;
            // Swap an empty slice in so `self` no longer aliases the result.
            Some(mem::replace(&mut self.v, &mut []))
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
    type Item = &'a mut [T];
    #[inline]
    fn next(&mut self) -> Option<&'a mut [T]> {
        if self.finished { return None; }
        let idx_opt = { // work around borrowck limitations
            // (immutable borrow of `self.v` and mutable borrow of
            // `self.pred` must be scoped apart)
            let pred = &mut self.pred;
            self.v.iter().position(|x| (*pred)(x))
        };
        match idx_opt {
            None => self.finish(),
            Some(idx) => {
                // Take ownership of the buffer, carve off the head, keep the
                // tail (minus the separator element) for later iterations.
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                self.v = &mut tail[1..];
                Some(head)
            }
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.finished {
            (0, Some(0))
        } else {
            // if the predicate doesn't match anything, we yield one slice
            // if it matches every element, we yield len+1 empty slices.
            (1, Some(self.v.len() + 1))
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
    P: FnMut(&T) -> bool,
{
    // Mirror of `next`: splits at the *last* separator, yields the tail.
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut [T]> {
        if self.finished { return None; }
        let idx_opt = { // work around borrowck limitations
            let pred = &mut self.pred;
            self.v.iter().rposition(|x| (*pred)(x))
        };
        match idx_opt {
            None => self.finish(),
            Some(idx) => {
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                self.v = head;
                Some(&mut tail[1..])
            }
        }
    }
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T, P> FusedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
/// An private iterator over subslices separated by elements that
/// match a predicate function, splitting at most a fixed number of
/// times.
#[derive(Debug)]
struct GenericSplitN<I> {
    // Underlying splitting iterator (forward or backward).
    iter: I,
    // How many pieces may still be produced.
    count: usize,
    // When set, pieces are taken from the back via `next_back`.
    invert: bool
}
impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
    type Item = T;
    /// Delegates to the inner iterator until one piece remains, at which
    /// point `finish` hands back everything left, unsplit.
    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.count == 0 {
            return None;
        }
        self.count -= 1;
        if self.count == 0 {
            self.iter.finish()
        } else if self.invert {
            self.iter.next_back()
        } else {
            self.iter.next()
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (lower, upper_opt) = self.iter.size_hint();
        // The upper bound can never exceed the remaining split budget.
        let capped_upper = upper_opt.map(|upper| cmp::min(self.count, upper));
        (lower, capped_upper)
    }
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
///
/// This struct is created by the [`splitn`] method on [slices].
///
/// [`splitn`]: ../../std/primitive.slice.html#method.splitn
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
    // All four *SplitN* types are thin wrappers that delegate to
    // `GenericSplitN` (see `forward_iterator!` below).
    inner: GenericSplitN<Split<'a, T, P>>
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitN<'a, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SplitN")
            .field("inner", &self.inner)
            .finish()
    }
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn`] method on [slices].
///
/// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
    inner: GenericSplitN<Split<'a, T, P>>
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitN<'a, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RSplitN")
            .field("inner", &self.inner)
            .finish()
    }
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
///
/// This struct is created by the [`splitn_mut`] method on [slices].
///
/// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
    inner: GenericSplitN<SplitMut<'a, T, P>>
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for SplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SplitNMut")
            .field("inner", &self.inner)
            .finish()
    }
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn_mut`] method on [slices].
///
/// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
    inner: GenericSplitN<SplitMut<'a, T, P>>
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for RSplitNMut<'a, T, P> where P: FnMut(&T) -> bool {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RSplitNMut")
            .field("inner", &self.inner)
            .finish()
    }
}
// Generates a delegating `Iterator` impl (plus `FusedIterator`) that
// forwards `next`/`size_hint` to the wrapped `GenericSplitN`.
macro_rules! forward_iterator {
    ($name:ident: $elem:ident, $iter_of:ty) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
            P: FnMut(&T) -> bool
        {
            type Item = $iter_of;
            #[inline]
            fn next(&mut self) -> Option<$iter_of> {
                self.inner.next()
            }
            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                self.inner.size_hint()
            }
        }
        #[unstable(feature = "fused", issue = "35602")]
        impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
            where P: FnMut(&T) -> bool {}
    }
}
forward_iterator! { SplitN: T, &'a [T] }
forward_iterator! { RSplitN: T, &'a [T] }
forward_iterator! { SplitNMut: T, &'a mut [T] }
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
///
/// This struct is created by the [`windows`] method on [slices].
///
/// [`windows`]: ../../std/primitive.slice.html#method.windows
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Windows<'a, T:'a> {
    // Remaining slice; each step yields `v[..size]` then drops `v[0]`.
    v: &'a [T],
    // Window length (invariant; windows overlap by `size - 1` elements).
    size: usize
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Windows<'a, T> {
    fn clone(&self) -> Windows<'a, T> {
        Windows {
            v: self.v,
            size: self.size,
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Windows<'a, T> {
    type Item = &'a [T];
    // Yields the leading window, then advances by exactly one element.
    #[inline]
    fn next(&mut self) -> Option<&'a [T]> {
        if self.size > self.v.len() {
            None
        } else {
            let ret = Some(&self.v[..self.size]);
            self.v = &self.v[1..];
            ret
        }
    }
    // Exact: there are `len - size + 1` windows (or none if size > len).
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.size > self.v.len() {
            (0, Some(0))
        } else {
            let size = self.v.len() - self.size + 1;
            (size, Some(size))
        }
    }
    #[inline]
    fn count(self) -> usize {
        self.len()
    }
    // O(1) skip: window n starts at index n and ends at n + size
    // (`overflowing_add` guards against wraparound on huge n).
    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let (end, overflow) = self.size.overflowing_add(n);
        if end > self.v.len() || overflow {
            self.v = &[];
            None
        } else {
            let nth = &self.v[n..end];
            self.v = &self.v[n+1..];
            Some(nth)
        }
    }
    // The last window is the trailing `size` elements.
    #[inline]
    fn last(self) -> Option<Self::Item> {
        if self.size > self.v.len() {
            None
        } else {
            let start = self.v.len() - self.size;
            Some(&self.v[start..])
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
    // Yields the trailing window, then shrinks the slice by one from the back.
    #[inline]
    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.size > self.v.len() {
            None
        } else {
            let ret = Some(&self.v[self.v.len()-self.size..]);
            self.v = &self.v[..self.v.len()-1];
            ret
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Windows<'a, T> {}
/// An iterator over a slice in (non-overlapping) chunks (`size` elements at a
/// time).
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`chunks`] method on [slices].
///
/// [`chunks`]: ../../std/primitive.slice.html#method.chunks
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chunks<'a, T:'a> {
    // Remaining, not-yet-chunked portion of the slice.
    v: &'a [T],
    // Requested chunk length; the final chunk may be shorter.
    size: usize
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Chunks<'a, T> {
    fn clone(&self) -> Chunks<'a, T> {
        Chunks {
            v: self.v,
            size: self.size,
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Chunks<'a, T> {
    type Item = &'a [T];
    // Carves `min(len, size)` elements off the front.
    #[inline]
    fn next(&mut self) -> Option<&'a [T]> {
        if self.v.is_empty() {
            None
        } else {
            let chunksz = cmp::min(self.v.len(), self.size);
            let (fst, snd) = self.v.split_at(chunksz);
            self.v = snd;
            Some(fst)
        }
    }
    // Exact: ceil(len / size) chunks remain.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.v.is_empty() {
            (0, Some(0))
        } else {
            let n = self.v.len() / self.size;
            let rem = self.v.len() % self.size;
            let n = if rem > 0 { n+1 } else { n };
            (n, Some(n))
        }
    }
    #[inline]
    fn count(self) -> usize {
        self.len()
    }
    // O(1) skip: chunk n starts at n * size (`overflowing_mul` guards
    // wraparound; `checked_add` caps the end at the slice length).
    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let (start, overflow) = n.overflowing_mul(self.size);
        if start >= self.v.len() || overflow {
            self.v = &[];
            None
        } else {
            let end = match start.checked_add(self.size) {
                Some(sum) => cmp::min(self.v.len(), sum),
                None => self.v.len(),
            };
            let nth = &self.v[start..end];
            self.v = &self.v[end..];
            Some(nth)
        }
    }
    // The last chunk starts at the largest multiple of `size` below `len`.
    #[inline]
    fn last(self) -> Option<Self::Item> {
        if self.v.is_empty() {
            None
        } else {
            let start = (self.v.len() - 1) / self.size * self.size;
            Some(&self.v[start..])
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
    // Takes the (possibly short) remainder chunk off the back first, then
    // full-sized chunks.
    #[inline]
    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.v.is_empty() {
            None
        } else {
            let remainder = self.v.len() % self.size;
            let chunksz = if remainder != 0 { remainder } else { self.size };
            let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
            self.v = fst;
            Some(snd)
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Chunks<'a, T> {}
/// An iterator over a slice in (non-overlapping) mutable chunks (`size`
/// elements at a time). When the slice len is not evenly divided by the chunk
/// size, the last slice of the iteration will be the remainder.
///
/// This struct is created by the [`chunks_mut`] method on [slices].
///
/// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ChunksMut<'a, T:'a> {
    // Remaining portion; taken out with `mem::replace` when yielding so
    // each returned `&mut [T]` is unique.
    v: &'a mut [T],
    // Requested chunk length; the final chunk may be shorter.
    chunk_size: usize
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for ChunksMut<'a, T> {
    type Item = &'a mut [T];
    // Carves `min(len, chunk_size)` elements off the front; swaps an
    // empty slice into `self.v` first so the head can be handed out
    // without aliasing.
    #[inline]
    fn next(&mut self) -> Option<&'a mut [T]> {
        if self.v.is_empty() {
            None
        } else {
            let sz = cmp::min(self.v.len(), self.chunk_size);
            let tmp = mem::replace(&mut self.v, &mut []);
            let (head, tail) = tmp.split_at_mut(sz);
            self.v = tail;
            Some(head)
        }
    }
    // Exact: ceil(len / chunk_size) chunks remain.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.v.is_empty() {
            (0, Some(0))
        } else {
            let n = self.v.len() / self.chunk_size;
            let rem = self.v.len() % self.chunk_size;
            let n = if rem > 0 { n + 1 } else { n };
            (n, Some(n))
        }
    }
    #[inline]
    fn count(self) -> usize {
        self.len()
    }
    // O(1) skip, mirroring `Chunks::nth` but with the ownership-juggling
    // needed for mutable slices.
    #[inline]
    fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
        let (start, overflow) = n.overflowing_mul(self.chunk_size);
        if start >= self.v.len() || overflow {
            self.v = &mut [];
            None
        } else {
            let end = match start.checked_add(self.chunk_size) {
                Some(sum) => cmp::min(self.v.len(), sum),
                None => self.v.len(),
            };
            let tmp = mem::replace(&mut self.v, &mut []);
            let (head, tail) = tmp.split_at_mut(end);
            let (_, nth) =  head.split_at_mut(start);
            self.v = tail;
            Some(nth)
        }
    }
    // The last chunk starts at the largest multiple of `chunk_size` below `len`.
    #[inline]
    fn last(self) -> Option<Self::Item> {
        if self.v.is_empty() {
            None
        } else {
            let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
            Some(&mut self.v[start..])
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
    // Takes the (possibly short) remainder chunk off the back first.
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut [T]> {
        if self.v.is_empty() {
            None
        } else {
            let remainder = self.v.len() % self.chunk_size;
            let sz = if remainder != 0 { remainder } else { self.chunk_size };
            let tmp = mem::replace(&mut self.v, &mut []);
            let tmp_len = tmp.len();
            let (head, tail) = tmp.split_at_mut(tmp_len - sz);
            self.v = head;
            Some(tail)
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
//
// Free functions
//
/// Forms a slice from a pointer and a length.
///
/// The `len` argument is the number of **elements**, not the number of bytes.
///
/// # Safety
///
/// This function is unsafe as there is no guarantee that the given pointer is
/// valid for `len` elements, nor whether the lifetime inferred is a suitable
/// lifetime for the returned slice.
///
/// `p` must be non-null, even for zero-length slices.
///
/// # Caveat
///
/// The lifetime for the returned slice is inferred from its usage. To
/// prevent accidental misuse, it's suggested to tie the lifetime to whichever
/// source lifetime is safe in the context, such as by providing a helper
/// function taking the lifetime of a host value for the slice, or by explicit
/// annotation.
///
/// # Examples
///
/// ```
/// use std::slice;
///
/// // manifest a slice out of thin air!
/// let ptr = 0x1234 as *const usize;
/// let amt = 10;
/// unsafe {
///     let slice = slice::from_raw_parts(ptr, amt);
/// }
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
    // Reinterprets a (data, len) pair as a fat slice pointer via the
    // internal `Repr` layout struct.
    mem::transmute(Repr { data: p, len: len })
}
/// Performs the same functionality as `from_raw_parts`, except that a mutable
/// slice is returned.
///
/// This function is unsafe for the same reasons as `from_raw_parts`, as well
/// as not being able to provide a non-aliasing guarantee of the returned
/// mutable slice.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] {
    mem::transmute(Repr { data: p, len: len })
}
//
// Comparison traits
//
extern {
    /// Call implementation provided memcmp
    ///
    /// Interprets the data as u8.
    ///
    /// Return 0 for equal, < 0 for less than and > 0 for greater
    /// than.
    // FIXME(#32610): Return type should be c_int
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
// Delegates to the `SlicePartialEq` specialization trait below, which
// picks a memcmp fast path for bytewise-comparable element types.
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
    fn eq(&self, other: &[B]) -> bool {
        SlicePartialEq::equal(self, other)
    }
    fn ne(&self, other: &[B]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
/// Implements comparison of vectors lexicographically.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}
/// Implements comparison of vectors lexicographically.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;
    fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
}
// Generic slice equality
// (`default fn` marks this as the overridable base case for the
// specialization below.)
impl<A, B> SlicePartialEq<B> for [A]
    where A: PartialEq<B>
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }
        for i in 0..self.len() {
            if !self[i].eq(&other[i]) {
                return false;
            }
        }
        true
    }
}
// Use memcmp for bytewise equality when the types allow
impl<A> SlicePartialEq<A> for [A]
    where A: PartialEq<A> + BytewiseEquality
{
    fn equal(&self, other: &[A]) -> bool {
        if self.len() != other.len() {
            return false;
        }
        // Identical data pointer + equal length => trivially equal,
        // skip the memcmp entirely.
        if self.as_ptr() == other.as_ptr() {
            return true;
        }
        unsafe {
            let size = mem::size_of_val(self);
            memcmp(self.as_ptr() as *const u8,
                   other.as_ptr() as *const u8, size) == 0
        }
    }
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd<B> {
    fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
}
impl<A> SlicePartialOrd<A> for [A]
    where A: PartialOrd
{
    // Base case: element-wise lexicographic comparison; ties are broken
    // by length.
    default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
        let l = cmp::min(self.len(), other.len());
        // Slice to the loop iteration range to enable bound check
        // elimination in the compiler
        let lhs = &self[..l];
        let rhs = &other[..l];
        for i in 0..l {
            match lhs[i].partial_cmp(&rhs[i]) {
                Some(Ordering::Equal) => (),
                non_eq => return non_eq,
            }
        }
        self.len().partial_cmp(&other.len())
    }
}
// When `A: Ord`, comparison is total, so delegate to `SliceOrd` (which
// itself has a memcmp fast path for `[u8]`).
impl<A> SlicePartialOrd<A> for [A]
    where A: Ord
{
    default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(self, other))
    }
}
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd<B> {
    fn compare(&self, other: &[B]) -> Ordering;
}
impl<A> SliceOrd<A> for [A]
    where A: Ord
{
    // Base case: element-wise lexicographic comparison; ties broken by length.
    default fn compare(&self, other: &[A]) -> Ordering {
        let l = cmp::min(self.len(), other.len());
        // Slice to the loop iteration range to enable bound check
        // elimination in the compiler
        let lhs = &self[..l];
        let rhs = &other[..l];
        for i in 0..l {
            match lhs[i].cmp(&rhs[i]) {
                Ordering::Equal => (),
                non_eq => return non_eq,
            }
        }
        self.len().cmp(&other.len())
    }
}
// memcmp compares a sequence of unsigned bytes lexicographically.
// this matches the order we want for [u8], but no others (not even [i8]).
impl SliceOrd<u8> for [u8] {
    #[inline]
    fn compare(&self, other: &[u8]) -> Ordering {
        let order = unsafe {
            memcmp(self.as_ptr(), other.as_ptr(),
                   cmp::min(self.len(), other.len()))
        };
        if order == 0 {
            // Common prefix equal: the shorter slice orders first.
            self.len().cmp(&other.len())
        } else if order < 0 {
            Less
        } else {
            Greater
        }
    }
}
#[doc(hidden)]
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
trait BytewiseEquality { }
// Implements a marker trait for each listed type.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname for $ty { }
        )*
    }
}
// NB: floats are deliberately absent (NaN != NaN, -0.0 == 0.0 break
// bytewise equality for them).
impl_marker_for!(BytewiseEquality,
                 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
#[doc(hidden)]
// SAFETY (informal): callers of `TrustedRandomAccess::get_unchecked`
// must keep `i` in bounds; the pointer offset is then valid for the
// iterator's lifetime.
unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
        &*self.ptr.offset(i as isize)
    }
    // Plain memory reads: reordering/skipping accesses is observationally safe.
    fn may_have_side_effect() -> bool { false }
}
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
        &mut *self.ptr.offset(i as isize)
    }
    fn may_have_side_effect() -> bool { false }
}
| 29.813524 | 99 | 0.515482 |
8f6029cdaaeb687558538d841f7bbd6588839849 | 1,887 | use rand::distributions::{Distribution, Uniform};
use sdl2::event::Event;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::gfx::primitives::DrawRenderer;
// Logical sampling lattice: random points are drawn on a ROWS x COLS
// grid and scaled down to the 600x600 window for display.
const ROWS: u128 = 100000;
const COLS: u128 = 100000;
// Monte-Carlo estimation of pi, visualised with SDL2: random lattice
// points inside the inscribed circle are drawn green, outside red, and
// the running estimate 4 * (inside / total) is shown in the window title.
fn main() -> Result<(), String> {
    // Circle radius in lattice units (half the 100000-wide grid).
    let r = 50000.0;
    // One uniform draw over 0..ROWS*COLS encodes a full (x, y) lattice point.
    let dist = Uniform::from(0..100000 as u128 * 100000 as u128);
    let mut rng = rand::thread_rng();
    let sdl2_context = sdl2::init()?;
    let sdl2_video_subsystem = sdl2_context.video()?;
    let window = sdl2_video_subsystem.window("PI", 600, 600)
        .position_centered()
        .opengl()
        .build()
        .map_err(|e| e.to_string())?;
    let mut event_pump = sdl2_context.event_pump()?;
    let mut canvas = window.into_canvas().build().map_err(|e| e.to_string())?;
    // Dark background plus a white outline of the inscribed circle.
    canvas.set_draw_color(Color::RGB(15,15,15));
    canvas.clear();
    canvas.circle(300,300,300, Color::WHITE)?;
    // points = total samples so far, circle = samples landing inside.
    let mut points = 0.0;
    let mut circle = 0.0;
    'running: loop {
        // Drain pending window events; quit closes the loop.
        for event in event_pump.poll_iter() {
            match event {
                Event::Quit {..} => break 'running,
                _ => {}
            }
        }
        // Draw samples in batches of 10000 per frame.
        for _ in 0..10000 {
            let pos = dist.sample(&mut rng);
            points += 1.0;
            // Decode the linear draw into lattice coordinates.
            let x = pos % ROWS;
            let y = pos / ROWS;
            // Distance from the circle centre (r, r).
            let d = f64::hypot(x as f64 - r, y as f64 - r);
            if d < r {
                circle += 1.0;
                canvas.set_draw_color(Color::GREEN);
            } else {
                canvas.set_draw_color(Color::RED);
            }
            // Scale the lattice point down to a 1x1 pixel in the window.
            canvas.fill_rect(Rect::new((x * 600 as u128 / ROWS) as i32,
                                       (y * 600 as u128 / COLS) as i32,1,1))?;
        }
        canvas.present();
        // Show the current estimate 4 * inside/total in the title bar.
        canvas.window_mut().set_title(
            &format!("PI: {}", 4.0 * circle / points))
            .map_err(|e| e.to_string())?;
    }
    Ok(())
}
| 31.983051 | 78 | 0.517753 |
ef1ae807453f2a6b1315ecb1e37c390cbeab7ec9 | 95,378 | use self::EnumTagInfo::*;
use self::MemberDescriptionFactory::*;
use self::RecursiveTypeDescription::*;
use super::namespace::mangled_name_of_instance;
use super::type_names::compute_debuginfo_type_name;
use super::utils::{
create_DIArray, debug_context, get_namespace_for_item, is_node_local_to_unit, DIB,
};
use super::CrateDebugContext;
use crate::abi;
use crate::common::CodegenCx;
use crate::llvm;
use crate::llvm::debuginfo::{
DIArray, DICompositeType, DIDescriptor, DIFile, DIFlags, DILexicalBlock, DIScope, DIType,
DebugEmissionKind,
};
use crate::value::Value;
use rustc_ast as ast;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::const_cstr;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_fs_util::path_to_c_string;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::ich::NodeIdHashingMode;
use rustc_middle::mir::interpret::truncate;
use rustc_middle::mir::{self, Field, GeneratorLayout};
use rustc_middle::ty::layout::{self, IntegerExt, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::Instance;
use rustc_middle::ty::{self, AdtKind, GeneratorSubsts, ParamEnv, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_session::config::{self, DebugInfo};
use rustc_span::symbol::{Interner, Symbol};
use rustc_span::{self, SourceFile, SourceFileHash, Span};
use rustc_target::abi::{Abi, Align, HasDataLayout, Integer, LayoutOf, TagEncoding};
use rustc_target::abi::{Int, Pointer, F32, F64};
use rustc_target::abi::{Primitive, Size, VariantIdx, Variants};
use tracing::debug;
use libc::{c_longlong, c_uint};
use std::collections::hash_map::Entry;
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::iter;
use std::path::{Path, PathBuf};
use std::ptr;
// `&llvm::Metadata` values are compared, hashed, and debug-printed by
// address. NOTE(review): this treats two structurally equal nodes at
// different addresses as unequal — presumably fine because LLVM uniques
// metadata nodes per context; confirm before relying on `==` for more.
impl PartialEq for llvm::Metadata {
    fn eq(&self, other: &Self) -> bool {
        ptr::eq(self, other)
    }
}
impl Eq for llvm::Metadata {}
impl Hash for llvm::Metadata {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // Hash the address, consistent with the address-based `PartialEq`.
        (self as *const Self).hash(hasher);
    }
}
impl fmt::Debug for llvm::Metadata {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The node is opaque on the Rust side; print its address.
        (self as *const Self).fmt(f)
    }
}
// From DWARF 5.
// See http://www.dwarfstd.org/ShowIssue.php?issue=140129.1.
const DW_LANG_RUST: c_uint = 0x1c;
// DWARF base-type encodings (`DW_ATE_*`), as defined by the DWARF standard.
#[allow(non_upper_case_globals)]
const DW_ATE_boolean: c_uint = 0x02;
#[allow(non_upper_case_globals)]
const DW_ATE_float: c_uint = 0x04;
#[allow(non_upper_case_globals)]
const DW_ATE_signed: c_uint = 0x05;
#[allow(non_upper_case_globals)]
const DW_ATE_unsigned: c_uint = 0x07;
#[allow(non_upper_case_globals)]
const DW_ATE_unsigned_char: c_uint = 0x08;
// Sentinel values handed to LLVM when no source position is available.
pub const UNKNOWN_LINE_NUMBER: c_uint = 0;
pub const UNKNOWN_COLUMN_NUMBER: c_uint = 0;
// Passed where an item has no enclosing scope to attach to.
pub const NO_SCOPE_METADATA: Option<&DIScope> = None;
/// Interned identifier for a type; created and resolved by [`TypeMap`].
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub struct UniqueTypeId(Symbol);
/// The `TypeMap` is where the `CrateDebugContext` holds the type metadata nodes
/// created so far. The metadata nodes are indexed by `UniqueTypeId`, and, for
/// faster lookup, also by `Ty`. The `TypeMap` is responsible for creating
/// `UniqueTypeId`s.
#[derive(Default)]
pub struct TypeMap<'ll, 'tcx> {
    /// The `UniqueTypeId`s created so far.
    unique_id_interner: Interner,
    /// A map from `UniqueTypeId` to debuginfo metadata for that type. This is a 1:1 mapping.
    unique_id_to_metadata: FxHashMap<UniqueTypeId, &'ll DIType>,
    /// A map from types to debuginfo metadata. This is an N:1 mapping.
    type_to_metadata: FxHashMap<Ty<'tcx>, &'ll DIType>,
    /// A map from types to `UniqueTypeId`. This is an N:1 mapping.
    type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>,
}
impl TypeMap<'ll, 'tcx> {
    /// Adds a Ty to metadata mapping to the TypeMap. The method will fail if
    /// the mapping already exists.
    fn register_type_with_metadata(&mut self, type_: Ty<'tcx>, metadata: &'ll DIType) {
        if self.type_to_metadata.insert(type_, metadata).is_some() {
            bug!("type metadata for `Ty` '{}' is already in the `TypeMap`!", type_);
        }
    }
    /// Removes a `Ty`-to-metadata mapping.
    /// This is useful when computing the metadata for a potentially
    /// recursive type (e.g., a function pointer of the form:
    ///
    ///     fn foo() -> impl Copy { foo }
    ///
    /// This kind of type cannot be properly represented
    /// via LLVM debuginfo. As a workaround,
    /// we register a temporary Ty to metadata mapping
    /// for the function before we compute its actual metadata.
    /// If the metadata computation ends up recursing back to the
    /// original function, it will use the temporary mapping
    /// for the inner self-reference, preventing us from
    /// recursing forever.
    ///
    /// This function is used to remove the temporary metadata
    /// mapping after we've computed the actual metadata.
    fn remove_type(&mut self, type_: Ty<'tcx>) {
        if self.type_to_metadata.remove(type_).is_none() {
            bug!("type metadata `Ty` '{}' is not in the `TypeMap`!", type_);
        }
    }
    /// Adds a `UniqueTypeId` to metadata mapping to the `TypeMap`. The method will
    /// fail if the mapping already exists.
    fn register_unique_id_with_metadata(
        &mut self,
        unique_type_id: UniqueTypeId,
        metadata: &'ll DIType,
    ) {
        if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
            bug!(
                "type metadata for unique ID '{}' is already in the `TypeMap`!",
                self.get_unique_type_id_as_string(unique_type_id)
            );
        }
    }
    /// Looks up metadata previously registered for `type_`, if any.
    fn find_metadata_for_type(&self, type_: Ty<'tcx>) -> Option<&'ll DIType> {
        self.type_to_metadata.get(&type_).cloned()
    }
    /// Looks up metadata previously registered under `unique_type_id`, if any.
    fn find_metadata_for_unique_id(&self, unique_type_id: UniqueTypeId) -> Option<&'ll DIType> {
        self.unique_id_to_metadata.get(&unique_type_id).cloned()
    }
    /// Gets the string representation of a `UniqueTypeId`. This method will fail if
    /// the ID is unknown.
    fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> &str {
        let UniqueTypeId(interner_key) = unique_type_id;
        self.unique_id_interner.get(interner_key)
    }
    /// Gets the `UniqueTypeId` for the given type. If the `UniqueTypeId` for the given
    /// type has been requested before, this is just a table lookup. Otherwise, an
    /// ID will be generated and stored for later lookup.
    fn get_unique_type_id_of_type<'a>(
        &mut self,
        cx: &CodegenCx<'a, 'tcx>,
        type_: Ty<'tcx>,
    ) -> UniqueTypeId {
        // Let's see if we already have something in the cache.
        if let Some(unique_type_id) = self.type_to_unique_id.get(&type_).cloned() {
            return unique_type_id;
        }
        // If not, generate one.
        // The hasher we are using to generate the UniqueTypeId. We want
        // something that provides more than the 64 bits of the DefaultHasher.
        let mut hasher = StableHasher::new();
        let mut hcx = cx.tcx.create_stable_hashing_context();
        // Regions don't affect layout/debuginfo, so erase them before hashing.
        let type_ = cx.tcx.erase_regions(&type_);
        hcx.while_hashing_spans(false, |hcx| {
            hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
                type_.hash_stable(hcx, &mut hasher);
            });
        });
        let unique_type_id = hasher.finish::<Fingerprint>().to_hex();
        // Intern the hex string and remember the key for future lookups.
        let key = self.unique_id_interner.intern(&unique_type_id);
        self.type_to_unique_id.insert(type_, UniqueTypeId(key));
        UniqueTypeId(key)
    }
    /// Gets the `UniqueTypeId` for an enum variant. Enum variants are not really
    /// types of their own, so they need special handling. We still need a
    /// `UniqueTypeId` for them, since to debuginfo they *are* real types.
    fn get_unique_type_id_of_enum_variant<'a>(
        &mut self,
        cx: &CodegenCx<'a, 'tcx>,
        enum_type: Ty<'tcx>,
        variant_name: &str,
    ) -> UniqueTypeId {
        let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type);
        // Variant IDs are derived from the enum's ID plus the variant name.
        let enum_variant_type_id =
            format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id), variant_name);
        let interner_key = self.unique_id_interner.intern(&enum_variant_type_id);
        UniqueTypeId(interner_key)
    }
    /// Gets the unique type ID string for an enum variant part.
    /// Variant parts are not types and shouldn't really have their own ID,
    /// but it makes `set_members_of_composite_type()` simpler.
    fn get_unique_type_id_str_of_enum_variant_part(
        &mut self,
        enum_type_id: UniqueTypeId,
    ) -> String {
        format!("{}_variant_part", self.get_unique_type_id_as_string(enum_type_id))
    }
}
/// A description of some recursive type. It can either be already finished (as
/// with `FinalMetadata`) or it is not yet finished, but contains all information
/// needed to generate the missing parts of the description. See the
/// documentation section on Recursive Types at the top of this file for more
/// information.
enum RecursiveTypeDescription<'ll, 'tcx> {
    UnfinishedMetadata {
        unfinished_type: Ty<'tcx>,
        unique_type_id: UniqueTypeId,
        metadata_stub: &'ll DICompositeType,
        member_holding_stub: &'ll DICompositeType,
        member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
    },
    FinalMetadata(&'ll DICompositeType),
}
/// Creates an `UnfinishedMetadata` description for `unfinished_type` and
/// registers `metadata_stub` in the `TypeMap` under both the type and its
/// unique ID, so that recursive references resolve to the stub until
/// `finalize()` fills in the member descriptions.
fn create_and_register_recursive_type_forward_declaration(
    cx: &CodegenCx<'ll, 'tcx>,
    unfinished_type: Ty<'tcx>,
    unique_type_id: UniqueTypeId,
    metadata_stub: &'ll DICompositeType,
    member_holding_stub: &'ll DICompositeType,
    member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
) -> RecursiveTypeDescription<'ll, 'tcx> {
    // Insert the stub into the `TypeMap` in order to allow for recursive references.
    let mut type_map = debug_context(cx).type_map.borrow_mut();
    type_map.register_unique_id_with_metadata(unique_type_id, metadata_stub);
    type_map.register_type_with_metadata(unfinished_type, metadata_stub);
    UnfinishedMetadata {
        unfinished_type,
        unique_type_id,
        metadata_stub,
        member_holding_stub,
        member_description_factory,
    }
}
impl RecursiveTypeDescription<'ll, 'tcx> {
    /// Finishes up the description of the type in question (mostly by providing
    /// descriptions of the fields of the given type) and returns the final type
    /// metadata.
    fn finalize(&self, cx: &CodegenCx<'ll, 'tcx>) -> MetadataCreationResult<'ll> {
        match *self {
            FinalMetadata(metadata) => MetadataCreationResult::new(metadata, false),
            UnfinishedMetadata {
                unfinished_type,
                unique_type_id,
                metadata_stub,
                member_holding_stub,
                ref member_description_factory,
            } => {
                // Make sure that we have a forward declaration of the type in
                // the TypeMap so that recursive references are possible. This
                // will always be the case if the RecursiveTypeDescription has
                // been properly created through the
                // `create_and_register_recursive_type_forward_declaration()`
                // function.
                {
                    let type_map = debug_context(cx).type_map.borrow();
                    if type_map.find_metadata_for_unique_id(unique_type_id).is_none()
                        || type_map.find_metadata_for_type(unfinished_type).is_none()
                    {
                        bug!(
                            "Forward declaration of potentially recursive type \
                             '{:?}' was not found in TypeMap!",
                            unfinished_type
                        );
                    }
                }
                // ... then create the member descriptions ...
                let member_descriptions = member_description_factory.create_member_descriptions(cx);
                // ... and attach them to the stub to complete it.
                set_members_of_composite_type(
                    cx,
                    unfinished_type,
                    member_holding_stub,
                    member_descriptions,
                );
                MetadataCreationResult::new(metadata_stub, true)
            }
        }
    }
}
/// Returns from the enclosing function if the type metadata with the given
/// unique ID can be found in the type map.
macro_rules! return_if_metadata_created_in_meantime {
    ($cx: expr, $unique_type_id: expr) => {
        if let Some(metadata) =
            debug_context($cx).type_map.borrow().find_metadata_for_unique_id($unique_type_id)
        {
            return MetadataCreationResult::new(metadata, true);
        }
    };
}
/// Creates debuginfo for array (`[T; N]`) and slice (`[T]`) types, described
/// to LLVM as an array type with a single subrange covering the elements.
fn fixed_vec_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    unique_type_id: UniqueTypeId,
    array_or_slice_type: Ty<'tcx>,
    element_type: Ty<'tcx>,
    span: Span,
) -> MetadataCreationResult<'ll> {
    let element_type_metadata = type_metadata(cx, element_type, span);
    // Creating the element metadata may have recursed back into this type.
    return_if_metadata_created_in_meantime!(cx, unique_type_id);
    let (size, align) = cx.size_and_align_of(array_or_slice_type);
    // Arrays have a known length; `-1` marks the length as unknown (slices).
    let upper_bound = match array_or_slice_type.kind() {
        ty::Array(_, len) => len.eval_usize(cx.tcx, ty::ParamEnv::reveal_all()) as c_longlong,
        _ => -1,
    };
    let subrange =
        unsafe { Some(llvm::LLVMRustDIBuilderGetOrCreateSubrange(DIB(cx), 0, upper_bound)) };
    let subscripts = create_DIArray(DIB(cx), &[subrange]);
    let metadata = unsafe {
        llvm::LLVMRustDIBuilderCreateArrayType(
            DIB(cx),
            size.bits(),
            align.bits() as u32,
            element_type_metadata,
            subscripts,
        )
    };
    MetadataCreationResult::new(metadata, false)
}
/// Creates debuginfo for a fat pointer to a slice or `str`
/// (`&[T]` / `&str`), modeled as a struct with `data_ptr` and `length`
/// members.
fn vec_slice_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    slice_ptr_type: Ty<'tcx>,
    element_type: Ty<'tcx>,
    unique_type_id: UniqueTypeId,
    span: Span,
) -> MetadataCreationResult<'ll> {
    let data_ptr_type = cx.tcx.mk_imm_ptr(element_type);
    let data_ptr_metadata = type_metadata(cx, data_ptr_type, span);
    // Creating the pointer metadata may have recursed back into this type.
    return_if_metadata_created_in_meantime!(cx, unique_type_id);
    let slice_type_name = compute_debuginfo_type_name(cx.tcx, slice_ptr_type, true);
    let (pointer_size, pointer_align) = cx.size_and_align_of(data_ptr_type);
    let (usize_size, usize_align) = cx.size_and_align_of(cx.tcx.types.usize);
    let member_descriptions = vec![
        MemberDescription {
            name: "data_ptr".to_owned(),
            type_metadata: data_ptr_metadata,
            offset: Size::ZERO,
            size: pointer_size,
            align: pointer_align,
            flags: DIFlags::FlagZero,
            discriminant: None,
            source_info: None,
        },
        MemberDescription {
            name: "length".to_owned(),
            type_metadata: type_metadata(cx, cx.tcx.types.usize, span),
            offset: pointer_size,
            size: usize_size,
            align: usize_align,
            flags: DIFlags::FlagZero,
            discriminant: None,
            source_info: None,
        },
    ];
    let file_metadata = unknown_file_metadata(cx);
    let metadata = composite_type_metadata(
        cx,
        slice_ptr_type,
        &slice_type_name[..],
        unique_type_id,
        member_descriptions,
        NO_SCOPE_METADATA,
        file_metadata,
        span,
    );
    MetadataCreationResult::new(metadata, false)
}
/// Creates debuginfo for a function signature: the return type (or `None`
/// for the unit return type, i.e. "void") followed by the argument types.
fn subroutine_type_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    unique_type_id: UniqueTypeId,
    signature: ty::PolyFnSig<'tcx>,
    span: Span,
) -> MetadataCreationResult<'ll> {
    let signature =
        cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &signature);
    let signature_metadata: Vec<_> = iter::once(
        // return type
        match signature.output().kind() {
            // `()` is represented as no return type at all ("void").
            ty::Tuple(ref tys) if tys.is_empty() => None,
            _ => Some(type_metadata(cx, signature.output(), span)),
        },
    )
    .chain(
        // regular arguments
        signature.inputs().iter().map(|argument_type| Some(type_metadata(cx, argument_type, span))),
    )
    .collect();
    // Creating arg/return metadata may have recursed back into this type.
    return_if_metadata_created_in_meantime!(cx, unique_type_id);
    MetadataCreationResult::new(
        unsafe {
            llvm::LLVMRustDIBuilderCreateSubroutineType(
                DIB(cx),
                create_DIArray(DIB(cx), &signature_metadata[..]),
            )
        },
        false,
    )
}
// FIXME(1563): This is all a bit of a hack because 'trait pointer' is an ill-
// defined concept. For the case of an actual trait pointer (i.e., `Box<Trait>`,
// `&Trait`), `trait_object_type` should be the whole thing (e.g, `Box<Trait>`) and
// `trait_type` should be the actual trait (e.g., `Trait`). Where the trait is part
// of a DST struct, there is no `trait_object_type` and the results of this
// function will be a little bit weird.
fn trait_pointer_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    trait_type: Ty<'tcx>,
    trait_object_type: Option<Ty<'tcx>>,
    unique_type_id: UniqueTypeId,
) -> &'ll DIType {
    // The implementation provided here is a stub. It makes sure that the trait
    // type is assigned the correct name, size, namespace, and source location.
    // However, it does not describe the trait's methods.
    let containing_scope = match trait_type.kind() {
        ty::Dynamic(ref data, ..) => {
            data.principal_def_id().map(|did| get_namespace_for_item(cx, did))
        }
        _ => {
            bug!(
                "debuginfo: unexpected trait-object type in \
                 trait_pointer_metadata(): {:?}",
                trait_type
            );
        }
    };
    let trait_object_type = trait_object_type.unwrap_or(trait_type);
    let trait_type_name = compute_debuginfo_type_name(cx.tcx, trait_object_type, false);
    let file_metadata = unknown_file_metadata(cx);
    let layout = cx.layout_of(cx.tcx.mk_mut_ptr(trait_type));
    // A trait object pointer is a fat pointer: field 0 is the data pointer,
    // field 1 is the vtable pointer (see the `abi` constants checked below).
    assert_eq!(abi::FAT_PTR_ADDR, 0);
    assert_eq!(abi::FAT_PTR_EXTRA, 1);
    let data_ptr_field = layout.field(cx, 0);
    let vtable_field = layout.field(cx, 1);
    let member_descriptions = vec![
        MemberDescription {
            name: "pointer".to_owned(),
            type_metadata: type_metadata(
                cx,
                cx.tcx.mk_mut_ptr(cx.tcx.types.u8),
                rustc_span::DUMMY_SP,
            ),
            offset: layout.fields.offset(0),
            size: data_ptr_field.size,
            align: data_ptr_field.align.abi,
            flags: DIFlags::FlagArtificial,
            discriminant: None,
            source_info: None,
        },
        MemberDescription {
            name: "vtable".to_owned(),
            type_metadata: type_metadata(cx, vtable_field.ty, rustc_span::DUMMY_SP),
            offset: layout.fields.offset(1),
            size: vtable_field.size,
            align: vtable_field.align.abi,
            flags: DIFlags::FlagArtificial,
            discriminant: None,
            source_info: None,
        },
    ];
    composite_type_metadata(
        cx,
        trait_object_type,
        &trait_type_name[..],
        unique_type_id,
        member_descriptions,
        containing_scope,
        file_metadata,
        rustc_span::DUMMY_SP,
    )
}
/// Returns the debuginfo node for `t`, creating it and memoizing it through
/// the `TypeMap` on first request. `usage_site_span` is used for creating
/// member metadata and for diagnostics.
pub fn type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>, usage_site_span: Span) -> &'ll DIType {
    // Get the unique type ID of this type.
    let unique_type_id = {
        let mut type_map = debug_context(cx).type_map.borrow_mut();
        // First, try to find the type in `TypeMap`. If we have seen it before, we
        // can exit early here.
        match type_map.find_metadata_for_type(t) {
            Some(metadata) => {
                return metadata;
            }
            None => {
                // The Ty is not in the `TypeMap` but maybe we have already seen
                // an equivalent type (e.g., only differing in region arguments).
                // In order to find out, generate the unique type ID and look
                // that up.
                let unique_type_id = type_map.get_unique_type_id_of_type(cx, t);
                match type_map.find_metadata_for_unique_id(unique_type_id) {
                    Some(metadata) => {
                        // There is already an equivalent type in the TypeMap.
                        // Register this Ty as an alias in the cache and
                        // return the cached metadata.
                        type_map.register_type_with_metadata(t, metadata);
                        return metadata;
                    }
                    None => {
                        // There really is no type metadata for this type, so
                        // proceed by creating it.
                        unique_type_id
                    }
                }
            }
        }
    };
    debug!("type_metadata: {:?}", t);
    // Shared handler for pointer-like types; returns `Err` with the cached
    // metadata if it was created while computing the pointee's metadata.
    let ptr_metadata = |ty: Ty<'tcx>| match *ty.kind() {
        ty::Slice(typ) => Ok(vec_slice_metadata(cx, t, typ, unique_type_id, usage_site_span)),
        ty::Str => Ok(vec_slice_metadata(cx, t, cx.tcx.types.u8, unique_type_id, usage_site_span)),
        ty::Dynamic(..) => Ok(MetadataCreationResult::new(
            trait_pointer_metadata(cx, ty, Some(t), unique_type_id),
            false,
        )),
        _ => {
            let pointee_metadata = type_metadata(cx, ty, usage_site_span);
            if let Some(metadata) =
                debug_context(cx).type_map.borrow().find_metadata_for_unique_id(unique_type_id)
            {
                return Err(metadata);
            }
            Ok(MetadataCreationResult::new(pointer_type_metadata(cx, t, pointee_metadata), false))
        }
    };
    let MetadataCreationResult { metadata, already_stored_in_typemap } = match *t.kind() {
        ty::Never | ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) => {
            MetadataCreationResult::new(basic_type_metadata(cx, t), false)
        }
        ty::Tuple(ref elements) if elements.is_empty() => {
            MetadataCreationResult::new(basic_type_metadata(cx, t), false)
        }
        ty::Array(typ, _) | ty::Slice(typ) => {
            fixed_vec_metadata(cx, unique_type_id, t, typ, usage_site_span)
        }
        ty::Str => fixed_vec_metadata(cx, unique_type_id, t, cx.tcx.types.i8, usage_site_span),
        ty::Dynamic(..) => {
            MetadataCreationResult::new(trait_pointer_metadata(cx, t, None, unique_type_id), false)
        }
        ty::Foreign(..) => {
            MetadataCreationResult::new(foreign_type_metadata(cx, t, unique_type_id), false)
        }
        ty::RawPtr(ty::TypeAndMut { ty, .. }) | ty::Ref(_, ty, _) => match ptr_metadata(ty) {
            Ok(res) => res,
            Err(metadata) => return metadata,
        },
        ty::Adt(def, _) if def.is_box() => match ptr_metadata(t.boxed_ty()) {
            Ok(res) => res,
            Err(metadata) => return metadata,
        },
        ty::FnDef(..) | ty::FnPtr(_) => {
            if let Some(metadata) =
                debug_context(cx).type_map.borrow().find_metadata_for_unique_id(unique_type_id)
            {
                return metadata;
            }
            // It's possible to create a self-referential
            // type in Rust by using 'impl trait':
            //
            // fn foo() -> impl Copy { foo }
            //
            // See `TypeMap::remove_type` for more detals
            // about the workaround.
            let temp_type = {
                unsafe {
                    // The choice of type here is pretty arbitrary -
                    // anything reading the debuginfo for a recursive
                    // type is going to see *something* weird - the only
                    // question is what exactly it will see.
                    let name = "<recur_type>";
                    llvm::LLVMRustDIBuilderCreateBasicType(
                        DIB(cx),
                        name.as_ptr().cast(),
                        name.len(),
                        cx.size_of(t).bits(),
                        DW_ATE_unsigned,
                    )
                }
            };
            let type_map = &debug_context(cx).type_map;
            type_map.borrow_mut().register_type_with_metadata(t, temp_type);
            let fn_metadata =
                subroutine_type_metadata(cx, unique_type_id, t.fn_sig(cx.tcx), usage_site_span)
                    .metadata;
            type_map.borrow_mut().remove_type(t);
            // This is actually a function pointer, so wrap it in pointer DI.
            MetadataCreationResult::new(pointer_type_metadata(cx, t, fn_metadata), false)
        }
        ty::Closure(def_id, substs) => {
            let upvar_tys: Vec<_> = substs.as_closure().upvar_tys().collect();
            let containing_scope = get_namespace_for_item(cx, def_id);
            prepare_tuple_metadata(
                cx,
                t,
                &upvar_tys,
                unique_type_id,
                usage_site_span,
                Some(containing_scope),
            )
            .finalize(cx)
        }
        ty::Generator(def_id, substs, _) => {
            let upvar_tys: Vec<_> = substs
                .as_generator()
                .prefix_tys()
                .map(|t| cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t))
                .collect();
            prepare_enum_metadata(cx, t, def_id, unique_type_id, usage_site_span, upvar_tys)
                .finalize(cx)
        }
        ty::Adt(def, ..) => match def.adt_kind() {
            AdtKind::Struct => {
                prepare_struct_metadata(cx, t, unique_type_id, usage_site_span).finalize(cx)
            }
            AdtKind::Union => {
                prepare_union_metadata(cx, t, unique_type_id, usage_site_span).finalize(cx)
            }
            AdtKind::Enum => {
                prepare_enum_metadata(cx, t, def.did, unique_type_id, usage_site_span, vec![])
                    .finalize(cx)
            }
        },
        ty::Tuple(ref elements) => {
            let tys: Vec<_> = elements.iter().map(|k| k.expect_ty()).collect();
            prepare_tuple_metadata(cx, t, &tys, unique_type_id, usage_site_span, NO_SCOPE_METADATA)
                .finalize(cx)
        }
        // Type parameters from polymorphized functions.
        ty::Param(_) => MetadataCreationResult::new(param_type_metadata(cx, t), false),
        _ => bug!("debuginfo: unexpected type in type_metadata: {:?}", t),
    };
    // Register the freshly created metadata under both keys, sanity-checking
    // any entries that were created in the meantime.
    {
        let mut type_map = debug_context(cx).type_map.borrow_mut();
        if already_stored_in_typemap {
            // Also make sure that we already have a `TypeMap` entry for the unique type ID.
            let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) {
                Some(metadata) => metadata,
                None => {
                    span_bug!(
                        usage_site_span,
                        "expected type metadata for unique \
                         type ID '{}' to already be in \
                         the `debuginfo::TypeMap` but it \
                         was not. (Ty = {})",
                        type_map.get_unique_type_id_as_string(unique_type_id),
                        t
                    );
                }
            };
            match type_map.find_metadata_for_type(t) {
                Some(metadata) => {
                    if metadata != metadata_for_uid {
                        span_bug!(
                            usage_site_span,
                            "mismatch between `Ty` and \
                             `UniqueTypeId` maps in \
                             `debuginfo::TypeMap`. \
                             UniqueTypeId={}, Ty={}",
                            type_map.get_unique_type_id_as_string(unique_type_id),
                            t
                        );
                    }
                }
                None => {
                    type_map.register_type_with_metadata(t, metadata);
                }
            }
        } else {
            type_map.register_type_with_metadata(t, metadata);
            type_map.register_unique_id_with_metadata(unique_type_id, metadata);
        }
    }
    metadata
}
/// Formats `data` as a lowercase hexadecimal string, two digits per byte.
fn hex_encode(data: &[u8]) -> String {
    // Each byte expands to exactly two hex digits, so reserve up front.
    data.iter().fold(String::with_capacity(data.len() * 2), |mut acc, byte| {
        write!(acc, "{:02x}", byte).expect("writing to a String cannot fail");
        acc
    })
}
/// Creates (or returns the cached) `DIFile` node for `source_file`.
pub fn file_metadata(cx: &CodegenCx<'ll, '_>, source_file: &SourceFile) -> &'ll DIFile {
    debug!("file_metadata: file_name: {}", source_file.name);
    let hash = Some(&source_file.src_hash);
    let file_name = Some(source_file.name.to_string());
    let directory = if source_file.is_real_file() && !source_file.is_imported() {
        Some(cx.sess().working_dir.0.to_string_lossy().to_string())
    } else {
        // If the path comes from an upstream crate we assume it has been made
        // independent of the compiler's working directory one way or another.
        None
    };
    file_metadata_raw(cx, file_name, directory, hash)
}
/// Returns the placeholder `DIFile` used when no real source file is known.
pub fn unknown_file_metadata(cx: &CodegenCx<'ll, '_>) -> &'ll DIFile {
    file_metadata_raw(cx, None, None, None)
}
/// Shared implementation for the two functions above: builds a `DIFile`
/// (including its source checksum, when available) and memoizes it per
/// `(file_name, directory)` pair in `created_files`.
fn file_metadata_raw(
    cx: &CodegenCx<'ll, '_>,
    file_name: Option<String>,
    directory: Option<String>,
    hash: Option<&SourceFileHash>,
) -> &'ll DIFile {
    let key = (file_name, directory);
    match debug_context(cx).created_files.borrow_mut().entry(key) {
        Entry::Occupied(o) => o.get(),
        Entry::Vacant(v) => {
            let (file_name, directory) = v.key();
            debug!("file_metadata: file_name: {:?}, directory: {:?}", file_name, directory);
            let file_name = file_name.as_deref().unwrap_or("<unknown>");
            let directory = directory.as_deref().unwrap_or("");
            // Translate the session's hash algorithm/value into LLVM's
            // checksum representation (hex-encoded digest).
            let (hash_kind, hash_value) = match hash {
                Some(hash) => {
                    let kind = match hash.kind {
                        rustc_span::SourceFileHashAlgorithm::Md5 => llvm::ChecksumKind::MD5,
                        rustc_span::SourceFileHashAlgorithm::Sha1 => llvm::ChecksumKind::SHA1,
                        rustc_span::SourceFileHashAlgorithm::Sha256 => llvm::ChecksumKind::SHA256,
                    };
                    (kind, hex_encode(hash.hash_bytes()))
                }
                None => (llvm::ChecksumKind::None, String::new()),
            };
            let file_metadata = unsafe {
                llvm::LLVMRustDIBuilderCreateFile(
                    DIB(cx),
                    file_name.as_ptr().cast(),
                    file_name.len(),
                    directory.as_ptr().cast(),
                    directory.len(),
                    hash_kind,
                    hash_value.as_ptr().cast(),
                    hash_value.len(),
                )
            };
            v.insert(file_metadata);
            file_metadata
        }
    }
}
/// Maps Rust primitive types onto the type names MSVC debug tooling (e.g.
/// .natvis visualizers) expects; used by `basic_type_metadata` when the
/// target is MSVC-like.
trait MsvcBasicName {
    fn msvc_basic_name(self) -> &'static str;
}
impl MsvcBasicName for ast::IntTy {
    fn msvc_basic_name(self) -> &'static str {
        match self {
            ast::IntTy::Isize => "ptrdiff_t",
            ast::IntTy::I8 => "__int8",
            ast::IntTy::I16 => "__int16",
            ast::IntTy::I32 => "__int32",
            ast::IntTy::I64 => "__int64",
            ast::IntTy::I128 => "__int128",
        }
    }
}
impl MsvcBasicName for ast::UintTy {
    fn msvc_basic_name(self) -> &'static str {
        match self {
            ast::UintTy::Usize => "size_t",
            ast::UintTy::U8 => "unsigned __int8",
            ast::UintTy::U16 => "unsigned __int16",
            ast::UintTy::U32 => "unsigned __int32",
            ast::UintTy::U64 => "unsigned __int64",
            ast::UintTy::U128 => "unsigned __int128",
        }
    }
}
impl MsvcBasicName for ast::FloatTy {
    fn msvc_basic_name(self) -> &'static str {
        match self {
            ast::FloatTy::F32 => "float",
            ast::FloatTy::F64 => "double",
        }
    }
}
/// Creates debuginfo for primitive types (plus `!` and `()`). When targeting
/// MSVC, an MSVC-named base type is emitted with a typedef carrying the
/// Rust-facing name layered on top.
fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
    debug!("basic_type_metadata: {:?}", t);
    // When targeting MSVC, emit MSVC style type names for compatibility with
    // .natvis visualizers (and perhaps other existing native debuggers?)
    let msvc_like_names = cx.tcx.sess.target.options.is_like_msvc;
    let (name, encoding) = match t.kind() {
        ty::Never => ("!", DW_ATE_unsigned),
        ty::Tuple(ref elements) if elements.is_empty() => ("()", DW_ATE_unsigned),
        ty::Bool => ("bool", DW_ATE_boolean),
        ty::Char => ("char", DW_ATE_unsigned_char),
        ty::Int(int_ty) if msvc_like_names => (int_ty.msvc_basic_name(), DW_ATE_signed),
        ty::Uint(uint_ty) if msvc_like_names => (uint_ty.msvc_basic_name(), DW_ATE_unsigned),
        ty::Float(float_ty) if msvc_like_names => (float_ty.msvc_basic_name(), DW_ATE_float),
        ty::Int(int_ty) => (int_ty.name_str(), DW_ATE_signed),
        ty::Uint(uint_ty) => (uint_ty.name_str(), DW_ATE_unsigned),
        ty::Float(float_ty) => (float_ty.name_str(), DW_ATE_float),
        _ => bug!("debuginfo::basic_type_metadata - `t` is invalid type"),
    };
    let ty_metadata = unsafe {
        llvm::LLVMRustDIBuilderCreateBasicType(
            DIB(cx),
            name.as_ptr().cast(),
            name.len(),
            cx.size_of(t).bits(),
            encoding,
        )
    };
    if !msvc_like_names {
        return ty_metadata;
    }
    // Layer a typedef with the Rust name over the MSVC-named base type, so
    // both names are visible to debuggers.
    let typedef_name = match t.kind() {
        ty::Int(int_ty) => int_ty.name_str(),
        ty::Uint(uint_ty) => uint_ty.name_str(),
        ty::Float(float_ty) => float_ty.name_str(),
        _ => return ty_metadata,
    };
    let typedef_metadata = unsafe {
        llvm::LLVMRustDIBuilderCreateTypedef(
            DIB(cx),
            ty_metadata,
            typedef_name.as_ptr().cast(),
            typedef_name.len(),
            unknown_file_metadata(cx),
            0,
            None,
        )
    };
    typedef_metadata
}
/// Creates debuginfo for a foreign (`extern`) type as an opaque struct stub;
/// nothing is known about its layout.
fn foreign_type_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    t: Ty<'tcx>,
    unique_type_id: UniqueTypeId,
) -> &'ll DIType {
    debug!("foreign_type_metadata: {:?}", t);
    let name = compute_debuginfo_type_name(cx.tcx, t, false);
    create_struct_stub(cx, t, &name, unique_type_id, NO_SCOPE_METADATA, DIFlags::FlagZero)
}
/// Wraps `pointee_type_metadata` in pointer debuginfo named after
/// `pointer_type`.
fn pointer_type_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    pointer_type: Ty<'tcx>,
    pointee_type_metadata: &'ll DIType,
) -> &'ll DIType {
    let (pointer_size, pointer_align) = cx.size_and_align_of(pointer_type);
    let name = compute_debuginfo_type_name(cx.tcx, pointer_type, false);
    unsafe {
        llvm::LLVMRustDIBuilderCreatePointerType(
            DIB(cx),
            pointee_type_metadata,
            pointer_size.bits(),
            pointer_align.bits() as u32,
            0, // Ignore DWARF address space.
            name.as_ptr().cast(),
            name.len(),
        )
    }
}
/// Creates a zero-sized placeholder type for a type parameter that survived
/// polymorphization (see the `ty::Param` arm in `type_metadata`).
fn param_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
    debug!("param_type_metadata: {:?}", t);
    let name = format!("{:?}", t);
    unsafe {
        llvm::LLVMRustDIBuilderCreateBasicType(
            DIB(cx),
            name.as_ptr().cast(),
            name.len(),
            Size::ZERO.bits(),
            DW_ATE_unsigned,
        )
    }
}
/// Creates the `DICompileUnit` for this codegen unit, along with the
/// module-level `llvm.gcov` metadata (when `-Z profile` is enabled) and the
/// `llvm.ident` metadata (on wasm32 targets) that accompany it.
pub fn compile_unit_metadata(
    tcx: TyCtxt<'_>,
    codegen_unit_name: &str,
    debug_context: &CrateDebugContext<'ll, '_>,
) -> &'ll DIDescriptor {
    let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
        Some(ref path) => path.clone(),
        None => PathBuf::from(&*tcx.crate_name(LOCAL_CRATE).as_str()),
    };
    // The OSX linker has an idiosyncrasy where it will ignore some debuginfo
    // if multiple object files with the same `DW_AT_name` are linked together.
    // As a workaround we generate unique names for each object file. Those do
    // not correspond to an actual source file but that should be harmless.
    if tcx.sess.target.options.is_like_osx {
        name_in_debuginfo.push("@");
        name_in_debuginfo.push(codegen_unit_name);
    }
    debug!("compile_unit_metadata: {:?}", name_in_debuginfo);
    let rustc_producer =
        format!("rustc version {}", option_env!("CFG_VERSION").expect("CFG_VERSION"),);
    // FIXME(#41252) Remove "clang LLVM" if we can get GDB and LLVM to play nice.
    let producer = format!("clang LLVM ({})", rustc_producer);
    let name_in_debuginfo = name_in_debuginfo.to_string_lossy();
    let work_dir = tcx.sess.working_dir.0.to_string_lossy();
    let flags = "\0";
    let split_name = "";
    // FIXME(#60020):
    //
    // This should actually be
    //
    //     let kind = DebugEmissionKind::from_generic(tcx.sess.opts.debuginfo);
    //
    // That is, we should set LLVM's emission kind to `LineTablesOnly` if
    // we are compiling with "limited" debuginfo. However, some of the
    // existing tools relied on slightly more debuginfo being generated than
    // would be the case with `LineTablesOnly`, and we did not want to break
    // these tools in a "drive-by fix", without a good idea or plan about
    // what limited debuginfo should exactly look like. So for now we keep
    // the emission kind as `FullDebug`.
    //
    // See https://github.com/rust-lang/rust/issues/60020 for details.
    let kind = DebugEmissionKind::FullDebug;
    assert!(tcx.sess.opts.debuginfo != DebugInfo::None);
    unsafe {
        let file_metadata = llvm::LLVMRustDIBuilderCreateFile(
            debug_context.builder,
            name_in_debuginfo.as_ptr().cast(),
            name_in_debuginfo.len(),
            work_dir.as_ptr().cast(),
            work_dir.len(),
            llvm::ChecksumKind::None,
            ptr::null(),
            0,
        );
        let unit_metadata = llvm::LLVMRustDIBuilderCreateCompileUnit(
            debug_context.builder,
            DW_LANG_RUST,
            file_metadata,
            producer.as_ptr().cast(),
            producer.len(),
            tcx.sess.opts.optimize != config::OptLevel::No,
            flags.as_ptr().cast(),
            0,
            split_name.as_ptr().cast(),
            split_name.len(),
            kind,
        );
        if tcx.sess.opts.debugging_opts.profile {
            // Record the .gcno/.gcda output paths plus the compile unit in
            // the `llvm.gcov` named metadata, as expected by LLVM's GCOV
            // profiling instrumentation.
            let cu_desc_metadata =
                llvm::LLVMRustMetadataAsValue(debug_context.llcontext, unit_metadata);
            let default_gcda_path = &tcx.output_filenames(LOCAL_CRATE).with_extension("gcda");
            let gcda_path =
                tcx.sess.opts.debugging_opts.profile_emit.as_ref().unwrap_or(default_gcda_path);
            let gcov_cu_info = [
                path_to_mdstring(
                    debug_context.llcontext,
                    &tcx.output_filenames(LOCAL_CRATE).with_extension("gcno"),
                ),
                path_to_mdstring(debug_context.llcontext, &gcda_path),
                cu_desc_metadata,
            ];
            let gcov_metadata = llvm::LLVMMDNodeInContext(
                debug_context.llcontext,
                gcov_cu_info.as_ptr(),
                gcov_cu_info.len() as c_uint,
            );
            let llvm_gcov_ident = const_cstr!("llvm.gcov");
            llvm::LLVMAddNamedMetadataOperand(
                debug_context.llmod,
                llvm_gcov_ident.as_ptr(),
                gcov_metadata,
            );
        }
        // Insert `llvm.ident` metadata on the wasm32 targets since that will
        // get hooked up to the "producer" sections `processed-by` information.
        if tcx.sess.opts.target_triple.triple().starts_with("wasm32") {
            let name_metadata = llvm::LLVMMDStringInContext(
                debug_context.llcontext,
                rustc_producer.as_ptr().cast(),
                rustc_producer.as_bytes().len() as c_uint,
            );
            llvm::LLVMAddNamedMetadataOperand(
                debug_context.llmod,
                const_cstr!("llvm.ident").as_ptr(),
                llvm::LLVMMDNodeInContext(debug_context.llcontext, &name_metadata, 1),
            );
        }
        return unit_metadata;
    };
    // Helper: turn a filesystem path into an LLVM metadata string.
    fn path_to_mdstring(llcx: &'ll llvm::Context, path: &Path) -> &'ll Value {
        let path_str = path_to_c_string(path);
        unsafe {
            llvm::LLVMMDStringInContext(
                llcx,
                path_str.as_ptr(),
                path_str.as_bytes().len() as c_uint,
            )
        }
    }
}
/// Result of creating debuginfo for a type: the LLVM type metadata node
/// plus a flag indicating whether that node was already recorded in the
/// type map by the creation routine itself.
struct MetadataCreationResult<'ll> {
    metadata: &'ll DIType,
    already_stored_in_typemap: bool,
}
impl MetadataCreationResult<'ll> {
    /// Bundles `metadata` together with the flag saying whether it has
    /// already been stored in the type map.
    fn new(metadata: &'ll DIType, already_stored_in_typemap: bool) -> Self {
        Self { metadata, already_stored_in_typemap }
    }
}
/// A source location (file and line) attached to a member description.
#[derive(Debug)]
struct SourceInfo<'ll> {
    /// Debuginfo metadata node for the source file.
    file: &'ll DIFile,
    /// Line number within `file`.
    line: u32,
}
/// Description of a type member, which can either be a regular field (as in
/// structs or tuples) or an enum variant.
#[derive(Debug)]
struct MemberDescription<'ll> {
    /// Member name; may be empty (enum variants in the MSVC fallback
    /// encoding use an empty name).
    name: String,
    /// Debuginfo node for the member's type.
    type_metadata: &'ll DIType,
    /// Offset of the member within the enclosing composite type.
    offset: Size,
    size: Size,
    align: Align,
    flags: DIFlags,
    /// For enum variants with a directly-stored tag: the discriminant value
    /// that selects this variant.
    discriminant: Option<u64>,
    /// Optional source location emitted for the member.
    source_info: Option<SourceInfo<'ll>>,
}
impl<'ll> MemberDescription<'ll> {
    /// Lowers this description into an LLVM member-type node inside
    /// `composite_type_metadata`.
    fn into_metadata(
        self,
        cx: &CodegenCx<'ll, '_>,
        composite_type_metadata: &'ll DIScope,
    ) -> &'ll DIType {
        // Fall back to the "unknown file" placeholder when no source
        // location was recorded for this member.
        let (file, line) = match self.source_info {
            Some(info) => (info.file, info.line),
            None => (unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER),
        };
        let discriminant_value = self.discriminant.map(|value| cx.const_u64(value));
        unsafe {
            llvm::LLVMRustDIBuilderCreateVariantMemberType(
                DIB(cx),
                composite_type_metadata,
                self.name.as_ptr().cast(),
                self.name.len(),
                file,
                line,
                self.size.bits(),
                self.align.bits() as u32,
                self.offset.bits(),
                discriminant_value,
                self.flags,
                self.type_metadata,
            )
        }
    }
}
/// A factory for `MemberDescription`s. It produces a list of member descriptions
/// for some record-like type. `MemberDescriptionFactory`s are used to defer the
/// creation of type member descriptions in order to break cycles arising from
/// recursive type definitions.
enum MemberDescriptionFactory<'ll, 'tcx> {
    /// Fields of a struct.
    StructMDF(StructMemberDescriptionFactory<'tcx>),
    /// Elements of a tuple.
    TupleMDF(TupleMemberDescriptionFactory<'tcx>),
    /// Variants of an enum (or generator).
    EnumMDF(EnumMemberDescriptionFactory<'ll, 'tcx>),
    /// Fields of a union.
    UnionMDF(UnionMemberDescriptionFactory<'tcx>),
    /// Fields of a single enum variant.
    VariantMDF(VariantMemberDescriptionFactory<'ll, 'tcx>),
}
impl MemberDescriptionFactory<'ll, 'tcx> {
    /// Runs the deferred factory, producing the member descriptions for the
    /// record-like type it was created for.
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        match self {
            StructMDF(factory) => factory.create_member_descriptions(cx),
            TupleMDF(factory) => factory.create_member_descriptions(cx),
            EnumMDF(factory) => factory.create_member_descriptions(cx),
            UnionMDF(factory) => factory.create_member_descriptions(cx),
            VariantMDF(factory) => factory.create_member_descriptions(cx),
        }
    }
}
//=-----------------------------------------------------------------------------
// Structs
//=-----------------------------------------------------------------------------
/// Creates `MemberDescription`s for the fields of a struct.
struct StructMemberDescriptionFactory<'tcx> {
    /// The struct type being described.
    ty: Ty<'tcx>,
    /// The struct's single variant definition (field names, ctor kind).
    variant: &'tcx ty::VariantDef,
    /// Span used when computing the fields' type metadata.
    span: Span,
}
impl<'tcx> StructMemberDescriptionFactory<'tcx> {
    /// Produces one `MemberDescription` per struct field, in declaration
    /// order.
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        let layout = cx.layout_of(self.ty);
        // Tuple structs (fn-like constructors) have positional fields which
        // get the `__{index}` naming scheme.
        let is_tuple_struct = self.variant.ctor_kind == CtorKind::Fn;
        let mut descriptions = Vec::with_capacity(self.variant.fields.len());
        for (i, field_def) in self.variant.fields.iter().enumerate() {
            let name = if is_tuple_struct {
                format!("__{}", i)
            } else {
                field_def.ident.to_string()
            };
            let field_layout = layout.field(cx, i);
            descriptions.push(MemberDescription {
                name,
                type_metadata: type_metadata(cx, field_layout.ty, self.span),
                offset: layout.fields.offset(i),
                size: field_layout.size,
                align: field_layout.align.abi,
                flags: DIFlags::FlagZero,
                discriminant: None,
                source_info: None,
            });
        }
        descriptions
    }
}
/// Builds the (initially field-less) debuginfo node for a struct and
/// registers a factory that fills in the fields later, which breaks cycles
/// in recursive type definitions.
fn prepare_struct_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    struct_type: Ty<'tcx>,
    unique_type_id: UniqueTypeId,
    span: Span,
) -> RecursiveTypeDescription<'ll, 'tcx> {
    let struct_name = compute_debuginfo_type_name(cx.tcx, struct_type, false);
    // Only ADTs can reach this function; anything else is a compiler bug.
    let (struct_def_id, variant) = if let ty::Adt(def, _) = struct_type.kind() {
        (def.did, def.non_enum_variant())
    } else {
        bug!("prepare_struct_metadata on a non-ADT")
    };
    let containing_scope = get_namespace_for_item(cx, struct_def_id);
    let stub = create_struct_stub(
        cx,
        struct_type,
        &struct_name,
        unique_type_id,
        Some(containing_scope),
        DIFlags::FlagZero,
    );
    create_and_register_recursive_type_forward_declaration(
        cx,
        struct_type,
        unique_type_id,
        stub,
        stub,
        StructMDF(StructMemberDescriptionFactory { ty: struct_type, variant, span }),
    )
}
//=-----------------------------------------------------------------------------
// Tuples
//=-----------------------------------------------------------------------------
/// Creates `MemberDescription`s for the fields of a tuple.
struct TupleMemberDescriptionFactory<'tcx> {
    /// The tuple type being described.
    ty: Ty<'tcx>,
    /// Types of the tuple's elements, in order.
    component_types: Vec<Ty<'tcx>>,
    /// Span used when computing the elements' type metadata.
    span: Span,
}
impl<'tcx> TupleMemberDescriptionFactory<'tcx> {
    /// Produces one `MemberDescription` per tuple element; elements are
    /// named positionally (`__0`, `__1`, ...).
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        let layout = cx.layout_of(self.ty);
        let mut descriptions = Vec::with_capacity(self.component_types.len());
        for (i, &component_type) in self.component_types.iter().enumerate() {
            let (size, align) = cx.size_and_align_of(component_type);
            descriptions.push(MemberDescription {
                name: format!("__{}", i),
                type_metadata: type_metadata(cx, component_type, self.span),
                offset: layout.fields.offset(i),
                size,
                align,
                flags: DIFlags::FlagZero,
                discriminant: None,
                source_info: None,
            });
        }
        descriptions
    }
}
/// Builds the (initially field-less) debuginfo node for a tuple and
/// registers a factory that fills in the elements later, which breaks
/// cycles in recursive type definitions.
fn prepare_tuple_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    tuple_type: Ty<'tcx>,
    component_types: &[Ty<'tcx>],
    unique_type_id: UniqueTypeId,
    span: Span,
    containing_scope: Option<&'ll DIScope>,
) -> RecursiveTypeDescription<'ll, 'tcx> {
    let tuple_name = compute_debuginfo_type_name(cx.tcx, tuple_type, false);
    let stub = create_struct_stub(
        cx,
        tuple_type,
        &tuple_name,
        unique_type_id,
        containing_scope,
        DIFlags::FlagZero,
    );
    let factory = TupleMemberDescriptionFactory {
        ty: tuple_type,
        component_types: component_types.to_vec(),
        span,
    };
    create_and_register_recursive_type_forward_declaration(
        cx,
        tuple_type,
        unique_type_id,
        stub,
        stub,
        TupleMDF(factory),
    )
}
//=-----------------------------------------------------------------------------
// Unions
//=-----------------------------------------------------------------------------
/// Creates `MemberDescription`s for the fields of a union.
struct UnionMemberDescriptionFactory<'tcx> {
    /// Layout of the union type being described.
    layout: TyAndLayout<'tcx>,
    /// The union's single variant definition (field names/types).
    variant: &'tcx ty::VariantDef,
    /// Span used when computing the fields' type metadata.
    span: Span,
}
impl<'tcx> UnionMemberDescriptionFactory<'tcx> {
    /// Produces one `MemberDescription` per union field; every field is
    /// placed at offset zero.
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        let mut descriptions = Vec::with_capacity(self.variant.fields.len());
        for (i, field_def) in self.variant.fields.iter().enumerate() {
            let field_layout = self.layout.field(cx, i);
            descriptions.push(MemberDescription {
                name: field_def.ident.to_string(),
                type_metadata: type_metadata(cx, field_layout.ty, self.span),
                offset: Size::ZERO,
                size: field_layout.size,
                align: field_layout.align.abi,
                flags: DIFlags::FlagZero,
                discriminant: None,
                source_info: None,
            });
        }
        descriptions
    }
}
/// Builds the (initially field-less) debuginfo node for a union and
/// registers a factory that fills in the fields later, which breaks cycles
/// in recursive type definitions.
fn prepare_union_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    union_type: Ty<'tcx>,
    unique_type_id: UniqueTypeId,
    span: Span,
) -> RecursiveTypeDescription<'ll, 'tcx> {
    let union_name = compute_debuginfo_type_name(cx.tcx, union_type, false);
    // Only ADTs can be unions; anything else reaching here is a bug.
    let (union_def_id, variant) = if let ty::Adt(def, _) = union_type.kind() {
        (def.did, def.non_enum_variant())
    } else {
        bug!("prepare_union_metadata on a non-ADT")
    };
    let containing_scope = get_namespace_for_item(cx, union_def_id);
    let stub = create_union_stub(cx, union_type, &union_name, unique_type_id, containing_scope);
    let factory = UnionMemberDescriptionFactory { layout: cx.layout_of(union_type), variant, span };
    create_and_register_recursive_type_forward_declaration(
        cx,
        union_type,
        unique_type_id,
        stub,
        stub,
        UnionMDF(factory),
    )
}
//=-----------------------------------------------------------------------------
// Enums
//=-----------------------------------------------------------------------------
/// DWARF variant support is only available starting in LLVM 8, but
/// on MSVC we have to use the fallback mode, because LLVM doesn't
/// lower variant parts to PDB.
///
/// When this returns `true`, enums are encoded as unions of variant
/// structs (with the discriminant carried in an artificial
/// `RUST$ENUM$DISR` member) instead of DWARF variant parts.
fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
    cx.sess().target.options.is_like_msvc
}
// FIXME(eddyb) maybe precompute this? Right now it's computed once
// per generator monomorphization, but it doesn't depend on substs.
/// Returns the generator's layout together with a map from each saved local
/// to the user-visible variable name it was derived from (if any). The names
/// are recovered from MIR var-debuginfo entries whose places project into
/// the generator state argument.
fn generator_layout_and_saved_local_names(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
) -> (&'tcx GeneratorLayout<'tcx>, IndexVec<mir::GeneratorSavedLocal, Option<Symbol>>) {
    let body = tcx.optimized_mir(def_id);
    let generator_layout = body.generator_layout.as_ref().unwrap();
    // Start with `None` for every saved local; fill in names as we find them.
    let mut generator_saved_local_names = IndexVec::from_elem(None, &generator_layout.field_tys);
    // The generator state (`Pin<&mut Self>`) is always MIR local `_1`.
    let state_arg = mir::Local::new(1);
    for var in &body.var_debug_info {
        if var.place.local != state_arg {
            continue;
        }
        match var.place.projection[..] {
            [
                // Deref of the `Pin<&mut Self>` state argument.
                mir::ProjectionElem::Field(..),
                mir::ProjectionElem::Deref,
                // Field of a variant of the state.
                mir::ProjectionElem::Downcast(_, variant),
                mir::ProjectionElem::Field(field, _),
            ] => {
                let name = &mut generator_saved_local_names[
                    generator_layout.variant_fields[variant][field]
                ];
                // Keep the first name seen for each saved local.
                if name.is_none() {
                    name.replace(var.name);
                }
            }
            _ => {}
        }
    }
    (generator_layout, generator_saved_local_names)
}
/// Describes the members of an enum value; an enum is described as a union of
/// structs in DWARF. This `MemberDescriptionFactory` provides the description for
/// the members of this union; so for every variant of the given enum, this
/// factory will produce one `MemberDescription` (all with no name and a fixed
/// offset of zero bytes).
struct EnumMemberDescriptionFactory<'ll, 'tcx> {
    /// The enum (or generator) type being described.
    enum_type: Ty<'tcx>,
    /// Layout of `enum_type`.
    layout: TyAndLayout<'tcx>,
    /// Debuginfo for the discriminant type; only populated when the MSVC
    /// fallback encoding is in use (see `prepare_enum_metadata`).
    tag_type_metadata: Option<&'ll DIType>,
    /// Scope the variant descriptions are created under.
    containing_scope: &'ll DIScope,
    span: Span,
}
impl EnumMemberDescriptionFactory<'ll, 'tcx> {
    /// Produces one `MemberDescription` per enum/generator variant. Since the
    /// enum is described as a union of variant structs, every member sits at
    /// offset zero; the discriminant is conveyed either as an explicit
    /// `discriminant` value (direct tag), implicitly via niche values, or —
    /// under the MSVC fallback for niche-encoded enums — encoded into the
    /// member's name.
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        // For generators, precompute the layout and the saved-local names so
        // that `variant_info_for` can hand out borrowed views into them.
        let generator_variant_info_data = match *self.enum_type.kind() {
            ty::Generator(def_id, ..) => {
                Some(generator_layout_and_saved_local_names(cx.tcx, def_id))
            }
            _ => None,
        };
        let variant_info_for = |index: VariantIdx| match *self.enum_type.kind() {
            ty::Adt(adt, _) => VariantInfo::Adt(&adt.variants[index]),
            ty::Generator(def_id, _, _) => {
                let (generator_layout, generator_saved_local_names) =
                    generator_variant_info_data.as_ref().unwrap();
                VariantInfo::Generator {
                    def_id,
                    generator_layout: *generator_layout,
                    generator_saved_local_names,
                    variant_index: index,
                }
            }
            _ => bug!(),
        };
        // This will always find the metadata in the type map.
        let fallback = use_enum_fallback(cx);
        let self_metadata = if fallback {
            self.containing_scope
        } else {
            type_metadata(cx, self.enum_type, self.span)
        };
        let flags = match self.enum_type.kind() {
            ty::Generator(..) => DIFlags::FlagArtificial,
            _ => DIFlags::FlagZero,
        };
        match self.layout.variants {
            // Univariant: describe the single variant; no discriminant needed.
            Variants::Single { index } => {
                if let ty::Adt(adt, _) = self.enum_type.kind() {
                    if adt.variants.is_empty() {
                        return vec![];
                    }
                }
                let variant_info = variant_info_for(index);
                let (variant_type_metadata, member_description_factory) = describe_enum_variant(
                    cx,
                    self.layout,
                    variant_info,
                    NoTag,
                    self_metadata,
                    self.span,
                );
                let member_descriptions = member_description_factory.create_member_descriptions(cx);
                set_members_of_composite_type(
                    cx,
                    self.enum_type,
                    variant_type_metadata,
                    member_descriptions,
                );
                vec![MemberDescription {
                    name: if fallback { String::new() } else { variant_info.variant_name() },
                    type_metadata: variant_type_metadata,
                    offset: Size::ZERO,
                    size: self.layout.size,
                    align: self.layout.align.abi,
                    flags,
                    discriminant: None,
                    source_info: variant_info.source_info(cx),
                }]
            }
            // Directly-tagged enum: one member per variant, each carrying its
            // discriminant value.
            Variants::Multiple {
                tag_encoding: TagEncoding::Direct,
                tag_field,
                ref variants,
                ..
            } => {
                let tag_info = if fallback {
                    RegularTag {
                        tag_field: Field::from(tag_field),
                        tag_type_metadata: self.tag_type_metadata.unwrap(),
                    }
                } else {
                    // This doesn't matter in this case.
                    NoTag
                };
                variants
                    .iter_enumerated()
                    .map(|(i, _)| {
                        let variant = self.layout.for_variant(cx, i);
                        let variant_info = variant_info_for(i);
                        let (variant_type_metadata, member_desc_factory) = describe_enum_variant(
                            cx,
                            variant,
                            variant_info,
                            tag_info,
                            self_metadata,
                            self.span,
                        );
                        let member_descriptions =
                            member_desc_factory.create_member_descriptions(cx);
                        set_members_of_composite_type(
                            cx,
                            self.enum_type,
                            variant_type_metadata,
                            member_descriptions,
                        );
                        MemberDescription {
                            name: if fallback {
                                String::new()
                            } else {
                                variant_info.variant_name()
                            },
                            type_metadata: variant_type_metadata,
                            offset: Size::ZERO,
                            size: self.layout.size,
                            align: self.layout.align.abi,
                            flags,
                            discriminant: Some(
                                self.layout.ty.discriminant_for_variant(cx.tcx, i).unwrap().val
                                    as u64,
                            ),
                            source_info: variant_info.source_info(cx),
                        }
                    })
                    .collect()
            }
            // Niche-encoded enum: the tag lives inside the payload of the
            // `dataful_variant`; other variants are identified by niche values.
            Variants::Multiple {
                tag_encoding:
                    TagEncoding::Niche { ref niche_variants, niche_start, dataful_variant },
                ref tag,
                ref variants,
                tag_field,
            } => {
                if fallback {
                    let variant = self.layout.for_variant(cx, dataful_variant);
                    // Create a description of the non-null variant.
                    let (variant_type_metadata, member_description_factory) = describe_enum_variant(
                        cx,
                        variant,
                        variant_info_for(dataful_variant),
                        OptimizedTag,
                        self.containing_scope,
                        self.span,
                    );
                    let variant_member_descriptions =
                        member_description_factory.create_member_descriptions(cx);
                    set_members_of_composite_type(
                        cx,
                        self.enum_type,
                        variant_type_metadata,
                        variant_member_descriptions,
                    );
                    // Encode the information about the null variant in the union
                    // member's name.
                    let mut name = String::from("RUST$ENCODED$ENUM$");
                    // Right now it's not even going to work for `niche_start > 0`,
                    // and for multiple niche variants it only supports the first.
                    fn compute_field_path<'a, 'tcx>(
                        cx: &CodegenCx<'a, 'tcx>,
                        name: &mut String,
                        layout: TyAndLayout<'tcx>,
                        offset: Size,
                        size: Size,
                    ) {
                        for i in 0..layout.fields.count() {
                            let field_offset = layout.fields.offset(i);
                            if field_offset > offset {
                                continue;
                            }
                            let inner_offset = offset - field_offset;
                            let field = layout.field(cx, i);
                            if inner_offset + size <= field.size {
                                write!(name, "{}$", i).unwrap();
                                compute_field_path(cx, name, field, inner_offset, size);
                            }
                        }
                    }
                    compute_field_path(
                        cx,
                        &mut name,
                        self.layout,
                        self.layout.fields.offset(tag_field),
                        self.layout.field(cx, tag_field).size,
                    );
                    let variant_info = variant_info_for(*niche_variants.start());
                    variant_info.map_struct_name(|variant_name| {
                        name.push_str(variant_name);
                    });
                    // Create the (singleton) list of descriptions of union members.
                    vec![MemberDescription {
                        name,
                        type_metadata: variant_type_metadata,
                        offset: Size::ZERO,
                        size: variant.size,
                        align: variant.align.abi,
                        flags,
                        discriminant: None,
                        source_info: variant_info.source_info(cx),
                    }]
                } else {
                    variants
                        .iter_enumerated()
                        .map(|(i, _)| {
                            let variant = self.layout.for_variant(cx, i);
                            let variant_info = variant_info_for(i);
                            let (variant_type_metadata, member_desc_factory) =
                                describe_enum_variant(
                                    cx,
                                    variant,
                                    variant_info,
                                    OptimizedTag,
                                    self_metadata,
                                    self.span,
                                );
                            let member_descriptions =
                                member_desc_factory.create_member_descriptions(cx);
                            set_members_of_composite_type(
                                cx,
                                self.enum_type,
                                variant_type_metadata,
                                member_descriptions,
                            );
                            // The tag value selecting this variant, relative to
                            // the start of the niche range; the dataful variant
                            // has no tag value of its own.
                            let niche_value = if i == dataful_variant {
                                None
                            } else {
                                let value = (i.as_u32() as u128)
                                    .wrapping_sub(niche_variants.start().as_u32() as u128)
                                    .wrapping_add(niche_start);
                                let value = truncate(value, tag.value.size(cx));
                                // NOTE(eddyb) do *NOT* remove this assert, until
                                // we pass the full 128-bit value to LLVM, otherwise
                                // truncation will be silent and remain undetected.
                                assert_eq!(value as u64 as u128, value);
                                Some(value as u64)
                            };
                            MemberDescription {
                                name: variant_info.variant_name(),
                                type_metadata: variant_type_metadata,
                                offset: Size::ZERO,
                                size: self.layout.size,
                                align: self.layout.align.abi,
                                flags,
                                discriminant: niche_value,
                                source_info: variant_info.source_info(cx),
                            }
                        })
                        .collect()
                }
            }
        }
    }
}
// Creates `MemberDescription`s for the fields of a single enum variant.
struct VariantMemberDescriptionFactory<'ll, 'tcx> {
    /// Cloned from the `layout::Struct` describing the variant.
    offsets: Vec<Size>,
    /// (field name, field type) pairs in field order. Under the MSVC enum
    /// fallback, the artificial `RUST$ENUM$DISR` discriminant comes first.
    args: Vec<(String, Ty<'tcx>)>,
    /// Debuginfo for the discriminant's type; present only for a
    /// directly-stored tag (see `describe_enum_variant`).
    tag_type_metadata: Option<&'ll DIType>,
    span: Span,
}
impl VariantMemberDescriptionFactory<'ll, 'tcx> {
    /// Produces one `MemberDescription` per field of the variant. With the
    /// enum fallback encoding, the first field is the artificial
    /// discriminant.
    fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>) -> Vec<MemberDescription<'ll>> {
        let mut descriptions = Vec::with_capacity(self.args.len());
        for (i, (name, ty)) in self.args.iter().enumerate() {
            let ty = *ty;
            // Discriminant is always the first field of our variant
            // when using the enum fallback.
            let is_artificial_discr = use_enum_fallback(cx) && i == 0;
            let (size, align) = cx.size_and_align_of(ty);
            let member_type_metadata = if is_artificial_discr {
                self.tag_type_metadata.unwrap_or_else(|| type_metadata(cx, ty, self.span))
            } else {
                type_metadata(cx, ty, self.span)
            };
            descriptions.push(MemberDescription {
                name: name.clone(),
                type_metadata: member_type_metadata,
                offset: self.offsets[i],
                size,
                align,
                flags: if is_artificial_discr {
                    DIFlags::FlagArtificial
                } else {
                    DIFlags::FlagZero
                },
                discriminant: None,
                source_info: None,
            });
        }
        descriptions
    }
}
// FIXME: terminology here should be aligned with `abi::TagEncoding`.
// `OptimizedTag` is `TagEncoding::Niche`, `RegularTag` is `TagEncoding::Direct`.
// `NoTag` should be removed; users should use `Option<EnumTagInfo>` instead.
#[derive(Copy, Clone)]
enum EnumTagInfo<'ll> {
    /// A directly-stored tag: which field holds it and the debuginfo node
    /// for its type.
    RegularTag { tag_field: Field, tag_type_metadata: &'ll DIType },
    /// A niche-encoded tag.
    OptimizedTag,
    /// No tag, or "doesn't matter" in contexts that ignore the tag.
    NoTag,
}
/// A view of one variant for debuginfo purposes: either a regular ADT
/// variant definition or a synthesized generator state variant.
#[derive(Copy, Clone)]
enum VariantInfo<'a, 'tcx> {
    Adt(&'tcx ty::VariantDef),
    Generator {
        def_id: DefId,
        generator_layout: &'tcx GeneratorLayout<'tcx>,
        /// User-visible names of the generator's saved locals, if known.
        generator_saved_local_names: &'a IndexVec<mir::GeneratorSavedLocal, Option<Symbol>>,
        variant_index: VariantIdx,
    },
}
impl<'tcx> VariantInfo<'_, 'tcx> {
    /// Invokes `f` with the name used for the variant's *struct* in
    /// debuginfo: the variant identifier for ADTs, the generator variant
    /// name for generators.
    fn map_struct_name<R>(&self, f: impl FnOnce(&str) -> R) -> R {
        match self {
            VariantInfo::Adt(variant) => f(&variant.ident.as_str()),
            VariantInfo::Generator { variant_index, .. } => {
                f(&GeneratorSubsts::variant_name(*variant_index))
            }
        }
    }
    /// Returns the name used for the variant *member* of the enum union.
    fn variant_name(&self) -> String {
        match self {
            VariantInfo::Adt(variant) => variant.ident.to_string(),
            VariantInfo::Generator { variant_index, .. } => {
                // Since GDB currently prints out the raw discriminant along
                // with every variant, make each variant name be just the value
                // of the discriminant. The struct name for the variant includes
                // the actual variant description.
                variant_index.as_usize().to_string()
            }
        }
    }
    /// Returns the name of field `i`: the declared name for named ADT
    /// fields, the saved-local name for generator fields when one is known,
    /// and the positional `__{i}` fallback otherwise.
    fn field_name(&self, i: usize) -> String {
        let field_name = match *self {
            VariantInfo::Adt(variant) if variant.ctor_kind != CtorKind::Fn => {
                Some(variant.fields[i].ident.name)
            }
            VariantInfo::Generator {
                generator_layout,
                generator_saved_local_names,
                variant_index,
                ..
            } => {
                generator_saved_local_names
                    [generator_layout.variant_fields[variant_index][i.into()]]
            }
            _ => None,
        };
        field_name.map(|name| name.to_string()).unwrap_or_else(|| format!("__{}", i))
    }
    /// Returns the source location of a generator variant, if it has a
    /// non-dummy span; ADT variants always yield `None`.
    fn source_info(&self, cx: &CodegenCx<'ll, 'tcx>) -> Option<SourceInfo<'ll>> {
        if let VariantInfo::Generator { def_id, variant_index, .. } = self {
            let span =
                cx.tcx.generator_layout(*def_id).variant_source_info[*variant_index].span;
            if !span.is_dummy() {
                let loc = cx.lookup_debug_loc(span.lo());
                return Some(SourceInfo {
                    file: file_metadata(cx, &loc.file),
                    line: loc.line.unwrap_or(UNKNOWN_LINE_NUMBER),
                });
            }
        }
        None
    }
    /// Whether this variant is compiler-generated (generator variants are).
    fn is_artificial(&self) -> bool {
        matches!(self, VariantInfo::Generator { .. })
    }
}
/// Returns a tuple of (1) `type_metadata_stub` of the variant, (2) a
/// `MemberDescriptionFactory` for producing the descriptions of the
/// fields of the variant. This is a rudimentary version of a full
/// `RecursiveTypeDescription`.
///
/// With the MSVC enum fallback and a directly-stored tag
/// (`RegularTag`), an artificial `RUST$ENUM$DISR` field holding the
/// discriminant is prepended to the variant's own fields.
fn describe_enum_variant(
    cx: &CodegenCx<'ll, 'tcx>,
    layout: layout::TyAndLayout<'tcx>,
    variant: VariantInfo<'_, 'tcx>,
    discriminant_info: EnumTagInfo<'ll>,
    containing_scope: &'ll DIScope,
    span: Span,
) -> (&'ll DICompositeType, MemberDescriptionFactory<'ll, 'tcx>) {
    // Create an empty struct node for the variant; its fields are attached
    // later from the factory returned below.
    let metadata_stub = variant.map_struct_name(|variant_name| {
        let unique_type_id = debug_context(cx)
            .type_map
            .borrow_mut()
            .get_unique_type_id_of_enum_variant(cx, layout.ty, &variant_name);
        create_struct_stub(
            cx,
            layout.ty,
            &variant_name,
            unique_type_id,
            Some(containing_scope),
            // FIXME(tmandry): This doesn't seem to have any effect.
            if variant.is_artificial() { DIFlags::FlagArtificial } else { DIFlags::FlagZero },
        )
    });
    // Build an array of (field name, field type) pairs to be captured in the factory closure.
    let (offsets, args) = if use_enum_fallback(cx) {
        // If this is not a univariant enum, there is also the discriminant field.
        let (discr_offset, discr_arg) = match discriminant_info {
            RegularTag { tag_field, .. } => {
                // We have the layout of an enum variant, we need the layout of the outer enum
                let enum_layout = cx.layout_of(layout.ty);
                let offset = enum_layout.fields.offset(tag_field.as_usize());
                let args =
                    ("RUST$ENUM$DISR".to_owned(), enum_layout.field(cx, tag_field.as_usize()).ty);
                (Some(offset), Some(args))
            }
            _ => (None, None),
        };
        (
            discr_offset
                .into_iter()
                .chain((0..layout.fields.count()).map(|i| layout.fields.offset(i)))
                .collect(),
            discr_arg
                .into_iter()
                .chain(
                    (0..layout.fields.count())
                        .map(|i| (variant.field_name(i), layout.field(cx, i).ty)),
                )
                .collect(),
        )
    } else {
        (
            (0..layout.fields.count()).map(|i| layout.fields.offset(i)).collect(),
            (0..layout.fields.count())
                .map(|i| (variant.field_name(i), layout.field(cx, i).ty))
                .collect(),
        )
    };
    let member_description_factory = VariantMDF(VariantMemberDescriptionFactory {
        offsets,
        args,
        tag_type_metadata: match discriminant_info {
            RegularTag { tag_type_metadata, .. } => Some(tag_type_metadata),
            _ => None,
        },
        span,
    });
    (metadata_stub, member_description_factory)
}
/// Builds the debuginfo node for an enum (or generator) type and registers
/// a factory that fills in the variant members later. Depending on target
/// and layout this produces: a bare enumeration type (C-like enums), a
/// union of variant structs (MSVC fallback), or a struct wrapping a DWARF
/// variant part.
fn prepare_enum_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    enum_type: Ty<'tcx>,
    enum_def_id: DefId,
    unique_type_id: UniqueTypeId,
    span: Span,
    outer_field_tys: Vec<Ty<'tcx>>,
) -> RecursiveTypeDescription<'ll, 'tcx> {
    let tcx = cx.tcx;
    let enum_name = compute_debuginfo_type_name(tcx, enum_type, false);
    // FIXME(tmandry): This doesn't seem to have any effect.
    let enum_flags = match enum_type.kind() {
        ty::Generator(..) => DIFlags::FlagArtificial,
        _ => DIFlags::FlagZero,
    };
    let containing_scope = get_namespace_for_item(cx, enum_def_id);
    // FIXME: This should emit actual file metadata for the enum, but we
    // currently can't get the necessary information when it comes to types
    // imported from other crates. Formerly we violated the ODR when performing
    // LTO because we emitted debuginfo for the same type with varying file
    // metadata, so as a workaround we pretend that the type comes from
    // <unknown>
    let file_metadata = unknown_file_metadata(cx);
    // Creates (and caches per `(enum_def_id, discr)`) the DWARF enumeration
    // type describing the discriminant's possible values.
    let discriminant_type_metadata = |discr: Primitive| {
        let enumerators_metadata: Vec<_> = match enum_type.kind() {
            ty::Adt(def, _) => def
                .discriminants(tcx)
                .zip(&def.variants)
                .map(|((_, discr), v)| {
                    let name = v.ident.as_str();
                    let is_unsigned = match discr.ty.kind() {
                        ty::Int(_) => false,
                        ty::Uint(_) => true,
                        _ => bug!("non integer discriminant"),
                    };
                    unsafe {
                        Some(llvm::LLVMRustDIBuilderCreateEnumerator(
                            DIB(cx),
                            name.as_ptr().cast(),
                            name.len(),
                            // FIXME: what if enumeration has i128 discriminant?
                            discr.val as i64,
                            is_unsigned,
                        ))
                    }
                })
                .collect(),
            ty::Generator(_, substs, _) => substs
                .as_generator()
                .variant_range(enum_def_id, tcx)
                .map(|variant_index| {
                    debug_assert_eq!(tcx.types.u32, substs.as_generator().discr_ty(tcx));
                    let name = GeneratorSubsts::variant_name(variant_index);
                    unsafe {
                        Some(llvm::LLVMRustDIBuilderCreateEnumerator(
                            DIB(cx),
                            name.as_ptr().cast(),
                            name.len(),
                            // Generators use u32 as discriminant type, verified above.
                            variant_index.as_u32().into(),
                            true, // IsUnsigned
                        ))
                    }
                })
                .collect(),
            _ => bug!(),
        };
        let disr_type_key = (enum_def_id, discr);
        let cached_discriminant_type_metadata =
            debug_context(cx).created_enum_disr_types.borrow().get(&disr_type_key).cloned();
        match cached_discriminant_type_metadata {
            Some(discriminant_type_metadata) => discriminant_type_metadata,
            None => {
                let (discriminant_size, discriminant_align) = (discr.size(cx), discr.align(cx));
                let discriminant_base_type_metadata =
                    type_metadata(cx, discr.to_ty(tcx), rustc_span::DUMMY_SP);
                let item_name;
                let discriminant_name = match enum_type.kind() {
                    ty::Adt(..) => {
                        item_name = tcx.item_name(enum_def_id).as_str();
                        &*item_name
                    }
                    ty::Generator(..) => enum_name.as_str(),
                    _ => bug!(),
                };
                let discriminant_type_metadata = unsafe {
                    llvm::LLVMRustDIBuilderCreateEnumerationType(
                        DIB(cx),
                        containing_scope,
                        discriminant_name.as_ptr().cast(),
                        discriminant_name.len(),
                        file_metadata,
                        UNKNOWN_LINE_NUMBER,
                        discriminant_size.bits(),
                        discriminant_align.abi.bits() as u32,
                        create_DIArray(DIB(cx), &enumerators_metadata),
                        discriminant_base_type_metadata,
                        true,
                    )
                };
                debug_context(cx)
                    .created_enum_disr_types
                    .borrow_mut()
                    .insert(disr_type_key, discriminant_type_metadata);
                discriminant_type_metadata
            }
        }
    };
    let layout = cx.layout_of(enum_type);
    // A directly-tagged enum whose layout is a plain scalar carries no
    // payload anywhere; the discriminant's enumeration type is the whole
    // debuginfo for it.
    if let (
        &Abi::Scalar(_),
        &Variants::Multiple { tag_encoding: TagEncoding::Direct, ref tag, .. },
    ) = (&layout.abi, &layout.variants)
    {
        return FinalMetadata(discriminant_type_metadata(tag.value));
    }
    // MSVC fallback: encode the enum as a union of variant structs instead
    // of a DWARF variant part (see `use_enum_fallback`).
    if use_enum_fallback(cx) {
        let discriminant_type_metadata = match layout.variants {
            Variants::Single { .. }
            | Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => None,
            Variants::Multiple { tag_encoding: TagEncoding::Direct, ref tag, .. } => {
                Some(discriminant_type_metadata(tag.value))
            }
        };
        let enum_metadata = {
            let type_map = debug_context(cx).type_map.borrow();
            let unique_type_id_str = type_map.get_unique_type_id_as_string(unique_type_id);
            unsafe {
                llvm::LLVMRustDIBuilderCreateUnionType(
                    DIB(cx),
                    containing_scope,
                    enum_name.as_ptr().cast(),
                    enum_name.len(),
                    file_metadata,
                    UNKNOWN_LINE_NUMBER,
                    layout.size.bits(),
                    layout.align.abi.bits() as u32,
                    enum_flags,
                    None,
                    0, // RuntimeLang
                    unique_type_id_str.as_ptr().cast(),
                    unique_type_id_str.len(),
                )
            }
        };
        return create_and_register_recursive_type_forward_declaration(
            cx,
            enum_type,
            unique_type_id,
            enum_metadata,
            enum_metadata,
            EnumMDF(EnumMemberDescriptionFactory {
                enum_type,
                layout,
                tag_type_metadata: discriminant_type_metadata,
                containing_scope,
                span,
            }),
        );
    }
    let discriminator_name = match enum_type.kind() {
        ty::Generator(..) => "__state",
        _ => "",
    };
    // Member node describing the discriminant itself (if any), later passed
    // to `LLVMRustDIBuilderCreateVariantPart`.
    let discriminator_metadata = match layout.variants {
        // A single-variant enum has no discriminant.
        Variants::Single { .. } => None,
        Variants::Multiple {
            tag_encoding: TagEncoding::Niche { .. }, ref tag, tag_field, ..
        } => {
            // Find the integer type of the correct size.
            // (At the ABI level the niche tag may be a float or a pointer;
            // debuginfo describes it with an integer of the same size.)
            let size = tag.value.size(cx);
            let align = tag.value.align(cx);
            let tag_type = match tag.value {
                Int(t, _) => t,
                F32 => Integer::I32,
                F64 => Integer::I64,
                Pointer => cx.data_layout().ptr_sized_integer(),
            }
            .to_ty(cx.tcx, false);
            let tag_metadata = basic_type_metadata(cx, tag_type);
            unsafe {
                Some(llvm::LLVMRustDIBuilderCreateMemberType(
                    DIB(cx),
                    containing_scope,
                    discriminator_name.as_ptr().cast(),
                    discriminator_name.len(),
                    file_metadata,
                    UNKNOWN_LINE_NUMBER,
                    size.bits(),
                    align.abi.bits() as u32,
                    layout.fields.offset(tag_field).bits(),
                    DIFlags::FlagArtificial,
                    tag_metadata,
                ))
            }
        }
        Variants::Multiple { tag_encoding: TagEncoding::Direct, ref tag, tag_field, .. } => {
            let discr_type = tag.value.to_ty(cx.tcx);
            let (size, align) = cx.size_and_align_of(discr_type);
            let discr_metadata = basic_type_metadata(cx, discr_type);
            unsafe {
                Some(llvm::LLVMRustDIBuilderCreateMemberType(
                    DIB(cx),
                    containing_scope,
                    discriminator_name.as_ptr().cast(),
                    discriminator_name.len(),
                    file_metadata,
                    UNKNOWN_LINE_NUMBER,
                    size.bits(),
                    align.bits() as u32,
                    layout.fields.offset(tag_field).bits(),
                    DIFlags::FlagArtificial,
                    discr_metadata,
                ))
            }
        }
    };
    // Fields of the wrapper struct that sit alongside the variant part
    // (used by generators, whose outer fields are shared across variants).
    let mut outer_fields = match layout.variants {
        Variants::Single { .. } => vec![],
        Variants::Multiple { .. } => {
            let tuple_mdf = TupleMemberDescriptionFactory {
                ty: enum_type,
                component_types: outer_field_tys,
                span,
            };
            tuple_mdf
                .create_member_descriptions(cx)
                .into_iter()
                .map(|desc| Some(desc.into_metadata(cx, containing_scope)))
                .collect()
        }
    };
    let variant_part_unique_type_id_str = debug_context(cx)
        .type_map
        .borrow_mut()
        .get_unique_type_id_str_of_enum_variant_part(unique_type_id);
    let empty_array = create_DIArray(DIB(cx), &[]);
    let name = "";
    // The DWARF variant part; its variant members are attached later by the
    // `EnumMemberDescriptionFactory` registered below.
    let variant_part = unsafe {
        llvm::LLVMRustDIBuilderCreateVariantPart(
            DIB(cx),
            containing_scope,
            name.as_ptr().cast(),
            name.len(),
            file_metadata,
            UNKNOWN_LINE_NUMBER,
            layout.size.bits(),
            layout.align.abi.bits() as u32,
            enum_flags,
            discriminator_metadata,
            empty_array,
            variant_part_unique_type_id_str.as_ptr().cast(),
            variant_part_unique_type_id_str.len(),
        )
    };
    outer_fields.push(Some(variant_part));
    let struct_wrapper = {
        // The variant part must be wrapped in a struct according to DWARF.
        let type_array = create_DIArray(DIB(cx), &outer_fields);
        let type_map = debug_context(cx).type_map.borrow();
        let unique_type_id_str = type_map.get_unique_type_id_as_string(unique_type_id);
        unsafe {
            llvm::LLVMRustDIBuilderCreateStructType(
                DIB(cx),
                Some(containing_scope),
                enum_name.as_ptr().cast(),
                enum_name.len(),
                file_metadata,
                UNKNOWN_LINE_NUMBER,
                layout.size.bits(),
                layout.align.abi.bits() as u32,
                enum_flags,
                None,
                type_array,
                0,
                None,
                unique_type_id_str.as_ptr().cast(),
                unique_type_id_str.len(),
            )
        }
    };
    create_and_register_recursive_type_forward_declaration(
        cx,
        enum_type,
        unique_type_id,
        struct_wrapper,
        variant_part,
        EnumMDF(EnumMemberDescriptionFactory {
            enum_type,
            layout,
            tag_type_metadata: None,
            containing_scope,
            span,
        }),
    )
}
/// Creates debug information for a composite type, that is, anything that
/// results in a LLVM struct.
///
/// Examples of Rust types to use this are: structs, tuples, boxes, vecs, and enums.
fn composite_type_metadata(
    cx: &CodegenCx<'ll, 'tcx>,
    composite_type: Ty<'tcx>,
    composite_type_name: &str,
    composite_type_unique_id: UniqueTypeId,
    member_descriptions: Vec<MemberDescription<'ll>>,
    containing_scope: Option<&'ll DIScope>,
    // Ignore source location information as long as it
    // can't be reconstructed for non-local crates.
    _file_metadata: &'ll DIFile,
    _definition_span: Span,
) -> &'ll DICompositeType {
    // Build the empty struct node first, then attach the member
    // descriptions to it right away.
    let metadata = create_struct_stub(
        cx,
        composite_type,
        composite_type_name,
        composite_type_unique_id,
        containing_scope,
        DIFlags::FlagZero,
    );
    set_members_of_composite_type(cx, composite_type, metadata, member_descriptions);
    metadata
}
/// Fills in the member and type-parameter arrays of a composite type node
/// that was previously created (e.g. via `create_struct_stub`). ICEs if the
/// same composite node is completed a second time.
fn set_members_of_composite_type(
    cx: &CodegenCx<'ll, 'tcx>,
    composite_type: Ty<'tcx>,
    composite_type_metadata: &'ll DICompositeType,
    member_descriptions: Vec<MemberDescription<'ll>>,
) {
    // In some rare cases LLVM metadata uniquing would lead to an existing type
    // description being used instead of a new one created in
    // create_struct_stub. This would cause a hard to trace assertion in
    // DICompositeType::SetTypeArray(). The following check makes sure that we
    // get a better error message if this should happen again due to some
    // regression.
    {
        let mut composite_types_completed =
            debug_context(cx).composite_types_completed.borrow_mut();
        if !composite_types_completed.insert(&composite_type_metadata) {
            bug!(
                "debuginfo::set_members_of_composite_type() - \
                 Already completed forward declaration re-encountered."
            );
        }
    }
    // Lower each description to an LLVM member node under the composite.
    let member_metadata: Vec<_> = member_descriptions
        .into_iter()
        .map(|desc| Some(desc.into_metadata(cx, composite_type_metadata)))
        .collect();
    let type_params = compute_type_parameters(cx, composite_type);
    unsafe {
        let type_array = create_DIArray(DIB(cx), &member_metadata[..]);
        llvm::LLVMRustDICompositeTypeReplaceArrays(
            DIB(cx),
            composite_type_metadata,
            Some(type_array),
            type_params,
        );
    }
}
/// Computes the type parameters for a type, if any, for the given metadata.
///
/// Always returns `Some`; when the type has no type parameters the returned
/// `DIArray` is simply empty.
fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'ll DIArray> {
    if let ty::Adt(def, substs) = *ty.kind() {
        if substs.types().next().is_some() {
            let generics = cx.tcx.generics_of(def.did);
            let names = get_parameter_names(cx, generics);
            // Only type arguments become template parameters; lifetimes and
            // consts are filtered out by the `GenericArgKind::Type` check.
            let template_params: Vec<_> = substs
                .iter()
                .zip(names)
                .filter_map(|(kind, name)| {
                    if let GenericArgKind::Type(ty) = kind.unpack() {
                        let actual_type =
                            cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty);
                        let actual_type_metadata =
                            type_metadata(cx, actual_type, rustc_span::DUMMY_SP);
                        let name = &name.as_str();
                        // The inner `Some` is deliberate: `create_DIArray`
                        // takes a slice of `Option`al references. The outer
                        // `Some` just feeds `filter_map`.
                        Some(unsafe {
                            Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
                                DIB(cx),
                                None,
                                name.as_ptr().cast(),
                                name.len(),
                                actual_type_metadata,
                            ))
                        })
                    } else {
                        None
                    }
                })
                .collect();
            return Some(create_DIArray(DIB(cx), &template_params[..]));
        }
    }
    // No type parameters: return an empty array rather than `None`. The
    // explicit `return` is required because a nested item follows.
    return Some(create_DIArray(DIB(cx), &[]));

    // Collects the parameter names of `generics`, including any parent
    // generics, outermost first.
    fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {
        let mut names = generics
            .parent
            .map_or(vec![], |def_id| get_parameter_names(cx, cx.tcx.generics_of(def_id)));
        names.extend(generics.params.iter().map(|param| param.name));
        names
    }
}
/// A convenience wrapper around `LLVMRustDIBuilderCreateStructType()`. Does not do
/// any caching, does not add any fields to the struct. This can be done later
/// with `set_members_of_composite_type()`.
fn create_struct_stub(
    cx: &CodegenCx<'ll, 'tcx>,
    struct_type: Ty<'tcx>,
    struct_type_name: &str,
    unique_type_id: UniqueTypeId,
    containing_scope: Option<&'ll DIScope>,
    flags: DIFlags,
) -> &'ll DICompositeType {
    let (struct_size, struct_align) = cx.size_and_align_of(struct_type);
    // NOTE(review): `unique_type_id` appears to borrow from `type_map`, so the
    // borrow guard must stay alive until the FFI call below — confirm.
    let type_map = debug_context(cx).type_map.borrow();
    let unique_type_id = type_map.get_unique_type_id_as_string(unique_type_id);
    let metadata_stub = unsafe {
        // `LLVMRustDIBuilderCreateStructType()` wants an empty array. A null
        // pointer will lead to hard to trace and debug LLVM assertions
        // later on in `llvm/lib/IR/Value.cpp`.
        let empty_array = create_DIArray(DIB(cx), &[]);
        llvm::LLVMRustDIBuilderCreateStructType(
            DIB(cx),
            containing_scope,
            struct_type_name.as_ptr().cast(),
            struct_type_name.len(),
            unknown_file_metadata(cx),
            UNKNOWN_LINE_NUMBER,
            struct_size.bits(),
            struct_align.bits() as u32,
            flags,
            None,
            empty_array,
            0,
            None,
            unique_type_id.as_ptr().cast(),
            unique_type_id.len(),
        )
    };
    metadata_stub
}
/// Like `create_struct_stub`, but builds an (initially member-less) union
/// type node via `LLVMRustDIBuilderCreateUnionType()`.
fn create_union_stub(
    cx: &CodegenCx<'ll, 'tcx>,
    union_type: Ty<'tcx>,
    union_type_name: &str,
    unique_type_id: UniqueTypeId,
    containing_scope: &'ll DIScope,
) -> &'ll DICompositeType {
    let (union_size, union_align) = cx.size_and_align_of(union_type);
    let type_map = debug_context(cx).type_map.borrow();
    let unique_type_id = type_map.get_unique_type_id_as_string(unique_type_id);
    let metadata_stub = unsafe {
        // `LLVMRustDIBuilderCreateUnionType()` wants an empty array. A null
        // pointer will lead to hard to trace and debug LLVM assertions
        // later on in `llvm/lib/IR/Value.cpp`.
        let empty_array = create_DIArray(DIB(cx), &[]);
        llvm::LLVMRustDIBuilderCreateUnionType(
            DIB(cx),
            containing_scope,
            union_type_name.as_ptr().cast(),
            union_type_name.len(),
            unknown_file_metadata(cx),
            UNKNOWN_LINE_NUMBER,
            union_size.bits(),
            union_align.bits() as u32,
            DIFlags::FlagZero,
            Some(empty_array),
            0, // RuntimeLang
            unique_type_id.as_ptr().cast(),
            unique_type_id.len(),
        )
    };
    metadata_stub
}
/// Creates debug information for the given global variable.
///
/// Adds the created metadata nodes directly to the crate's IR.
/// No-op unless a debug context exists and full debuginfo is requested.
pub fn create_global_var_metadata(cx: &CodegenCx<'ll, '_>, def_id: DefId, global: &'ll Value) {
    if cx.dbg_cx.is_none() {
        return;
    }
    // Only create type information if full debuginfo is enabled
    if cx.sess().opts.debuginfo != DebugInfo::Full {
        return;
    }
    let tcx = cx.tcx;
    // We may want to remove the namespace scope if we're in an extern block (see
    // https://github.com/rust-lang/rust/pull/46457#issuecomment-351750952).
    let var_scope = get_namespace_for_item(cx, def_id);
    let span = tcx.def_span(def_id);
    // Fall back to "unknown file" metadata when the span doesn't map to a
    // real source location.
    let (file_metadata, line_number) = if !span.is_dummy() {
        let loc = cx.lookup_debug_loc(span.lo());
        (file_metadata(cx, &loc.file), loc.line)
    } else {
        (unknown_file_metadata(cx), None)
    };
    let is_local_to_unit = is_node_local_to_unit(cx, def_id);
    let variable_type = Instance::mono(cx.tcx, def_id).ty(cx.tcx, ty::ParamEnv::reveal_all());
    let type_metadata = type_metadata(cx, variable_type, span);
    let var_name = tcx.item_name(def_id).as_str();
    let linkage_name = mangled_name_of_instance(cx, Instance::mono(tcx, def_id)).name;
    // When empty, linkage_name field is omitted,
    // which is what we want for no_mangle statics
    let linkage_name = if var_name == linkage_name { "" } else { linkage_name };
    let global_align = cx.align_of(variable_type);
    unsafe {
        llvm::LLVMRustDIBuilderCreateStaticVariable(
            DIB(cx),
            Some(var_scope),
            var_name.as_ptr().cast(),
            var_name.len(),
            linkage_name.as_ptr().cast(),
            linkage_name.len(),
            file_metadata,
            line_number.unwrap_or(UNKNOWN_LINE_NUMBER),
            type_metadata,
            is_local_to_unit,
            global,
            None,
            global_align.bytes() as u32,
        );
    }
}
/// Creates debug information for the given vtable, which is for the
/// given type.
///
/// Adds the created metadata nodes directly to the crate's IR.
/// No-op unless a debug context exists and full debuginfo is requested.
pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &'ll Value) {
    if cx.dbg_cx.is_none() {
        return;
    }
    // Only create type information if full debuginfo is enabled
    if cx.sess().opts.debuginfo != DebugInfo::Full {
        return;
    }
    let type_metadata = type_metadata(cx, ty, rustc_span::DUMMY_SP);
    unsafe {
        // `LLVMRustDIBuilderCreateStructType()` wants an empty array. A null
        // pointer will lead to hard to trace and debug LLVM assertions
        // later on in `llvm/lib/IR/Value.cpp`.
        let empty_array = create_DIArray(DIB(cx), &[]);
        let name = "vtable";
        // Create a new one each time. We don't want metadata caching
        // here, because each vtable will refer to a unique containing
        // type.
        let vtable_type = llvm::LLVMRustDIBuilderCreateStructType(
            DIB(cx),
            NO_SCOPE_METADATA,
            name.as_ptr().cast(),
            name.len(),
            unknown_file_metadata(cx),
            UNKNOWN_LINE_NUMBER,
            Size::ZERO.bits(),
            cx.tcx.data_layout.pointer_align.abi.bits() as u32,
            DIFlags::FlagArtificial,
            None,
            empty_array,
            0,
            Some(type_metadata),
            name.as_ptr().cast(),
            name.len(),
        );
        // The vtable itself is registered as a static variable of the
        // artificial struct type created above.
        let linkage_name = "";
        llvm::LLVMRustDIBuilderCreateStaticVariable(
            DIB(cx),
            NO_SCOPE_METADATA,
            name.as_ptr().cast(),
            name.len(),
            linkage_name.as_ptr().cast(),
            linkage_name.len(),
            unknown_file_metadata(cx),
            UNKNOWN_LINE_NUMBER,
            vtable_type,
            true,
            vtable,
            None,
            0,
        );
    }
}
/// Creates an "extension" of an existing `DIScope` into another file.
pub fn extend_scope_to_file(
cx: &CodegenCx<'ll, '_>,
scope_metadata: &'ll DIScope,
file: &SourceFile,
) -> &'ll DILexicalBlock {
let file_metadata = file_metadata(cx, file);
unsafe { llvm::LLVMRustDIBuilderCreateLexicalBlockFile(DIB(cx), scope_metadata, file_metadata) }
}
| 36.953894 | 100 | 0.548816 |
8f626022059ff9bf50f2923d081125ef978947cd | 3,967 | // Generated from definition io.k8s.api.extensions.v1beta1.FSGroupStrategyOptions
/// FSGroupStrategyOptions defines the strategy type and options used to create the strategy.
// Both fields are optional; when `None` they are omitted from the serialized
// output (see the `Serialize` impl below).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct FSGroupStrategyOptions {
    /// Ranges are the allowed ranges of fs groups. If you would like to force a single fs group then supply a single range with the same start and end.
    pub ranges: Option<Vec<crate::v1_10::api::extensions::v1beta1::IDRange>>,

    /// Rule is the strategy that will dictate what FSGroup is used in the SecurityContext.
    pub rule: Option<String>,
}
// Hand-rolled (generated) serde visitor: recognizes the "ranges" and "rule"
// keys and silently ignores any other field via `Field::Other`.
impl<'de> serde::Deserialize<'de> for FSGroupStrategyOptions {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        #[allow(non_camel_case_types)]
        enum Field {
            Key_ranges,
            Key_rule,
            Other,
        }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;

                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;

                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(f, "field identifier")
                    }

                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        // Unknown keys map to `Other` instead of erroring.
                        Ok(match v {
                            "ranges" => Field::Key_ranges,
                            "rule" => Field::Key_rule,
                            _ => Field::Other,
                        })
                    }
                }

                deserializer.deserialize_identifier(Visitor)
            }
        }

        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = FSGroupStrategyOptions;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "struct FSGroupStrategyOptions")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                // Both fields are optional, so keys that never appear simply
                // leave the corresponding value as `None`.
                let mut value_ranges: Option<Vec<crate::v1_10::api::extensions::v1beta1::IDRange>> = None;
                let mut value_rule: Option<String> = None;

                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_ranges => value_ranges = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_rule => value_rule = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }

                Ok(FSGroupStrategyOptions {
                    ranges: value_ranges,
                    rule: value_rule,
                })
            }
        }

        deserializer.deserialize_struct(
            "FSGroupStrategyOptions",
            &[
                "ranges",
                "rule",
            ],
            Visitor,
        )
    }
}
// Serializes only the fields that are present, so `None` values are omitted
// from the output entirely.
impl serde::Serialize for FSGroupStrategyOptions {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        // The struct serializer needs the number of fields up front.
        let field_count =
            usize::from(self.ranges.is_some()) + usize::from(self.rule.is_some());
        let mut state = serializer.serialize_struct("FSGroupStrategyOptions", field_count)?;
        if let Some(ranges) = &self.ranges {
            serde::ser::SerializeStruct::serialize_field(&mut state, "ranges", ranges)?;
        }
        if let Some(rule) = &self.rule {
            serde::ser::SerializeStruct::serialize_field(&mut state, "rule", rule)?;
        }
        serde::ser::SerializeStruct::end(state)
    }
}
| 39.277228 | 153 | 0.531132 |
/// Maps flat character offsets in a text to 1-based (line, column) positions.
pub struct LineText {
    /// Width of each line in chars, *including* one extra for its newline
    /// (the final line also gets the implicit +1).
    line_width: Vec<usize>,
}

impl LineText {
    /// Builds the line-width index for `text`.
    pub fn new(text: String) -> LineText {
        LineText {
            line_width: LineText::calculate_line_width(text),
        }
    }

    /// Internally used to calculate the line widths of the text: each line
    /// contributes its char count plus one (for the newline, or the implicit
    /// terminator on the last line).
    fn calculate_line_width(input: String) -> Vec<usize> {
        input
            .split('\n')
            .map(|line| line.chars().count() + 1)
            .collect()
    }

    /// Returns the (line number, column number) of the token at the given
    /// flat char index `start`; both are 1-based.
    pub fn get_point(&self, start: usize) -> (usize, usize) {
        let mut offset = 0;
        // Walk the lines until the one containing `start` is found.
        for (index, width) in self.line_width.iter().enumerate() {
            if offset + width > start {
                return (index + 1, start - offset + 1);
            }
            offset += width;
        }
        // `start` lies past the end of the text: report a position on a
        // virtual line after the last one. (The original code had an extra
        // `sum - start + 1` branch here that was only reachable when
        // `start == sum`, where both branches yield 1 — it was redundant.)
        (self.line_width.len() + 1, start - offset + 1)
    }
}
8a7cc09230edddd8b96e7717ad235ad6b98f4b67 | 2,228 | //! Array declaration node.
use super::{join_nodes, Node};
use boa_gc::{Finalize, Trace};
use boa_interner::{Interner, ToInternedString};
#[cfg(feature = "deser")]
use serde::{Deserialize, Serialize};
#[cfg(test)]
mod tests;
/// An array is an ordered collection of data (either primitive or object depending upon the
/// language).
///
/// Arrays are used to store multiple values in a single variable.
/// This is compared to a variable that can store only one value.
///
/// Each item in an array has a number attached to it, called a numeric index, that allows you
/// to access it. In JavaScript, arrays start at index zero and can be manipulated with various
/// methods.
///
/// More information:
///  - [ECMAScript reference][spec]
///  - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#prod-ArrayLiteral
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub struct ArrayDecl {
    /// The array literal's element nodes, in source order.
    arr: Box<[Node]>,
    /// `true` if a spread element in the literal has a trailing comma,
    /// which is a syntax error in some cases.
    has_trailing_comma_spread: bool,
}
impl ArrayDecl {
    /// Creates a new array declaration node from its element list.
    pub(crate) fn new<A>(array: A, has_trailing_comma_spread: bool) -> Self
    where
        A: Into<Box<[Node]>>,
    {
        let arr = array.into();
        Self {
            has_trailing_comma_spread,
            arr,
        }
    }

    /// Indicates if a spread operator in the array literal has a trailing
    /// comma. This is a syntax error in some cases.
    pub(crate) fn has_trailing_comma_spread(&self) -> bool {
        self.has_trailing_comma_spread
    }
}
impl AsRef<[Node]> for ArrayDecl {
fn as_ref(&self) -> &[Node] {
&self.arr
}
}
impl<T> From<T> for ArrayDecl
where
T: Into<Box<[Node]>>,
{
fn from(decl: T) -> Self {
Self {
arr: decl.into(),
has_trailing_comma_spread: false,
}
}
}
impl ToInternedString for ArrayDecl {
fn to_interned_string(&self, interner: &Interner) -> String {
format!("[{}]", join_nodes(interner, &self.arr))
}
}
impl From<ArrayDecl> for Node {
fn from(arr: ArrayDecl) -> Self {
Self::ArrayDecl(arr)
}
}
| 26.52381 | 97 | 0.643627 |
16f7def8d0520e6af4bfbcfbafb6194b0f5c291a | 411 | use crate::physical_plan::state::ExecutionState;
use crate::prelude::*;
use polars_core::prelude::*;
pub(crate) struct UdfExec {
pub(crate) input: Box<dyn Executor>,
pub(crate) function: Arc<dyn DataFrameUdf>,
}
impl Executor for UdfExec {
fn execute(&mut self, state: &ExecutionState) -> Result<DataFrame> {
let df = self.input.execute(state)?;
self.function.call_udf(df)
}
}
| 25.6875 | 72 | 0.676399 |
acbb6ad45e7b5e45be3e20231c92f7fc9bf486b7 | 6,024 | #[macro_use]
extern crate glium;
extern crate arcball;
extern crate cgmath;
use glium::Surface;
use glium::index::PrimitiveType;
use glium::glutin::{self, ElementState, Event, VirtualKeyCode, MouseButton, MouseScrollDelta};
use cgmath::{Point3, Vector3, Vector2, Matrix4};
use arcball::ArcballCamera;
/// Per-vertex data for the cube: position and RGB color.
#[derive(Copy, Clone)]
struct Vertex {
    pos: [f32; 3],
    color: [f32; 3],
}

// Registers `pos` and `color` as glium vertex attributes for `Vertex`.
implement_vertex!(Vertex, pos, color);
/// Renders a colored cube and drives an arcball camera from mouse input:
/// left-drag rotates, right-drag pans, scroll zooms, Esc/close quits.
fn main() {
    use glium::DisplayBuild;

    let display = glutin::WindowBuilder::new()
        .with_title("Arcball Camera Cube Example")
        .build_glium()
        .unwrap();

    // Hard-coded cube triangle strip
    let vertex_buffer = glium::VertexBuffer::new(&display,
        &[Vertex { pos: [1.0, 1.0, -1.0], color: [1.0, 0.0, 0.0] },
          Vertex { pos: [-1.0, 1.0, -1.0], color: [1.0, 0.0, 0.0] },
          Vertex { pos: [1.0, 1.0, 1.0], color: [1.0, 0.0, 0.0] },
          Vertex { pos: [-1.0, 1.0, 1.0], color: [0.0, 1.0, 0.0] },
          Vertex { pos: [-1.0, -1.0, 1.0], color: [0.0, 1.0, 0.0] },
          Vertex { pos: [-1.0, 1.0, -1.0], color: [0.0, 1.0, 0.0] },
          Vertex { pos: [-1.0, -1.0, -1.0], color: [0.0, 0.0, 1.0] },
          Vertex { pos: [1.0, 1.0, -1.0], color: [0.0, 0.0, 1.0] },
          Vertex { pos: [1.0, -1.0, -1.0], color: [0.0, 0.0, 1.0] },
          Vertex { pos: [1.0, 1.0, 1.0], color: [1.0, 1.0, 0.0] },
          Vertex { pos: [1.0, -1.0, 1.0], color: [1.0, 1.0, 0.0] },
          Vertex { pos: [-1.0, -1.0, 1.0], color: [1.0, 1.0, 0.0] },
          Vertex { pos: [1.0, -1.0, -1.0], color: [1.0, 0.0, 1.0] },
          Vertex { pos: [-1.0, -1.0, -1.0], color: [1.0, 0.0, 1.0] }
        ]
    ).unwrap();
    let index_buffer = glium::index::NoIndices(PrimitiveType::TriangleStrip);

    // Minimal shader pair: transform by `proj_view`, pass the color through.
    let program = program!(&display,
        140 => {
            vertex: "
                #version 140
                uniform mat4 proj_view;
                in vec3 pos;
                in vec3 color;
                out vec3 vcolor;
                void main(void) {
                    gl_Position = proj_view * vec4(pos, 1.0);
                    vcolor = color;
                }
            ",
            fragment: "
                #version 140
                in vec3 vcolor;
                out vec4 color;
                void main(void) {
                    color = vec4(vcolor, 1.0);
                }
            "
        },
    ).unwrap();

    let display_dims = display.get_framebuffer_dimensions();
    let mut persp_proj = cgmath::perspective(cgmath::Deg(65.0), display_dims.0 as f32 / display_dims.1 as f32,
                                             1.0, 200.0);
    // Camera starts at (0, 0, 6) looking at the origin, +Y up.
    let mut arcball_camera = {
        let look_at = Matrix4::<f32>::look_at(Point3::new(0.0, 0.0, 6.0),
                                              Point3::new(0.0, 0.0, 0.0),
                                              Vector3::new(0.0, 1.0, 0.0));
        ArcballCamera::new(&look_at, 0.05, 4.0, [display_dims.0 as f32, display_dims.1 as f32])
    };

    // Track if left/right mouse is down
    let mut mouse_pressed = [false, false];
    let mut prev_mouse = None;

    // Event/render loop; exits on window close or Escape.
    'outer: loop {
        for e in display.poll_events() {
            match e {
                glutin::Event::Closed => break 'outer,
                Event::KeyboardInput(state, _, code) => {
                    let pressed = state == ElementState::Pressed;
                    match code {
                        Some(VirtualKeyCode::Escape) if pressed => break 'outer,
                        _ => {}
                    }
                },
                // First mouse event only records the position so the next
                // move has a valid previous point to diff against.
                Event::MouseMoved(x, y) if prev_mouse.is_none() => {
                    prev_mouse = Some((x, y));
                },
                Event::MouseMoved(x, y) => {
                    let prev = prev_mouse.unwrap();
                    if mouse_pressed[0] {
                        arcball_camera.rotate(Vector2::new(prev.0 as f32, prev.1 as f32),
                                              Vector2::new(x as f32, y as f32));
                    } else if mouse_pressed[1] {
                        let mouse_delta = Vector2::new((x - prev.0) as f32, -(y - prev.1) as f32);
                        arcball_camera.pan(mouse_delta, 0.16);
                    }
                    prev_mouse = Some((x, y));
                },
                Event::MouseInput(state, button) => {
                    if button == MouseButton::Left {
                        mouse_pressed[0] = state == ElementState::Pressed;
                    } else if button == MouseButton::Right {
                        mouse_pressed[1] = state == ElementState::Pressed;
                    }
                },
                Event::MouseWheel(delta, _) => {
                    let y = match delta {
                        MouseScrollDelta::LineDelta(_, y) => y,
                        MouseScrollDelta::PixelDelta(_, y) => y,
                    };
                    arcball_camera.zoom(y, 0.16);
                },
                // Keep the projection and the camera's screen size in sync
                // with the window dimensions.
                Event::Resized(w, h) => {
                    persp_proj = cgmath::perspective(cgmath::Deg(65.0), w as f32 / h as f32, 1.0, 1000.0);
                    arcball_camera.update_screen(w as f32, h as f32);
                },
                _ => {}
            }
        }

        let proj_view: [[f32; 4]; 4] = (persp_proj * arcball_camera.get_mat4()).into();
        let uniforms = uniform! {
            proj_view: proj_view,
        };

        let draw_params = glium::DrawParameters {
            depth: glium::Depth {
                test: glium::draw_parameters::DepthTest::IfLess,
                write: true,
                .. Default::default()
            },
            .. Default::default()
        };

        let mut target = display.draw();
        target.clear_color(0.1, 0.1, 0.1, 0.0);
        target.clear_depth(1.0);
        target.draw(&vertex_buffer, &index_buffer, &program, &uniforms, &draw_params).unwrap();
        target.finish().unwrap();
    }
}
fc5f7a4f66ae1b3e5d24c5a674d566c2faab43e2 | 434 | //! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_100.hrx"
// Returns the shared test runner for this auto-converted spec suite.
#[allow(unused)]
fn runner() -> crate::TestRunner {
    super::runner()
}

// Compiles the input SCSS and checks the exact CSS output, including the
// IE `\9` hack preserved after `darken()` is evaluated.
#[test]
fn test() {
    assert_eq!(
        runner().ok("$endColor: red;\r\
             \ntest {\r\
             \n  background-color: darken($endColor, 10%) \\9;\r\
             \n}"),
        "test {\
         \n  background-color: #cc0000 \\9 ;\
         \n}\n"
    );
}
| 21.7 | 82 | 0.504608 |
e25ae72973787872eeb072ccad6aee4e0270483d | 11,338 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Service config.
///
///
/// Service configuration allows for customization of endpoints, region, credentials providers,
/// and retry configuration. Generally, it is constructed automatically for you from a shared
/// configuration loaded by the `aws-config` crate. For example:
///
/// ```ignore
/// // Load a shared config from the environment
/// let shared_config = aws_config::from_env().load().await;
/// // The client constructor automatically converts the shared config into the service config
/// let client = Client::new(&shared_config);
/// ```
///
/// The service config can also be constructed manually using its builder.
///
pub struct Config {
    // Optional application name folded into the user agent (see `app_name()`).
    app_name: Option<aws_types::app_name::AppName>,
    pub(crate) timeout_config: Option<aws_smithy_types::timeout::TimeoutConfig>,
    pub(crate) sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>,
    pub(crate) retry_config: Option<aws_smithy_types::retry::RetryConfig>,
    pub(crate) endpoint_resolver: ::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>,
    pub(crate) region: Option<aws_types::region::Region>,
    pub(crate) credentials_provider: aws_types::credentials::SharedCredentialsProvider,
}
impl std::fmt::Debug for Config {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Emits only the struct name; none of the fields are included.
        f.debug_struct("Config").finish()
    }
}
impl Config {
    /// Constructs a config builder.
    pub fn builder() -> Builder {
        Builder::default()
    }
    /// Returns the name of the app that is using the client, if it was provided.
    ///
    /// This _optional_ name is used to identify the application in the user agent that
    /// gets sent along with requests.
    pub fn app_name(&self) -> Option<&aws_types::app_name::AppName> {
        self.app_name.as_ref()
    }
    /// Creates a new [service config](crate::Config) from a [shared `config`](aws_types::config::Config).
    pub fn new(config: &aws_types::config::Config) -> Self {
        Builder::from(config).build()
    }
    /// The signature version 4 service signing name to use in the credential scope when signing requests.
    ///
    /// The signing service may be overridden by the `Endpoint`, or by specifying a custom
    /// [`SigningService`](aws_types::SigningService) during operation construction
    pub fn signing_service(&self) -> &'static str {
        // A2I runtime requests are signed with the "sagemaker" service name.
        "sagemaker"
    }
}
/// Builder for creating a `Config`.
// All fields are optional; `build()` substitutes defaults for the endpoint
// resolver and the credentials provider when they are left unset.
#[derive(Default)]
pub struct Builder {
    app_name: Option<aws_types::app_name::AppName>,
    timeout_config: Option<aws_smithy_types::timeout::TimeoutConfig>,
    sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>,
    retry_config: Option<aws_smithy_types::retry::RetryConfig>,
    endpoint_resolver: Option<::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>>,
    region: Option<aws_types::region::Region>,
    credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>,
}
// NOTE(review): generated code (see the file header) — only comments were
// added here; the code itself is untouched.
impl Builder {
    /// Constructs a config builder.
    pub fn new() -> Self {
        Self::default()
    }
    /// Sets the name of the app that is using the client.
    ///
    /// This _optional_ name is used to identify the application in the user agent that
    /// gets sent along with requests.
    pub fn app_name(mut self, app_name: aws_types::app_name::AppName) -> Self {
        self.set_app_name(Some(app_name));
        self
    }
    /// Sets the name of the app that is using the client.
    ///
    /// This _optional_ name is used to identify the application in the user agent that
    /// gets sent along with requests.
    pub fn set_app_name(&mut self, app_name: Option<aws_types::app_name::AppName>) -> &mut Self {
        self.app_name = app_name;
        self
    }
    /// Set the timeout_config for the builder
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use std::time::Duration;
    /// use aws_sdk_sagemakera2iruntime::config::Config;
    /// use aws_smithy_types::timeout::TimeoutConfig;
    ///
    /// let timeout_config = TimeoutConfig::new()
    ///     .with_api_call_attempt_timeout(Some(Duration::from_secs(1)));
    /// let config = Config::builder().timeout_config(timeout_config).build();
    /// ```
    pub fn timeout_config(
        mut self,
        timeout_config: aws_smithy_types::timeout::TimeoutConfig,
    ) -> Self {
        self.set_timeout_config(Some(timeout_config));
        self
    }
    /// Set the timeout_config for the builder
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use std::time::Duration;
    /// use aws_sdk_sagemakera2iruntime::config::{Builder, Config};
    /// use aws_smithy_types::timeout::TimeoutConfig;
    ///
    /// fn set_request_timeout(builder: &mut Builder) {
    ///     let timeout_config = TimeoutConfig::new()
    ///         .with_api_call_timeout(Some(Duration::from_secs(3)));
    ///     builder.set_timeout_config(Some(timeout_config));
    /// }
    ///
    /// let mut builder = Config::builder();
    /// set_request_timeout(&mut builder);
    /// let config = builder.build();
    /// ```
    pub fn set_timeout_config(
        &mut self,
        timeout_config: Option<aws_smithy_types::timeout::TimeoutConfig>,
    ) -> &mut Self {
        self.timeout_config = timeout_config;
        self
    }
    /// Set the sleep_impl for the builder
    ///
    /// # Examples
    ///
    /// ```no_run
    /// use aws_sdk_sagemakera2iruntime::config::Config;
    /// use aws_smithy_async::rt::sleep::AsyncSleep;
    /// use aws_smithy_async::rt::sleep::Sleep;
    ///
    /// #[derive(Debug)]
    /// pub struct ForeverSleep;
    ///
    /// impl AsyncSleep for ForeverSleep {
    ///     fn sleep(&self, duration: std::time::Duration) -> Sleep {
    ///         Sleep::new(std::future::pending())
    ///     }
    /// }
    ///
    /// let sleep_impl = std::sync::Arc::new(ForeverSleep);
    /// let config = Config::builder().sleep_impl(sleep_impl).build();
    /// ```
    pub fn sleep_impl(
        mut self,
        sleep_impl: std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>,
    ) -> Self {
        self.set_sleep_impl(Some(sleep_impl));
        self
    }
    /// Set the sleep_impl for the builder
    ///
    /// # Examples
    ///
    /// ```no_run
    /// use aws_sdk_sagemakera2iruntime::config::{Builder, Config};
    /// use aws_smithy_async::rt::sleep::AsyncSleep;
    /// use aws_smithy_async::rt::sleep::Sleep;
    ///
    /// #[derive(Debug)]
    /// pub struct ForeverSleep;
    ///
    /// impl AsyncSleep for ForeverSleep {
    ///     fn sleep(&self, duration: std::time::Duration) -> Sleep {
    ///         Sleep::new(std::future::pending())
    ///     }
    /// }
    ///
    /// fn set_never_ending_sleep_impl(builder: &mut Builder) {
    ///     let sleep_impl = std::sync::Arc::new(ForeverSleep);
    ///     builder.set_sleep_impl(Some(sleep_impl));
    /// }
    ///
    /// let mut builder = Config::builder();
    /// set_never_ending_sleep_impl(&mut builder);
    /// let config = builder.build();
    /// ```
    pub fn set_sleep_impl(
        &mut self,
        sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>,
    ) -> &mut Self {
        self.sleep_impl = sleep_impl;
        self
    }
    /// Set the retry_config for the builder
    ///
    /// # Examples
    /// ```no_run
    /// use aws_sdk_sagemakera2iruntime::config::Config;
    /// use aws_smithy_types::retry::RetryConfig;
    ///
    /// let retry_config = RetryConfig::new().with_max_attempts(5);
    /// let config = Config::builder().retry_config(retry_config).build();
    /// ```
    pub fn retry_config(mut self, retry_config: aws_smithy_types::retry::RetryConfig) -> Self {
        self.set_retry_config(Some(retry_config));
        self
    }
    /// Set the retry_config for the builder
    ///
    /// # Examples
    /// ```no_run
    /// use aws_sdk_sagemakera2iruntime::config::{Builder, Config};
    /// use aws_smithy_types::retry::RetryConfig;
    ///
    /// fn disable_retries(builder: &mut Builder) {
    ///     let retry_config = RetryConfig::new().with_max_attempts(1);
    ///     builder.set_retry_config(Some(retry_config));
    /// }
    ///
    /// let mut builder = Config::builder();
    /// disable_retries(&mut builder);
    /// let config = builder.build();
    /// ```
    pub fn set_retry_config(
        &mut self,
        retry_config: Option<aws_smithy_types::retry::RetryConfig>,
    ) -> &mut Self {
        self.retry_config = retry_config;
        self
    }
    // TODO(docs): include an example of using a static endpoint
    /// Sets the endpoint resolver to use when making requests.
    pub fn endpoint_resolver(
        mut self,
        endpoint_resolver: impl aws_endpoint::ResolveAwsEndpoint + 'static,
    ) -> Self {
        self.endpoint_resolver = Some(::std::sync::Arc::new(endpoint_resolver));
        self
    }
    /// Sets the AWS region to use when making requests.
    pub fn region(mut self, region: impl Into<Option<aws_types::region::Region>>) -> Self {
        self.region = region.into();
        self
    }
    /// Sets the credentials provider for this service
    pub fn credentials_provider(
        mut self,
        credentials_provider: impl aws_types::credentials::ProvideCredentials + 'static,
    ) -> Self {
        self.credentials_provider = Some(aws_types::credentials::SharedCredentialsProvider::new(
            credentials_provider,
        ));
        self
    }
    /// Sets the credentials provider for this service
    pub fn set_credentials_provider(
        &mut self,
        credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>,
    ) -> &mut Self {
        self.credentials_provider = credentials_provider;
        self
    }
    /// Builds a [`Config`].
    pub fn build(self) -> Config {
        Config {
            app_name: self.app_name,
            timeout_config: self.timeout_config,
            sleep_impl: self.sleep_impl,
            retry_config: self.retry_config,
            // Fall back to the generated AWS endpoint resolver when none was set.
            endpoint_resolver: self
                .endpoint_resolver
                .unwrap_or_else(|| ::std::sync::Arc::new(crate::aws_endpoint::endpoint_resolver())),
            region: self.region,
            // Default to `NoCredentials` when no provider was configured.
            credentials_provider: self.credentials_provider.unwrap_or_else(|| {
                aws_types::credentials::SharedCredentialsProvider::new(
                    crate::no_credentials::NoCredentials,
                )
            }),
        }
    }
}
impl From<&aws_types::config::Config> for Builder {
    fn from(input: &aws_types::config::Config) -> Self {
        // Copies every supported setting out of the shared config: region,
        // retry/timeout configuration, sleep impl, credentials, and app name.
        let mut builder = Builder::default();
        builder = builder.region(input.region().cloned());
        builder.set_retry_config(input.retry_config().cloned());
        builder.set_timeout_config(input.timeout_config().cloned());
        builder.set_sleep_impl(input.sleep_impl().clone());
        builder.set_credentials_provider(input.credentials_provider().cloned());
        builder.set_app_name(input.app_name().cloned());
        builder
    }
}
impl From<&aws_types::config::Config> for Config {
    fn from(config: &aws_types::config::Config) -> Self {
        // Route through the builder conversion, then finalize.
        let builder = Builder::from(config);
        builder.build()
    }
}
| 36.692557 | 106 | 0.628947 |
7188321f782af51658b61a83e541370e2e954100 | 4,715 | use assert_cmd::Command;
use ffizer::tools::dir_diff_list;
use predicates::prelude::*;
use pretty_assertions::assert_eq;
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
use tempfile::tempdir;
use test_generator::test_resources;
#[test_resources("tests/data/template_*")]
fn run_test_samples(template_path: &str) {
let t = do_run_test_samples(template_path);
if let Err(e) = t {
dbg!(e);
assert!(false);
}
}
/// Runs `ffizer test-samples --source <template_path>` and succeeds only if
/// the command exits cleanly.
pub fn do_run_test_samples(template_path: &str) -> Result<(), Box<dyn Error>> {
    Command::cargo_bin(env!("CARGO_PKG_NAME"))?
        .args(["test-samples", "--source", template_path])
        .ok()?;
    Ok(())
}
/// Asserts that the contents of two directories are identical and that the
/// command producing them succeeded.
///
/// Dumps the command output (via `dbg!`) only when a difference or a failed
/// exit status is detected, so passing runs stay quiet — the previous
/// unconditional `dbg!(&output)` printed on every call (and then again on
/// failure).
pub fn assert_is_same<A: AsRef<Path>, B: AsRef<Path>>(
    actual_base: A,
    expected_base: B,
    output: &std::process::Output,
) -> Result<(), Box<dyn Error>> {
    let diffs = dir_diff_list::search_diff(actual_base, expected_base)?;
    if !diffs.is_empty() || !output.status.success() {
        dbg!(output);
    }
    assert_eq!(diffs, vec![]);
    assert!(output.status.success());
    Ok(())
}
// Applying a completely empty template must yield an (equally empty)
// destination directory.
#[test]
fn empty_template() -> Result<(), Box<dyn Error>> {
    let tmp_dir = tempdir()?;
    let template_path = tmp_dir.path().join("t0_template");
    let expected_path = tmp_dir.path().join("t0_expected");
    let actual_path = tmp_dir.path().join("t0_actual");
    fs::create_dir_all(&template_path)?;
    fs::create_dir_all(&expected_path)?;

    let output = Command::cargo_bin(env!("CARGO_PKG_NAME"))?
        .arg("apply")
        .arg("--no-interaction")
        .arg("--confirm")
        .arg("never")
        .arg("--update-mode")
        .arg("keep")
        .arg("--destination")
        .arg(actual_path.to_str().unwrap())
        .arg("--source")
        .arg(template_path.to_str().unwrap())
        .ok()?;
    assert_is_same(&actual_path, &expected_path, &output)
}
// Applying only a subfolder of a template (`--source-subfolder`) must match
// the corresponding subfolder of the checked-in expected output.
#[test]
fn test_1_subfolder() -> Result<(), Box<dyn Error>> {
    let source_subfolder = "dir_1";
    let tmp_dir = tempdir()?;
    let template_path = PathBuf::from("./tests/data/template_1");
    let expected_path =
        PathBuf::from("./tests/data/template_1/.ffizer.samples.d/my-project.expected")
            .join(source_subfolder);
    let actual_path = tmp_dir.path().to_path_buf();

    let output = Command::cargo_bin(env!("CARGO_PKG_NAME"))?
        .arg("apply")
        .arg("--no-interaction")
        .arg("--confirm")
        .arg("never")
        .arg("--update-mode")
        .arg("keep")
        .arg("--destination")
        .arg(actual_path.to_str().unwrap())
        .arg("--source")
        .arg(template_path.to_str().unwrap())
        .arg("--source-subfolder")
        .arg(source_subfolder)
        .ok()?;
    assert_is_same(&actual_path, &expected_path, &output)
}
#[cfg(feature = "test_remote")]
#[test]
fn test_1_remote_master() -> Result<(), Box<dyn Error>> {
Command::cargo_bin(env!("CARGO_PKG_NAME"))?
.arg("test-samples")
.arg("--source")
.arg("https://github.com/ffizer/template_sample.git")
.ok()?;
Ok(())
}
#[cfg(feature = "test_remote")]
#[test]
fn test_1_remote_commitsha1() -> Result<(), Box<dyn Error>> {
Command::cargo_bin(env!("CARGO_PKG_NAME"))?
.arg("test-samples")
.arg("--source")
.arg("https://github.com/ffizer/template_sample.git")
.arg("--rev")
.arg("3ab3bc67b5fab58ceecc031f7ed0eb29c0e0fff8")
.ok()?;
Ok(())
}
#[cfg(feature = "test_remote")]
#[test]
fn test_1_remote_tag() -> Result<(), Box<dyn Error>> {
Command::cargo_bin(env!("CARGO_PKG_NAME"))?
.arg("test-samples")
.arg("--source")
.arg("https://github.com/ffizer/template_sample.git")
.arg("--rev")
.arg("1.2.0")
.ok()?;
Ok(())
}
// reproduce https://github.com/ffizer/ffizer/issues/195
// A template with invalid syntax must fail AND surface the template error
// details on stderr (not just a generic failure).
#[test]
fn log_should_report_error() -> Result<(), Box<dyn Error>> {
    let tmp_dir = tempdir()?;
    let sample_path = PathBuf::from("tests/data/log_error");
    let template_path = sample_path.join("template");
    let actual_path = tmp_dir.path().join("my-project").to_path_buf();

    Command::cargo_bin(env!("CARGO_PKG_NAME"))?
        .arg("apply")
        .arg("--no-interaction")
        .arg("--confirm")
        .arg("never")
        .arg("--destination")
        .arg(actual_path.to_str().unwrap())
        .arg("--source")
        .arg(template_path.to_str().unwrap())
        .assert()
        .stderr(
            predicate::str::contains("source: TemplateError(")
                .and(predicate::str::contains("reason: InvalidSyntax,")),
        )
        .failure();
    Ok(())
}
f5da93f93fd932b0e7ed6134e1893c18b6d02d2a | 2,907 | use super::*;
/// Build a fixed, deterministic set of backtrace frames for the tests below:
/// a `Backtrace::create` frame, a frame with no file/line information
/// (`__rust_maybe_catch_panic`), and a single frame carrying two symbols.
fn generate_fake_frames() -> Vec<BacktraceFrame> {
    vec![
        BacktraceFrame {
            frame: RawFrame::Fake,
            symbols: vec![BacktraceSymbol {
                name: Some(b"std::backtrace::Backtrace::create".to_vec()),
                filename: Some(BytesOrWide::Bytes(b"rust/backtrace.rs".to_vec())),
                lineno: Some(100),
                colno: None,
            }],
        },
        BacktraceFrame {
            frame: RawFrame::Fake,
            // No filename/lineno: exercises the "fn only" rendering path.
            symbols: vec![BacktraceSymbol {
                name: Some(b"__rust_maybe_catch_panic".to_vec()),
                filename: None,
                lineno: None,
                colno: None,
            }],
        },
        BacktraceFrame {
            frame: RawFrame::Fake,
            // Two symbols in one frame (as produced by inlined functions).
            symbols: vec![
                BacktraceSymbol {
                    name: Some(b"std::rt::lang_start_internal".to_vec()),
                    filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
                    lineno: Some(300),
                    colno: Some(5),
                },
                BacktraceSymbol {
                    name: Some(b"std::rt::lang_start".to_vec()),
                    filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
                    lineno: Some(400),
                    colno: None,
                },
            ],
        },
    ]
}
#[test]
fn test_debug() {
    let backtrace = Backtrace {
        inner: Inner::Captured(LazilyResolvedCapture::new(Capture {
            // actual_start: 1 means the first fake frame is skipped in the
            // Debug output below.
            actual_start: 1,
            resolved: true,
            frames: generate_fake_frames(),
        })),
    };
    #[rustfmt::skip]
    let expected = "Backtrace [\
    \n    { fn: \"__rust_maybe_catch_panic\" },\
    \n    { fn: \"std::rt::lang_start_internal\", file: \"rust/rt.rs\", line: 300 },\
    \n    { fn: \"std::rt::lang_start\", file: \"rust/rt.rs\", line: 400 },\
    \n]";
    assert_eq!(format!("{:#?}", backtrace), expected);
    // Format the backtrace a second time, just to make sure lazily resolved state is stable
    assert_eq!(format!("{:#?}", backtrace), expected);
}
#[test]
fn test_frames() {
    let backtrace = Backtrace {
        inner: Inner::Captured(LazilyResolvedCapture::new(Capture {
            actual_start: 1,
            resolved: true,
            frames: generate_fake_frames(),
        })),
    };
    // Unlike the Debug output above, frames() exposes every captured frame,
    // including the one before actual_start.
    let frames = backtrace.frames();
    #[rustfmt::skip]
    let expected = vec![
        "[
    { fn: \"std::backtrace::Backtrace::create\", file: \"rust/backtrace.rs\", line: 100 },
]",
        "[
    { fn: \"__rust_maybe_catch_panic\" },
]",
        "[
    { fn: \"std::rt::lang_start_internal\", file: \"rust/rt.rs\", line: 300 },
    { fn: \"std::rt::lang_start\", file: \"rust/rt.rs\", line: 400 },
]"
    ];
    let mut iter = frames.iter().zip(expected.iter());
    assert!(iter.all(|(f, e)| format!("{:#?}", f) == *e));
}
| 30.28125 | 92 | 0.494324 |
fc932a32b869e79f8c348f7bbb47baa9e6072591 | 10,173 | use crate::command;
use crate::maker_inc_connections;
use crate::maker_inc_connections::RegisterRollover;
use crate::maker_inc_connections::TakerMessage;
use crate::model::cfd::Completed;
use crate::model::cfd::Dlc;
use crate::model::cfd::OrderId;
use crate::model::cfd::Role;
use crate::model::cfd::RolloverCompleted;
use crate::model::cfd::RolloverError;
use crate::model::Identity;
use crate::oracle;
use crate::oracle::GetAnnouncement;
use crate::process_manager;
use crate::schnorrsig;
use crate::setup_contract;
use crate::wire;
use crate::wire::MakerToTaker;
use crate::wire::RolloverMsg;
use crate::Stopping;
use crate::Tasks;
use anyhow::Context as _;
use anyhow::Result;
use futures::channel::mpsc;
use futures::channel::mpsc::UnboundedSender;
use futures::future;
use futures::SinkExt;
use xtra::prelude::MessageChannel;
use xtra::Context;
use xtra::KeepRunning;
use xtra_productivity::xtra_productivity;
/// Actor message: accept the pending rollover proposal.
pub struct AcceptRollover;

/// Actor message: reject the pending rollover proposal.
pub struct RejectRollover;

/// Actor message: a wire-level rollover protocol message to forward to the
/// spawned protocol task.
pub struct ProtocolMsg(pub wire::RolloverMsg);
/// Message sent from the spawned protocol task back to this actor to
/// notify that the rollover has finished successfully.
struct RolloverSucceeded {
    // The DLC produced by the completed rollover protocol.
    dlc: Dlc,
}
/// Message sent from the spawned protocol task back to this actor to
/// notify that the rollover has failed.
struct RolloverFailed {
    // The error to record via `RolloverCompleted::failed`.
    error: RolloverError,
}
/// Maker-side actor driving the rollover protocol for a single CFD
/// (it sends `MakerToTaker` messages and runs the protocol as `Role::Maker`).
pub struct Actor {
    order_id: OrderId,
    send_to_taker_actor: Box<dyn MessageChannel<TakerMessage>>,
    n_payouts: usize,
    taker_id: Identity,
    oracle_pk: schnorrsig::PublicKey,
    // Sender used to forward incoming rollover messages to the spawned
    // protocol task; set to `Some` once the proposal has been accepted.
    sent_from_taker: Option<UnboundedSender<RolloverMsg>>,
    oracle_actor: Box<dyn MessageChannel<GetAnnouncement>>,
    // Channels notified when this actor stops (see `stopping`).
    on_stopping: Vec<Box<dyn MessageChannel<Stopping<Self>>>>,
    // Connection actor that routes taker messages to this instance.
    register: Box<dyn MessageChannel<RegisterRollover>>,
    tasks: Tasks,
    executor: command::Executor,
}
impl Actor {
    #[allow(clippy::too_many_arguments)]
    /// Construct the actor for one rollover of `order_id`, wiring it to the
    /// connection actor, the oracle actor and the process manager.
    pub fn new(
        order_id: OrderId,
        n_payouts: usize,
        send_to_taker_actor: &(impl MessageChannel<TakerMessage> + 'static),
        taker_id: Identity,
        oracle_pk: schnorrsig::PublicKey,
        oracle_actor: &(impl MessageChannel<GetAnnouncement> + 'static),
        (on_stopping0, on_stopping1): (
            &(impl MessageChannel<Stopping<Self>> + 'static),
            &(impl MessageChannel<Stopping<Self>> + 'static),
        ),
        process_manager: xtra::Address<process_manager::Actor>,
        register: &(impl MessageChannel<RegisterRollover> + 'static),
        db: sqlx::SqlitePool,
    ) -> Self {
        Self {
            order_id,
            n_payouts,
            send_to_taker_actor: send_to_taker_actor.clone_channel(),
            taker_id,
            oracle_pk,
            // Stays `None` until the proposal is accepted.
            sent_from_taker: None,
            oracle_actor: oracle_actor.clone_channel(),
            on_stopping: vec![on_stopping0.clone_channel(), on_stopping1.clone_channel()],
            register: register.clone_channel(),
            executor: command::Executor::new(db, process_manager),
            tasks: Tasks::default(),
        }
    }
    /// Record the rollover outcome on the CFD and stop this actor.
    async fn complete(&mut self, completed: RolloverCompleted, ctx: &mut xtra::Context<Self>) {
        if let Err(e) = self
            .executor
            .execute(self.order_id, |cfd| Ok(cfd.roll_over(completed)?))
            .await
        {
            // Failure to persist the outcome is logged but not propagated;
            // the actor stops either way.
            tracing::warn!(order_id = %self.order_id, "{:#}", e)
        }
        ctx.stop();
    }
    /// Accept the rollover proposal: confirm to the taker, fetch the oracle
    /// announcement and spawn the maker side of the rollover protocol.
    ///
    /// The spawned task reports back via `RolloverSucceeded` /
    /// `RolloverFailed` messages.
    async fn accept(&mut self, ctx: &mut xtra::Context<Self>) -> Result<(), RolloverError> {
        let order_id = self.order_id;

        // A `Some` sender means the protocol task was already spawned.
        if self.sent_from_taker.is_some() {
            tracing::warn!(%order_id, "Rollover already active");
            return Ok(());
        }

        let (sender, receiver) = mpsc::unbounded();
        self.sent_from_taker = Some(sender);

        tracing::debug!(%order_id, "Maker accepts a rollover proposal" );

        let (rollover_params, dlc, interval) = self
            .executor
            .execute(self.order_id, |cfd| Ok(cfd.accept_rollover_proposal()?))
            .await?;

        let oracle_event_id =
            oracle::next_announcement_after(time::OffsetDateTime::now_utc() + interval)
                .context("Failed to calculate next BitMexPriceEventId")?;

        let taker_id = self.taker_id;

        self.send_to_taker_actor
            .send(maker_inc_connections::TakerMessage {
                taker_id,
                msg: wire::MakerToTaker::ConfirmRollover {
                    order_id,
                    oracle_event_id,
                },
            })
            .await
            .context("Maker connection actor disconnected")?
            .context("Failed to send confirm rollover message")?;

        let announcement = self
            .oracle_actor
            .send(oracle::GetAnnouncement(oracle_event_id))
            .await
            .context("Oracle actor disconnected")?
            .context("Failed to get announcement")?;

        // Outgoing protocol messages are wrapped for the connection actor;
        // incoming ones arrive via `receiver` (fed by `forward_protocol_msg`).
        let rollover_fut = setup_contract::roll_over(
            self.send_to_taker_actor.sink().with(move |msg| {
                future::ok(maker_inc_connections::TakerMessage {
                    taker_id,
                    msg: wire::MakerToTaker::RolloverProtocol { order_id, msg },
                })
            }),
            receiver,
            (self.oracle_pk, announcement),
            rollover_params,
            Role::Maker,
            dlc,
            self.n_payouts,
        );

        let this = ctx.address().expect("self to be alive");
        self.tasks.add(async move {
            // Report the protocol outcome back to the actor; a disconnected
            // address means the actor already stopped, so the result is
            // deliberately ignored.
            let _: Result<(), xtra::Disconnected> = match rollover_fut.await {
                Ok(dlc) => this.send(RolloverSucceeded { dlc }).await,
                Err(source) => {
                    this.send(RolloverFailed {
                        error: RolloverError::Protocol { source },
                    })
                    .await
                }
            };
        });

        Ok(())
    }
    /// Reject the rollover proposal: inform the taker, record the rejection
    /// and stop the actor.
    async fn reject(&mut self, ctx: &mut xtra::Context<Self>) -> Result<(), RolloverError> {
        tracing::info!(id = %self.order_id, "Rejecting rollover proposal" );

        self.send_to_taker_actor
            .send(TakerMessage {
                taker_id: self.taker_id,
                msg: MakerToTaker::RejectRollover(self.order_id),
            })
            .await
            .context("Maker connection actor disconnected")?
            .context("Failed to send reject rollover message")?;

        self.complete(RolloverCompleted::rejected(self.order_id), ctx)
            .await;

        // NOTE(review): `complete` already calls `ctx.stop()`; this second
        // call looks redundant — confirm before removing.
        ctx.stop();

        Ok(())
    }
    /// Forward a rollover protocol message from the taker to the spawned
    /// protocol task. Fails if the rollover was not accepted yet.
    pub async fn forward_protocol_msg(&mut self, msg: ProtocolMsg) -> Result<(), RolloverError> {
        self.sent_from_taker
            .as_mut()
            .context("Rollover task is not active")? // Sender is set once `Accepted` is sent.
            .send(msg.0)
            .await
            .context("Failed to forward message to rollover task")?;

        Ok(())
    }
}
#[async_trait::async_trait]
impl xtra::Actor for Actor {
    /// On startup: register with the connection actor (so taker messages for
    /// this order reach this instance) and mark the CFD as rolling over.
    /// Any failure is recorded as a failed rollover, which stops the actor.
    async fn started(&mut self, ctx: &mut xtra::Context<Self>) {
        let order_id = self.order_id;

        tracing::info!(
            %order_id,
            "Received rollover proposal"
        );

        let this = ctx.address().expect("self to be alive");

        let fut = async {
            // Register ourselves with the actor handling connections with
            // takers, so that it knows where to forward rollover messages
            // which correspond to this instance
            self.register
                .send(RegisterRollover {
                    order_id,
                    address: this,
                })
                .await?;

            self.executor
                .execute(self.order_id, |cfd| Ok(cfd.start_rollover()?))
                .await?;

            anyhow::Ok(())
        };

        if let Err(source) = fut.await {
            self.complete(
                Completed::Failed {
                    order_id,
                    error: RolloverError::Other { source },
                },
                ctx,
            )
            .await;
        }
    }

    /// On shutdown: notify the registered `Stopping` listeners; send errors
    /// are ignored because the recipients may already be gone.
    async fn stopping(&mut self, ctx: &mut Context<Self>) -> KeepRunning {
        let this = ctx.address().expect("self to be alive");

        for channel in self.on_stopping.iter() {
            let _ = channel.send(Stopping { me: this.clone() }).await;
        }

        KeepRunning::StopAll
    }
}
// Message handlers. Each handler funnels any failure into `complete`, which
// records a failed rollover on the CFD and stops the actor.
#[xtra_productivity]
impl Actor {
    async fn handle_accept_rollover(
        &mut self,
        _msg: AcceptRollover,
        ctx: &mut xtra::Context<Self>,
    ) {
        if let Err(error) = self.accept(ctx).await {
            self.complete(
                RolloverCompleted::Failed {
                    order_id: self.order_id,
                    error,
                },
                ctx,
            )
            .await;
        };
    }

    async fn handle_reject_rollover(
        &mut self,
        _msg: RejectRollover,
        ctx: &mut xtra::Context<Self>,
    ) {
        if let Err(error) = self.reject(ctx).await {
            self.complete(
                RolloverCompleted::Failed {
                    order_id: self.order_id,
                    error,
                },
                ctx,
            )
            .await;
        };
    }

    async fn handle_protocol_msg(&mut self, msg: ProtocolMsg, ctx: &mut xtra::Context<Self>) {
        if let Err(error) = self.forward_protocol_msg(msg).await {
            self.complete(
                RolloverCompleted::Failed {
                    order_id: self.order_id,
                    error,
                },
                ctx,
            )
            .await;
        };
    }

    // Outcome reports from the spawned protocol task (see `accept`).
    async fn handle_rollover_failed(&mut self, msg: RolloverFailed, ctx: &mut xtra::Context<Self>) {
        self.complete(RolloverCompleted::failed(self.order_id, msg.error), ctx)
            .await
    }

    async fn handle_rollover_succeeded(
        &mut self,
        msg: RolloverSucceeded,
        ctx: &mut xtra::Context<Self>,
    ) {
        self.complete(RolloverCompleted::succeeded(self.order_id, msg.dlc), ctx)
            .await
    }
}
| 30.641566 | 100 | 0.564927 |
91deadf412775ad3e413af2f58e6acb407165198 | 905 | use wasm_bindgen::prelude::*;
// A macro to provide `println!(..)`-style syntax for `console.log` logging.
#[macro_use]
macro_rules! log {
    ( $( $t:tt )* ) => {
        web_sys::console::log_1(&format!( $( $t )* ).into());
    }
}
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator. The allocator is selected at compile time via the cargo
// feature flag below.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
mod color;
pub use color::*;
mod vertex;
pub use vertex::*;
mod image;
pub use image::*;
mod geometry;
pub use geometry::*;
mod buffer_attributes;
pub use buffer_attributes::*;
/// Wasm entry point; `#[wasm_bindgen(start)]` makes it run automatically
/// when the module is instantiated.
#[wasm_bindgen(start)]
pub fn main_js() -> Result<(), JsValue> {
    // This provides better error messages in debug mode.
    // It's disabled in release mode so it doesn't bloat up the file size.
    #[cfg(debug_assertions)]
    console_error_panic_hook::set_once();

    log!("main_js");

    Ok(())
}
| 23.815789 | 79 | 0.668508 |
7173d4e989ee3763826ffc3342edf7f51249afab | 3,690 | use crate::state::State;
use async_trait::async_trait;
use nu_engine::CommandArgs;
use nu_errors::ShellError;
use nu_protocol::{Signature, TaggedDictBuilder, UntaggedValue};
use nu_source::Tag;
use nu_stream::OutputStream;
use std::sync::{Arc, Mutex};
/// Implementation of the `use` shell command; holds the shared shell state.
pub struct UseCmd {
    state: Arc<Mutex<State>>,
}
impl UseCmd {
    /// Create the command backed by the shared shell state.
    pub fn new(state: Arc<Mutex<State>>) -> Self {
        Self { state }
    }
}
#[async_trait]
impl nu_engine::WholeStreamCommand for UseCmd {
    fn name(&self) -> &str {
        "use"
    }

    // Two optional switches control what the command displays; without any
    // switch it shows the active cluster environment (see `use_cmd`).
    fn signature(&self) -> Signature {
        Signature::build("use")
            .switch("cloud", "show default execution environment of cloud", None)
            .switch(
                "timeouts",
                "show default execution environment for timeouts",
                None,
            )
    }

    fn usage(&self) -> &str {
        "Modify the default execution environment of commands"
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        use_cmd(args, self.state.clone())
    }
}
/// Build a single output row describing the current execution environment.
///
/// With `--cloud` the row shows the active cloud organization, cloud and
/// project; otherwise it shows the active cluster, bucket, scope and
/// collection. `--timeouts` additionally appends the configured timeouts.
fn use_cmd(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {
    let show_cloud = args.has_flag("cloud");
    let show_timeouts = args.has_flag("timeouts");
    let guard = state.lock().unwrap();
    let active = guard.active_cluster();

    let mut using_now = TaggedDictBuilder::new(Tag::default());
    if show_cloud {
        // No active cloud means no project can be resolved.
        let project = match guard.active_cloud() {
            Ok(c) => c.active_project().unwrap_or_default(),
            Err(_e) => "".to_string(),
        };
        // `unwrap_or_default()` replaces the original
        // `unwrap_or_else(|| String::from(""))` — identical behavior.
        using_now.insert_value(
            "cloud-organization",
            guard.active_cloud_org_name().unwrap_or_default(),
        );
        using_now.insert_value("cloud", guard.active_cloud_name().unwrap_or_default());
        using_now.insert_value("project", project);
    } else {
        using_now.insert_value("username", active.username());
        using_now.insert_value("cluster", guard.active());
        // The bucket is the only field that distinguishes "unset" visibly.
        using_now.insert_value(
            "bucket",
            active
                .active_bucket()
                .unwrap_or_else(|| String::from("<not set>")),
        );
        using_now.insert_value("scope", active.active_scope().unwrap_or_default());
        using_now.insert_value("collection", active.active_collection().unwrap_or_default());
        if let Some(co) = active.cloud_org() {
            using_now.insert_value("cloud-organization", co);
        }
    }
    if show_timeouts {
        let timeouts = active.timeouts();
        using_now.insert_value(
            "data-timeout (ms)",
            UntaggedValue::int(timeouts.data_timeout().as_millis() as i64),
        );
        using_now.insert_value(
            "management-timeout (ms)",
            UntaggedValue::int(timeouts.management_timeout().as_millis() as i64),
        );
        using_now.insert_value(
            "analytics-timeout (ms)",
            UntaggedValue::int(timeouts.analytics_timeout().as_millis() as i64),
        );
        using_now.insert_value(
            "query-timeout (ms)",
            UntaggedValue::int(timeouts.query_timeout().as_millis() as i64),
        );
        using_now.insert_value(
            "search-timeout (ms)",
            UntaggedValue::int(timeouts.search_timeout().as_millis() as i64),
        );
    }
    let clusters = vec![using_now.into_value()];

    Ok(clusters.into())
}
| 30.495868 | 93 | 0.561789 |
7248e6b4e3106aed78f3c3a7237667afa88d5545 | 62 | pub mod mtac_lora_h_868_eu868;
pub mod mtac_lora_h_915_us915;
| 20.666667 | 30 | 0.870968 |
de89adc81cc5d07b36e743ff4008c7fa018c0699 | 20,474 | // Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Client network controller, controls requests and responses from the
//! stratum server
use bufstream::BufStream;
use native_tls::{TlsConnector, TlsStream};
use serde_json;
use stats;
use std;
use std::io::{self, BufRead, ErrorKind, Read, Write};
use std::net::TcpStream;
use std::sync::{mpsc, Arc, RwLock};
use std::thread;
use time;
use types;
use util::LOGGER;
/// Error states the stratum client can encounter.
#[derive(Debug)]
pub enum Error {
    /// Failure establishing or using the server connection.
    ConnectionError(String),
    /// A malformed or unprocessable request.
    RequestError(String),
    /// A malformed or unprocessable response.
    ResponseError(String),
    /// JSON (de)serialization failure (converted from serde_json errors).
    JsonError(String),
    /// Catch-all for internal failures (lock poisoning, channel sends).
    GeneralError(String),
}
impl From<serde_json::error::Error> for Error {
fn from(error: serde_json::error::Error) -> Self {
Error::JsonError(format!("Failed to parse JSON: {:?}", error))
}
}
impl<T> From<std::sync::PoisonError<T>> for Error {
fn from(error: std::sync::PoisonError<T>) -> Self {
Error::GeneralError(format!("Failed to get lock: {:?}", error))
}
}
impl<T> From<std::sync::mpsc::SendError<T>> for Error {
fn from(error: std::sync::mpsc::SendError<T>) -> Self {
Error::GeneralError(format!("Failed to send to a channel: {:?}", error))
}
}
/// Buffered connection to the stratum server. After a successful
/// `try_connect`, exactly one of the two fields is populated, depending on
/// whether TLS was requested.
struct Stream {
    stream: Option<BufStream<TcpStream>>,
    tls_stream: Option<BufStream<TlsStream<TcpStream>>>,
}
impl Stream {
fn new() -> Stream {
Stream {
stream: None,
tls_stream: None,
}
}
fn try_connect(&mut self, server_url: &str, tls: Option<bool>) -> Result<(), Error> {
match TcpStream::connect(server_url) {
Ok(conn) => {
if tls.is_some() && tls.unwrap() {
let connector = TlsConnector::new().map_err(|e| {
Error::ConnectionError(format!("Can't create TLS connector: {:?}", e))
})?;
let url_port: Vec<&str> = server_url.split(":").collect();
let splitted_url: Vec<&str> = url_port[0].split(".").collect();
let base_host = format!(
"{}.{}",
splitted_url[splitted_url.len() - 2],
splitted_url[splitted_url.len() - 1]
);
let mut stream = connector.connect(&base_host, conn).map_err(|e| {
Error::ConnectionError(format!("Can't establish TLS connection: {:?}", e))
})?;
stream.get_mut().set_nonblocking(true).map_err(|e| {
Error::ConnectionError(format!("Can't switch to nonblocking mode: {:?}", e))
})?;
self.tls_stream = Some(BufStream::new(stream));
} else {
let _ = conn.set_nonblocking(true).map_err(|e| {
Error::ConnectionError(format!("Can't switch to nonblocking mode: {:?}", e))
})?;
self.stream = Some(BufStream::new(conn));
}
Ok(())
}
Err(e) => Err(Error::ConnectionError(format!("{}", e))),
}
}
}
// Delegate `Write` to whichever underlying stream is active. Panics (via
// `unwrap`) if called before a connection was established — same contract
// as before.
impl Write for Stream {
    fn write(&mut self, b: &[u8]) -> Result<usize, std::io::Error> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.write(b),
            None => self.stream.as_mut().unwrap().write(b),
        }
    }

    fn flush(&mut self) -> Result<(), std::io::Error> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.flush(),
            None => self.stream.as_mut().unwrap().flush(),
        }
    }
}
// Delegate `Read` to whichever underlying stream is active.
impl Read for Stream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.read(buf),
            None => self.stream.as_mut().unwrap().read(buf),
        }
    }
}
// Delegate `BufRead` to whichever underlying stream is active.
impl BufRead for Stream {
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.fill_buf(),
            None => self.stream.as_mut().unwrap().fill_buf(),
        }
    }

    fn consume(&mut self, amt: usize) {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.consume(amt),
            None => self.stream.as_mut().unwrap().consume(amt),
        }
    }

    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.read_until(byte, buf),
            None => self.stream.as_mut().unwrap().read_until(byte, buf),
        }
    }

    fn read_line(&mut self, string: &mut String) -> io::Result<usize> {
        match self.tls_stream.as_mut() {
            Some(tls) => tls.read_line(string),
            None => self.stream.as_mut().unwrap().read_line(string),
        }
    }
}
/// Controls the connection to the stratum server: sends requests,
/// dispatches responses and relays jobs/solutions between the server and
/// the miner.
pub struct Controller {
    _id: u32,
    server_url: String,
    server_login: Option<String>,
    server_password: Option<String>,
    server_tls_enabled: Option<bool>,
    // `None` while disconnected; re-established by `try_connect`.
    stream: Option<Stream>,
    // Channel pair for messages from the miner (e.g. found solutions);
    // `tx` is handed out so other components can post into the loop.
    rx: mpsc::Receiver<types::ClientMessage>,
    pub tx: mpsc::Sender<types::ClientMessage>,
    miner_tx: mpsc::Sender<types::MinerMessage>,
    last_request_id: u32,
    stats: Arc<RwLock<stats::Stats>>,
}
// Fallback `RpcError` used when a failed response carries no parseable
// error object.
// NOTE(review): the function name is misspelled ("invlalid") but is kept
// unchanged because all call sites in this file use it.
fn invlalid_error_response() -> types::RpcError {
    types::RpcError {
        code: 0,
        message: "Invalid error response received".to_owned(),
    }
}
impl Controller {
pub fn new(
server_url: &str,
server_login: Option<String>,
server_password: Option<String>,
server_tls_enabled: Option<bool>,
miner_tx: mpsc::Sender<types::MinerMessage>,
stats: Arc<RwLock<stats::Stats>>,
) -> Result<Controller, Error> {
let (tx, rx) = mpsc::channel::<types::ClientMessage>();
Ok(Controller {
_id: 0,
server_url: server_url.to_string(),
server_login: server_login,
server_password: server_password,
server_tls_enabled: server_tls_enabled,
stream: None,
tx: tx,
rx: rx,
miner_tx: miner_tx,
last_request_id: 0,
stats: stats,
})
}
pub fn try_connect(&mut self) -> Result<(), Error> {
self.stream = Some(Stream::new());
self.stream
.as_mut()
.unwrap()
.try_connect(&self.server_url, self.server_tls_enabled)?;
Ok(())
}
fn read_message(&mut self) -> Result<Option<String>, Error> {
if let None = self.stream {
return Err(Error::ConnectionError("broken pipe".to_string()));
}
let mut line = String::new();
match self.stream.as_mut().unwrap().read_line(&mut line) {
Ok(_) => {
// stream is not returning a proper error on disconnect
if line == "" {
return Err(Error::ConnectionError("broken pipe".to_string()));
}
return Ok(Some(line));
}
Err(ref e) if e.kind() == ErrorKind::BrokenPipe => {
return Err(Error::ConnectionError("broken pipe".to_string()));
}
Err(ref e) if e.kind() == ErrorKind::WouldBlock => {
return Ok(None);
}
Err(e) => {
error!(LOGGER, "Communication error with stratum server: {}", e);
return Err(Error::ConnectionError("broken pipe".to_string()));
}
}
}
fn send_message(&mut self, message: &str) -> Result<(), Error> {
if let None = self.stream {
return Err(Error::ConnectionError(String::from("No server connection")));
}
debug!(LOGGER, "sending request: {}", message);
let _ = self.stream.as_mut().unwrap().write(message.as_bytes());
let _ = self.stream.as_mut().unwrap().write("\n".as_bytes());
let _ = self.stream.as_mut().unwrap().flush();
Ok(())
}
fn send_message_get_job_template(&mut self) -> Result<(), Error> {
let req = types::RpcRequest {
id: self.last_request_id.to_string(),
jsonrpc: "2.0".to_string(),
method: "getjobtemplate".to_string(),
params: None,
};
let req_str = serde_json::to_string(&req)?;
{
let mut stats = self.stats.write()?;
stats.client_stats.last_message_sent = format!("Last Message Sent: Get New Job");
}
self.send_message(&req_str)
}
fn send_login(&mut self) -> Result<(), Error> {
// only send the login request if a login string is configured
let login_str = match self.server_login.clone() {
None => "".to_string(),
Some(server_login) => server_login.clone(),
};
if login_str == "" {
return Ok(());
}
let password_str = match self.server_password.clone() {
None => "".to_string(),
Some(server_password) => server_password.clone(),
};
let params = types::LoginParams {
login: login_str,
pass: password_str,
agent: "grin-miner".to_string(),
};
let req = types::RpcRequest {
id: self.last_request_id.to_string(),
jsonrpc: "2.0".to_string(),
method: "login".to_string(),
params: Some(serde_json::to_value(params)?),
};
let req_str = serde_json::to_string(&req)?;
{
let mut stats = self.stats.write()?;
stats.client_stats.last_message_sent = format!("Last Message Sent: Login");
}
self.send_message(&req_str)
}
fn send_message_get_status(&mut self) -> Result<(), Error> {
let req = types::RpcRequest {
id: self.last_request_id.to_string(),
jsonrpc: "2.0".to_string(),
method: "status".to_string(),
params: None,
};
let req_str = serde_json::to_string(&req)?;
self.send_message(&req_str)
}
fn send_message_submit(
&mut self,
height: u64,
job_id: u64,
edge_bits: u32,
nonce: u64,
pow: Vec<u64>,
) -> Result<(), Error> {
let params_in = types::SubmitParams {
height: height,
job_id: job_id,
edge_bits: edge_bits,
nonce: nonce,
pow: pow,
};
let params = serde_json::to_string(¶ms_in)?;
let req = types::RpcRequest {
id: self.last_request_id.to_string(),
jsonrpc: "2.0".to_string(),
method: "submit".to_string(),
params: Some(serde_json::from_str(¶ms)?),
};
let req_str = serde_json::to_string(&req)?;
{
let mut stats = self.stats.write()?;
stats.client_stats.last_message_sent = format!(
"Last Message Sent: Found share for height: {} - nonce: {}",
params_in.height, params_in.nonce
);
}
self.send_message(&req_str)
}
    /// Forward a received job template to the miner and record it in stats.
    fn send_miner_job(&mut self, job: types::JobTemplate) -> Result<(), Error> {
        let miner_message =
            types::MinerMessage::ReceivedJob(job.height, job.job_id, job.difficulty, job.pre_pow);
        let mut stats = self.stats.write()?;
        stats.client_stats.last_message_received = format!(
            "Last Message Received: Start Job for Height: {}, Difficulty: {}",
            job.height, job.difficulty
        );
        self.miner_tx.send(miner_message).map_err(|e| e.into())
    }
fn send_miner_stop(&mut self) -> Result<(), Error> {
let miner_message = types::MinerMessage::StopJob;
self.miner_tx.send(miner_message).map_err(|e| e.into())
}
pub fn handle_request(&mut self, req: types::RpcRequest) -> Result<(), Error> {
debug!(LOGGER, "Received request type: {}", req.method);
match req.method.as_str() {
"job" => match req.params {
None => Err(Error::RequestError("No params in job request".to_owned())),
Some(params) => {
let job = serde_json::from_value::<types::JobTemplate>(params)?;
info!(LOGGER, "Got a new job: {:?}", job);
self.send_miner_job(job)
}
},
_ => Err(Error::RequestError("Unknonw method".to_owned())),
}
}
    /// Dispatch a server response by its `method` field, updating the shared
    /// stats object and logging the outcome. Missing error payloads fall
    /// back to `invlalid_error_response()`.
    pub fn handle_response(&mut self, res: types::RpcResponse) -> Result<(), Error> {
        debug!(LOGGER, "Received response with id: {}", res.id);
        match res.method.as_str() {
            // "status" response can be used to further populate stats object
            "status" => {
                if let Some(result) = res.result {
                    let st = serde_json::from_value::<types::WorkerStatus>(result)?;
                    info!(
                        LOGGER,
                        "Status for worker {} - Height: {}, Difficulty: {}, ({}/{}/{})",
                        st.id,
                        st.height,
                        st.difficulty,
                        st.accepted,
                        st.rejected,
                        st.stale
                    );
                    // Add these status to the stats
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received = format!(
                        "Last Message Received: Accepted: {}, Rejected: {}, Stale: {}",
                        st.accepted, st.rejected, st.stale
                    );
                } else {
                    let err = res.error.unwrap_or_else(|| invlalid_error_response());
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received =
                        format!("Last Message Received: Failed to get status: {:?}", err);
                    error!(LOGGER, "Failed to get status: {:?}", err);
                }
                Ok(())
            }
            // "getjobtemplate" response gets sent to miners to work on
            "getjobtemplate" => {
                if let Some(result) = res.result {
                    let job: types::JobTemplate = serde_json::from_value(result)?;
                    {
                        let mut stats = self.stats.write()?;
                        stats.client_stats.last_message_received = format!(
                            "Last Message Received: Got job for block {} at difficulty {}",
                            job.height, job.difficulty
                        );
                    }
                    info!(
                        LOGGER,
                        "Got a job at height {} and difficulty {}", job.height, job.difficulty
                    );
                    self.send_miner_job(job)
                } else {
                    let err = res.error.unwrap_or_else(|| invlalid_error_response());
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received = format!(
                        "Last Message Received: Failed to get job template: {:?}",
                        err
                    );
                    error!(LOGGER, "Failed to get a job template: {:?}", err);
                    Ok(())
                }
            }
            // "submit" response
            "submit" => {
                if let Some(result) = res.result {
                    info!(LOGGER, "Share Accepted!!");
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received =
                        format!("Last Message Received: Share Accepted!!");
                    stats.mining_stats.solution_stats.num_shares_accepted += 1;
                    // An accepted share whose payload mentions "blockfound"
                    // means a full block solution.
                    let result = serde_json::to_string(&result)?;
                    if result.contains("blockfound") {
                        info!(LOGGER, "Block Found!!");
                        stats.client_stats.last_message_received =
                            format!("Last Message Received: Block Found!!");
                        stats.mining_stats.solution_stats.num_blocks_found += 1;
                    }
                } else {
                    let err = res.error.unwrap_or_else(|| invlalid_error_response());
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received = format!(
                        "Last Message Received: Failed to submit a solution: {:?}",
                        err.message
                    );
                    // "too late" rejections are counted as stale rather than
                    // invalid shares.
                    if err.message.contains("too late") {
                        stats.mining_stats.solution_stats.num_staled += 1;
                    } else {
                        stats.mining_stats.solution_stats.num_rejected += 1;
                    }
                    error!(LOGGER, "Failed to submit a solution: {:?}", err);
                }
                Ok(())
            }
            // "keepalive" response
            "keepalive" => {
                if res.result.is_some() {
                    // Nothing to do for keepalive "ok"
                    // dont update last_message_received with good keepalive response
                } else {
                    let err = res.error.unwrap_or_else(|| invlalid_error_response());
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received = format!(
                        "Last Message Received: Failed to request keepalive: {:?}",
                        err
                    );
                    error!(LOGGER, "Failed to request keepalive: {:?}", err);
                }
                Ok(())
            }
            // "login" response
            "login" => {
                if res.result.is_some() {
                    // Nothing to do for login "ok"
                    // dont update last_message_received with good login response
                } else {
                    // This is a fatal error
                    let err = res.error.unwrap_or_else(|| invlalid_error_response());
                    let mut stats = self.stats.write()?;
                    stats.client_stats.last_message_received =
                        format!("Last Message Received: Failed to log in: {:?}", err);
                    stats.client_stats.connection_status =
                        "Connection Status: Server requires login".to_string();
                    stats.client_stats.connected = false;
                    error!(LOGGER, "Failed to log in: {:?}", err);
                }
                Ok(())
            }
            // unknown method response
            _ => {
                let mut stats = self.stats.write()?;
                stats.client_stats.last_message_received =
                    format!("Last Message Received: Unknown Response: {:?}", res);
                warn!(LOGGER, "Unknown Response: {:?}", res);
                Ok(())
            }
        }
    }
    /// Main client loop: maintains the server connection (reconnecting every
    /// `server_retry_interval` seconds), pumps incoming server messages,
    /// periodically requests status, and relays messages from the miner
    /// (found solutions / shutdown) back to the server. Only returns on a
    /// `Shutdown` message from the miner.
    pub fn run(mut self) {
        let server_read_interval = 1;
        let server_retry_interval = 5;
        let mut next_server_read = time::get_time().sec + server_read_interval;
        let status_interval = 30;
        let mut next_status_request = time::get_time().sec + status_interval;
        let mut next_server_retry = time::get_time().sec;
        // Request the first job template
        thread::sleep(std::time::Duration::from_secs(1));
        let mut was_disconnected = true;
        loop {
            // Check our connection status, and try to correct if possible
            if let None = self.stream {
                if !was_disconnected {
                    // Connection just dropped: stop the miner until we have
                    // a fresh job from a new connection.
                    let _ = self.send_miner_stop();
                }
                was_disconnected = true;
                if time::get_time().sec > next_server_retry {
                    if let Err(_) = self.try_connect() {
                        let status = format!("Connection Status: Can't establish server connection to {}. Will retry every {} seconds",
                            self.server_url,
                            server_retry_interval);
                        warn!(LOGGER, "{}", status);
                        let mut stats = self.stats.write().unwrap();
                        stats.client_stats.connection_status = status;
                        stats.client_stats.connected = false;
                        self.stream = None;
                    } else {
                        let status = format!(
                            "Connection Status: Connected to Grin server at {}.",
                            self.server_url
                        );
                        warn!(LOGGER, "{}", status);
                        let mut stats = self.stats.write().unwrap();
                        stats.client_stats.connection_status = status;
                    }
                    next_server_retry = time::get_time().sec + server_retry_interval;
                    if let None = self.stream {
                        thread::sleep(std::time::Duration::from_secs(1));
                        continue;
                    }
                }
            } else {
                // get new job template
                if was_disconnected {
                    // Freshly (re)connected: log in and ask for work.
                    let _ = self.send_login();
                    let _ = self.send_message_get_job_template();
                    was_disconnected = false;
                }
                // read messages from server
                if time::get_time().sec > next_server_read {
                    match self.read_message() {
                        Ok(message) => {
                            match message {
                                Some(m) => {
                                    {
                                        let mut stats = self.stats.write().unwrap();
                                        stats.client_stats.connected = true;
                                    }
                                    // figure out what kind of message,
                                    // and dispatch appropriately
                                    debug!(LOGGER, "Received message: {}", m);
                                    // Deserialize to see what type of object it is
                                    if let Ok(v) = serde_json::from_str::<serde_json::Value>(&m) {
                                        // Is this a response or request?
                                        if v["method"] == String::from("job") {
                                            // this is a request
                                            match serde_json::from_str::<types::RpcRequest>(&m) {
                                                Err(e) => error!(
                                                    LOGGER,
                                                    "Error parsing request {} : {:?}", m, e
                                                ),
                                                Ok(request) => {
                                                    if let Err(err) = self.handle_request(request) {
                                                        error!(
                                                            LOGGER,
                                                            "Error handling request {} : :{:?}",
                                                            m,
                                                            err
                                                        )
                                                    }
                                                }
                                            }
                                            continue;
                                        } else {
                                            // this is a response
                                            match serde_json::from_str::<types::RpcResponse>(&m) {
                                                Err(e) => error!(
                                                    LOGGER,
                                                    "Error parsing response {} : {:?}", m, e
                                                ),
                                                Ok(response) => {
                                                    if let Err(err) = self.handle_response(response)
                                                    {
                                                        error!(
                                                            LOGGER,
                                                            "Error handling response {} : :{:?}",
                                                            m,
                                                            err
                                                        )
                                                    }
                                                }
                                            }
                                            continue;
                                        }
                                    } else {
                                        error!(LOGGER, "Error parsing message: {}", m)
                                    }
                                }
                                None => {} // No messages from the server at this time
                            }
                        }
                        Err(e) => {
                            // Read failure: drop the stream; the top of the
                            // loop will schedule a reconnect.
                            error!(LOGGER, "Error reading message: {:?}", e);
                            self.stream = None;
                            continue;
                        }
                    }
                    next_server_read = time::get_time().sec + server_read_interval;
                }
                // Request a status message from the server
                if time::get_time().sec > next_status_request {
                    let _ = self.send_message_get_status();
                    next_status_request = time::get_time().sec + status_interval;
                }
            }
            // Talk to the cuckoo miner plugin
            while let Some(message) = self.rx.try_iter().next() {
                debug!(LOGGER, "Client received message: {:?}", message);
                let result = match message {
                    types::ClientMessage::FoundSolution(height, job_id, edge_bits, nonce, pow) => {
                        self.send_message_submit(height, job_id, edge_bits, nonce, pow)
                    }
                    types::ClientMessage::Shutdown => {
                        //TODO: Inform server?
                        debug!(LOGGER, "Shutting down client controller");
                        return;
                    }
                };
                if let Err(e) = result {
                    // Failing to submit implies a broken connection; force a
                    // reconnect on the next loop iteration.
                    error!(LOGGER, "Mining Controller Error {:?}", e);
                    self.stream = None;
                }
            }
            thread::sleep(std::time::Duration::from_millis(10));
        } // loop
    }
}
| 31.021212 | 117 | 0.620201 |
bf43da16be0b1944faab6fb2fcbcb062ee4db125 | 9,221 | //! All error types used throughout the library.
use chrono::{DateTime, Utc};
use thiserror::Error;
/// Errors that can occur while creating or using a `Celery` app.
#[derive(Error, Debug)]
pub enum CeleryError {
/// Raised when `Celery::consume_from` is given an empty array of queues.
#[error("at least one queue required to consume from")]
NoQueueToConsume,
/// Forced shutdown.
#[error("forced shutdown")]
ForcedShutdown,
/// Any other broker-level error that could happen when initializing or with an open
/// connection.
#[error("broker error")]
BrokerError(#[from] BrokerError),
/// Any other IO error that could occur.
#[error("IO error")]
IoError(#[from] std::io::Error),
/// A protocol error.
#[error("protocol error")]
ProtocolError(#[from] ProtocolError),
/// There is already a task registerd to this name.
#[error("there is already a task registered as '{0}'")]
TaskRegistrationError(String),
#[error("received unregistered task {0}")]
UnregisteredTaskError(String),
}
/// Errors that can occur while creating or using a `Beat` app.
///
/// The scheduler-side counterpart of [`CeleryError`].
#[derive(Error, Debug)]
pub enum BeatError {
    /// Any broker-level error.
    #[error("broker error")]
    BrokerError(#[from] BrokerError),
    /// A protocol error.
    #[error("protocol error")]
    ProtocolError(#[from] ProtocolError),
    /// An error with a task schedule.
    #[error("task schedule error")]
    ScheduleError(#[from] ScheduleError),
}
/// Errors that are related to task schedules.
#[derive(Error, Debug)]
pub enum ScheduleError {
    /// Error that can occur while creating a cron schedule.
    ///
    /// The payload is a description of why the cron expression was rejected.
    #[error("invalid cron schedule: {0}")]
    CronScheduleError(String),
}
/// Errors that can occur at the task level.
#[derive(Error, Debug)]
pub enum TaskError {
    /// An error that is expected to happen every once in a while.
    ///
    /// These errors will only be logged at the `WARN` level and will always trigger a task
    /// retry unless [`max_retries`](../task/struct.TaskOptions.html#structfield.max_retries)
    /// is set to 0 (or max retries is exceeded).
    ///
    /// A typical example is a task that makes an HTTP request to an external service.
    /// If that service is temporarily unavailable the task should raise an `ExpectedError`.
    ///
    /// Tasks are always retried with capped exponential backoff.
    #[error("task raised expected error: {0}")]
    ExpectedError(String),
    /// Should be used when a task encounters an error that is unexpected.
    ///
    /// These errors will always be logged at the `ERROR` level. The retry behavior
    /// when this error is encountered is determined by the
    /// [`TaskOptions::retry_for_unexpected`](../task/struct.TaskOptions.html#structfield.retry_for_unexpected)
    /// setting.
    #[error("task raised unexpected error: {0}")]
    UnexpectedError(String),
    /// Raised when a task runs over its time limit specified by the
    /// [`TaskOptions::time_limit`](../task/struct.TaskOptions.html#structfield.time_limit) setting.
    ///
    /// These errors are logged at the `ERROR` level but are otherwise treated like
    /// `ExpectedError`s in that they will trigger a retry when `max_retries` is anything but 0.
    ///
    /// Typically a task implementation doesn't need to return these errors directly
    /// because they will be raised automatically when the task runs over it's `time_limit`,
    /// provided the task yields control at some point (like with non-blocking IO).
    #[error("task timed out")]
    TimeoutError,
    /// A task can return this error variant to manually trigger a retry.
    ///
    /// This error variant should generally not be used directly. Instead, you should
    /// call the `Task::retry_with_countdown` or `Task::retry_with_eta` trait methods
    /// to manually trigger a retry from within a task.
    ///
    /// The optional payload is the ETA at which the retry should run; `None`
    /// presumably falls back to the default retry timing — confirm in the
    /// retry handling code.
    #[error("task retry triggered")]
    Retry(Option<DateTime<Utc>>),
}
/// Errors that can occur while tracing a task.
#[derive(Error, Debug)]
pub(crate) enum TraceError {
    /// Raised when a task throws an error while executing.
    #[error("task failed")]
    TaskError(TaskError),
    /// Raised when an expired task is received.
    #[error("task expired")]
    ExpirationError,
    /// Raised when a task should be retried.
    ///
    /// Carries the same optional ETA payload as `TaskError::Retry`.
    #[error("retrying task")]
    Retry(Option<DateTime<Utc>>),
}
/// Errors that can occur at the broker level.
#[derive(Error, Debug)]
pub enum BrokerError {
    /// Raised when a broker URL can't be parsed.
    #[error("invalid broker URL '{0}'")]
    InvalidBrokerUrl(String),
    /// The queue you're attempting to use has not been defined.
    #[error("unknown queue '{0}'")]
    UnknownQueue(String),
    /// Broker is disconnected.
    #[error("broker not connected")]
    NotConnected,
    /// Any IO error that could occur.
    #[error("IO error \"{0}\"")]
    IoError(#[from] std::io::Error),
    /// Deserialize error
    #[error("Deserialize error \"{0}\"")]
    DeserializeError(#[from] serde_json::Error),
    /// Routing pattern error
    #[error("Routing pattern error \"{0}\"")]
    BadRoutingPattern(#[from] BadRoutingPattern),
    /// Protocol error
    #[error("Protocol error \"{0}\"")]
    ProtocolError(#[from] ProtocolError),
    /// Any other AMQP error that could happen.
    #[error("AMQP error \"{0}\"")]
    AMQPError(#[from] lapin::Error),
    /// Any other Redis error that could happen.
    #[error("Redis error \"{0}\"")]
    RedisError(#[from] redis::RedisError),
}
impl BrokerError {
pub fn is_connection_error(&self) -> bool {
match self {
BrokerError::IoError(_) | BrokerError::NotConnected => true,
BrokerError::AMQPError(err) => matches!(err,
lapin::Error::ProtocolError(_) |
lapin::Error::InvalidConnectionState(_) |
lapin::Error::InvalidChannelState(_)
),
BrokerError::RedisError(err) => {
err.is_connection_dropped() || err.is_connection_refusal()
}
_ => false,
}
}
}
/// An invalid glob pattern for a routing rule.
///
/// Wraps the underlying `globset` compilation error.
#[derive(Error, Debug)]
#[error("invalid glob routing rule")]
pub struct BadRoutingPattern(#[from] globset::Error);
/// Errors that can occur due to messages not conforming to the protocol.
#[derive(Error, Debug)]
pub enum ProtocolError {
/// Raised when a required message property is missing.
#[error("missing required property '{0}'")]
MissingRequiredProperty(String),
/// Raised when the headers are missing altogether.
#[error("missing headers")]
MissingHeaders,
/// Raised when a required message header is missing.
#[error("missing required property '{0}'")]
MissingRequiredHeader(String),
/// Raised when serializing or de-serializing a message body fails.
#[error("message body serialization error")]
BodySerializationError(#[from] ContentTypeError),
/// Raised when field value is invalid.
#[error("invalid property '{0}'")]
InvalidProperty(String),
}
// Conversions from each serializer's error type so `?` can be used on serde
// results inside protocol code.  Every conversion routes through
// `ContentTypeError` and ends up as `ProtocolError::BodySerializationError`
// (via the derived `From<ContentTypeError>` on that variant).  All but the
// JSON impl are gated on the `extra_content_types` feature (or tests).
impl From<serde_json::Error> for ProtocolError {
    fn from(err: serde_json::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
#[cfg(any(test, feature = "extra_content_types"))]
impl From<serde_yaml::Error> for ProtocolError {
    fn from(err: serde_yaml::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
#[cfg(any(test, feature = "extra_content_types"))]
impl From<serde_pickle::error::Error> for ProtocolError {
    fn from(err: serde_pickle::error::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
#[cfg(any(test, feature = "extra_content_types"))]
impl From<rmp_serde::decode::Error> for ProtocolError {
    fn from(err: rmp_serde::decode::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
#[cfg(any(test, feature = "extra_content_types"))]
impl From<rmp_serde::encode::Error> for ProtocolError {
    fn from(err: rmp_serde::encode::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
#[cfg(any(test, feature = "extra_content_types"))]
impl From<rmpv::ext::Error> for ProtocolError {
    fn from(err: rmpv::ext::Error) -> Self {
        Self::from(ContentTypeError::from(err))
    }
}
/// Low-level (de)serialization failures for the supported message content
/// types, wrapped by `ProtocolError::BodySerializationError`.  All variants
/// except `Json` are only compiled with the `extra_content_types` feature
/// (or in tests).
#[derive(Error, Debug)]
pub enum ContentTypeError {
    #[error("JSON serialization error")]
    Json(#[from] serde_json::Error),
    #[cfg(any(test, feature = "extra_content_types"))]
    #[error("YAML serialization error")]
    Yaml(#[from] serde_yaml::Error),
    #[cfg(any(test, feature = "extra_content_types"))]
    #[error("Pickle serialization error")]
    Pickle(#[from] serde_pickle::error::Error),
    #[cfg(any(test, feature = "extra_content_types"))]
    #[error("MessagePack decoding error")]
    MsgPackDecode(#[from] rmp_serde::decode::Error),
    #[cfg(any(test, feature = "extra_content_types"))]
    #[error("MessagePack encoding error")]
    MsgPackEncode(#[from] rmp_serde::encode::Error),
    #[cfg(any(test, feature = "extra_content_types"))]
    #[error("MessagePack value error")]
    MsgPackValue(#[from] rmpv::ext::Error),
    #[error("Unknown content type error")]
    Unknown,
}
| 33.050179 | 111 | 0.657955 |
1c0f4f85eca715976c37284476678a6f17880652 | 6,160 | //! Access to the real-time state of the joysticks.
//!
//! `joystick` provides an interface to the state of the joysticks.
//!
//! Each joystick is identified by an index that is passed to the functions of this module.
//!
//! This module allows users to query the state of joysticks at any time and directly,
//! without having to deal with a window and its events. Compared to the [`JoystickMoved`],
//! [`JoystickButtonPressed`] and [`JoystickButtonReleased`] events, `Joystick` can retrieve the
//! state of axes and buttons of joysticks at any time (you don't need to store and update a
//! boolean on your side in order to know if a button is pressed or released),
//! and you always get the real state of joysticks, even if they are moved,
//! pressed or released when your window is out of focus and no event is triggered.
//!
//! [`JoystickMoved`]: ::window::Event::JoystickMoved
//! [`JoystickButtonPressed`]: ::window::Event::JoystickButtonPressed
//! [`JoystickButtonReleased`]: ::window::Event::JoystickButtonReleased
//!
//! SFML supports:
//!
//! - 8 joysticks ([`COUNT`])
//! - 32 buttons per joystick ([`BUTTON_COUNT`])
//! - 8 axes per joystick ([`AXIS_COUNT`])
//!
//! Unlike the keyboard or mouse, the state of joysticks is sometimes not directly
//! available (depending on the OS), therefore an [`update`] function must be called in order to
//! update the current state of joysticks. When you have a window with event handling, this is
//! done automatically, you don't need to call anything. But if you have no window, or if you want
//! to check joysticks state before creating one, you must call [`update`] explicitly.
//! # Usage example
//!
//! ```
//! use sfml::window::joystick;
//!
//! // If joystick #0 is connected
//! if joystick::is_connected(0) {
//! // How many buttons does joystick #0 support?
//! let _buttons = joystick::button_count(0);
//! // Does joystick #0 define a X axis?
//! let _hax_x = joystick::has_axis(0, joystick::Axis::X);
//! // Is button #2 pressed on joystick #0?
//! let _pressed = joystick::is_button_pressed(0, 2);
//! // What's the current position of the Y axis on joystick #0?
//! let _position = joystick::axis_position(0, joystick::Axis::Y);
//! }
//! ```
//!
//! [`COUNT`]: joystick::COUNT
//! [`BUTTON_COUNT`]: joystick::BUTTON_COUNT
//! [`AXIS_COUNT`]: joystick::AXIS_COUNT
//! [`update`]: joystick::update
//!
use crate::sf_bool_ext::SfBoolExt;
use csfml_window_sys as ffi;
/// Maximum number of supported joysticks.
///
/// NOTE(review): these limits presumably mirror SFML's `sf::Joystick`
/// constants; indices passed to the functions below should stay under them —
/// confirm against the CSFML headers.
pub const COUNT: u32 = 8;
/// Maximum number of supported buttons.
pub const BUTTON_COUNT: u32 = 32;
/// Maximum number of supported axes.
pub const AXIS_COUNT: u32 = 8;
/// Axes supported by SFML joysticks
///
/// The `repr(u32)` discriminants (0..=7) must stay in sync with CSFML's
/// `sfJoystickAxis`, since `Axis::raw`/`Axis::from_raw` transmute between
/// the two representations.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Copy)]
#[repr(u32)]
pub enum Axis {
    /// The X axis.
    X = 0,
    /// The Y axis.
    Y = 1,
    /// The Z axis.
    Z = 2,
    /// The R axis.
    R = 3,
    /// The U axis.
    U = 4,
    /// The V axis.
    V = 5,
    /// The X axis of the point-of-view hat.
    PovX = 6,
    /// The Y axis of the point-of-view hat.
    PovY = 7,
}
impl Axis {
    /// Convert this axis to the raw CSFML representation.
    fn raw(self) -> ffi::sfJoystickAxis {
        // SAFETY: `Axis` is `repr(u32)` with discriminants 0..=7 — assumed to
        // match the layout of `ffi::sfJoystickAxis` exactly; TODO confirm
        // against the generated bindings.
        unsafe { ::std::mem::transmute(self) }
    }
    /// Convert a raw CSFML axis value into an `Axis`.
    ///
    /// Callers must pass a valid `sfJoystickAxis` value (0..=7); anything
    /// else is undefined behavior, hence the `unsafe` marker.
    pub(super) unsafe fn from_raw(raw: ffi::sfJoystickAxis) -> Self {
        ::std::mem::transmute(raw)
    }
}
/// Structure holding a joystick's identification.
///
/// Produced by [`identification`] from CSFML's identification struct.
#[derive(Debug)]
pub struct Identification {
    /// Name of the joystick.
    pub name: String,
    /// Manufacturer identifier.
    pub vendor_id: u32,
    /// Product identifier.
    pub product_id: u32,
}
/// Check if the joystick is connected
///
/// # Arguments
/// * joystick - Index of the joystick to check
///
/// Return true if the joystick is connected, false otherwise
pub fn is_connected(joystick: u32) -> bool {
    // SAFETY: plain FFI call; the index is passed by value and no Rust
    // memory is handed to CSFML.
    unsafe { ffi::sfJoystick_isConnected(joystick).to_bool() }
}
/// Return the number of buttons supported by a joystick
///
/// # Arguments
/// * joystick - Index of the joystick
///
/// Return the number of buttons supported by the joystick.
pub fn button_count(joystick: u32) -> u32 {
    // SAFETY: plain FFI call with a by-value argument.
    unsafe { ffi::sfJoystick_getButtonCount(joystick) }
}
/// Check if the joystick support a given axis
///
/// If the joystick is not connected, this function returns false.
///
/// # Arguments
/// * joystick - Index of the joystick
/// * axis - Axis to check
///
/// Return true if the joystick supports the axis, false otherwise
pub fn has_axis(joystick: u32, axis: Axis) -> bool {
    // SAFETY: plain FFI call; `axis.raw()` yields a valid sfJoystickAxis.
    unsafe { ffi::sfJoystick_hasAxis(joystick, axis.raw()).to_bool() }
}
/// Check if the button is pressed on a given joystick.
///
/// If the joystick is not connected, this function returns false.
///
/// # Arguments
/// * joystick - Index of the joystick
/// * button - Button to check
///
/// Return true if the button is pressed, false otherwise
pub fn is_button_pressed(joystick: u32, button: u32) -> bool {
    // SAFETY: plain FFI call with by-value arguments.
    unsafe { ffi::sfJoystick_isButtonPressed(joystick, button).to_bool() }
}
/// Get the current position on a given axis, on a given joystick.
///
/// If the joystick is not connected, this function returns 0.
///
/// # Arguments
/// * joystick - Index of the joystick
/// * axis - Axis to check
///
/// Return the current position of the axis, in range [-100 .. 100]
pub fn axis_position(joystick: u32, axis: Axis) -> f32 {
    // SAFETY: plain FFI call with by-value arguments.
    unsafe { ffi::sfJoystick_getAxisPosition(joystick, axis.raw()) }
}
/// Update the states of all joysticks
///
/// This function is used internally by SFML, so you normally
/// don't have to call it explicitely. However, you may need to
/// call it if you have no window yet (or no window at all):
/// in this case the joysticks states are not updated automatically.
pub fn update() {
    // SAFETY: no arguments, no Rust memory involved.
    unsafe {
        ffi::sfJoystick_update();
    }
}
/// Get the joystick information.
pub fn identification(joystick: u32) -> Identification {
    use std::ffi::CStr;
    let raw = unsafe { ffi::sfJoystick_getIdentification(joystick) };
    Identification {
        // SAFETY: `raw.name` is assumed to be a valid NUL-terminated C
        // string owned by CSFML; the bytes are copied out immediately via
        // `into_owned` — TODO confirm the pointer's lifetime guarantees.
        name: unsafe { CStr::from_ptr(raw.name).to_string_lossy().into_owned() },
        vendor_id: raw.vendorId,
        product_id: raw.productId,
    }
}
| 32.592593 | 98 | 0.668019 |
90c31a06be52320efdfe190cf54ad5a036ab1085 | 2,171 | use ignore;
use errors::Error;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use file_patcher::FilePatcher;
use query::Query;
use stats::Stats;
pub struct DirectoryPatcher {
    /// Root directory to walk.
    path: PathBuf,
    /// When true, patches are printed but never written to disk.
    dry_run: bool,
    /// Running count of replacements performed.
    stats: Stats,
}
impl DirectoryPatcher {
pub fn new(path: PathBuf) -> DirectoryPatcher {
let stats = Stats::default();
DirectoryPatcher {
path,
dry_run: false,
stats,
}
}
pub fn patch(&mut self, query: Query) -> Result<(), Error> {
self.walk(query)?;
Ok(())
}
pub fn stats(self) -> Stats {
self.stats
}
pub fn dry_run(&mut self, dry_run: bool) {
self.dry_run = dry_run
}
pub fn patch_file(&mut self, entry: &Path, query: &Query) -> Result<(), Error> {
let file_patcher = FilePatcher::new(entry.to_path_buf(), &query);
if let Err(err) = &file_patcher {
match err.kind() {
// Just ignore binary or non-utf8 files
ErrorKind::InvalidData => return Ok(()),
_ => return Error::from_read_error(entry, err),
}
}
let file_patcher = file_patcher.unwrap();
let replacements = file_patcher.replacements();
if replacements.is_empty() {
return Ok(());
}
self.stats.update(replacements.len());
file_patcher.print_patch();
if self.dry_run {
return Ok(());
}
if let Err(err) = file_patcher.run() {
return Error::from_write_error(&entry, &err);
}
Ok(())
}
fn walk(&mut self, query: Query) -> Result<(), Error> {
for result in ignore::Walk::new(&self.path) {
match result {
Ok(entry) => {
if let Some(file_type) = entry.file_type() {
if file_type.is_file() {
self.patch_file(&entry.path(), &query)?;
}
}
}
Err(err) => return Err(err.into()),
}
}
Ok(())
}
}
#[cfg(test)]
// Unit-test module placeholder; currently empty.
mod tests {}
| 25.845238 | 84 | 0.497927 |
1c268b49cd75b6f22bd6fd02efdc4512ceca6ba8 | 2,143 | //! This module implements manually tracked test coverage, which useful for
//! quickly finding a test responsible for testing a particular bit of code.
//!
//! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html>
//! for details, but the TL;DR is that you write your test as
//!
//! ```rust,no_run
//! #[test]
//! fn test_foo() {
//! covers!(test_foo);
//! }
//! ```
//!
//! and in the code under test you write
//!
//! ```rust,no_run
//! # use test_utils::tested_by;
//! # fn some_condition() -> bool { true }
//! fn foo() {
//! if some_condition() {
//! tested_by!(test_foo);
//! }
//! }
//! ```
//!
//! This module then checks that executing the test indeed covers the specified
//! function. This is useful if you come back to the `foo` function ten years
//! later and wonder where the test are: now you can grep for `test_foo`.
use std::sync::atomic::{AtomicUsize, Ordering};
#[macro_export]
macro_rules! tested_by {
    ($ident:ident) => {{
        #[cfg(test)]
        {
            // sic! use call-site crate
            // Bumping the counter marks this code path as "covered"; the
            // whole block is compiled out in non-test builds.
            crate::marks::$ident.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        }
    }};
}
#[macro_export]
macro_rules! covers {
    ($ident:ident) => {
        // sic! use call-site crate
        // Bound to `_checker` (not `_`) so the guard lives until the end of
        // the enclosing scope and verifies the mark was hit when dropped.
        let _checker = $crate::marks::MarkChecker::new(&crate::marks::$ident);
    };
}
#[macro_export]
macro_rules! marks {
    ($($ident:ident)*) => {
        $(
            // One static counter per declared mark name, shared between
            // `tested_by!` (increments) and `covers!` (checks).
            #[allow(bad_style)]
            pub(crate) static $ident: std::sync::atomic::AtomicUsize =
                std::sync::atomic::AtomicUsize::new(0);
        )*
    };
}
/// RAII guard created by `covers!`: snapshots the mark's counter on creation
/// and asserts on drop that it increased, i.e. that the marked code path
/// actually ran during the test.
pub struct MarkChecker {
    mark: &'static AtomicUsize,
    entered_at: usize,
}
impl MarkChecker {
    pub fn new(mark: &'static AtomicUsize) -> MarkChecker {
        MarkChecker {
            mark,
            entered_at: mark.load(Ordering::SeqCst),
        }
    }
}
impl Drop for MarkChecker {
    fn drop(&mut self) {
        // If the test is already unwinding, don't pile a second panic on top.
        if std::thread::panicking() {
            return;
        }
        assert!(
            self.mark.load(Ordering::SeqCst) > self.entered_at,
            "mark was not hit"
        )
    }
}
| 26.134146 | 83 | 0.59776 |
1d3b8875a4c7143ec092812818d7b0d64137c925 | 161 | use rtools::{Rglica, ToRglica};
use crate::{impl_view, view, View, ViewBase};
// Empty image-view widget.  `#[view]` and `impl_view!` are project macros
// (see the `view`/`impl_view` imports above) — presumably they inject the
// shared `ViewBase` state and the `View` trait plumbing; the expansions are
// not visible here, so confirm before relying on specifics.
#[view]
#[derive(Default, Debug)]
pub struct ImageView {}
impl_view!(ImageView);
| 17.888889 | 45 | 0.708075 |
61de4d0d82592a8e5e4af49592803c994c6bc93c | 5,854 | //! Displays several lines with both methods.
use amethyst::{
controls::{FlyControlBundle, FlyControlTag},
core::{
nalgebra::{Point3, Vector3},
transform::{Transform, TransformBundle},
Time,
},
ecs::{Read, System, Write},
input::InputBundle,
prelude::*,
renderer::*,
utils::application_root_dir,
};
/// Demo system that draws two moving debug lines every frame through the
/// `DebugLines` resource.
struct ExampleLinesSystem;
impl<'s> System<'s> for ExampleLinesSystem {
    type SystemData = (
        Write<'s, DebugLines>, // Request DebugLines resource
        Read<'s, Time>,
    );
    fn run(&mut self, (mut debug_lines_resource, time): Self::SystemData) {
        // Drawing debug lines, as a resource
        // `t` oscillates in [-1, 1] with elapsed time, sliding both lines
        // back and forth along the X axis.
        let t = (time.absolute_time_seconds() as f32).cos();
        debug_lines_resource.draw_direction(
            [t, 0.0, 0.5].into(),
            [0.0, 0.3, 0.0].into(),
            [0.5, 0.05, 0.65, 1.0].into(),
        );
        debug_lines_resource.draw_line(
            [t, 0.0, 0.5].into(),
            [0.0, 0.0, 0.2].into(),
            [0.5, 0.05, 0.65, 1.0].into(),
        );
    }
}
/// Initial state: sets up the debug-lines resource, a static axis/grid
/// component, and a fly-controlled perspective camera.
struct ExampleState;
impl SimpleState for ExampleState {
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        // Setup debug lines as a resource
        data.world
            .add_resource(DebugLines::new().with_capacity(100));
        // Configure width of lines. Optional step
        data.world.add_resource(DebugLinesParams {
            line_width: 1.0 / 400.0,
        });
        // Setup debug lines as a component and add lines to render axis&grid
        let mut debug_lines_component = DebugLinesComponent::new().with_capacity(100);
        // Axis markers: X, Y, Z.  The X and Z markers start slightly above
        // y=0, presumably to avoid z-fighting with the grid plane — confirm.
        debug_lines_component.add_direction(
            [0.0, 0.0001, 0.0].into(),
            [0.2, 0.0, 0.0].into(),
            [1.0, 0.0, 0.23, 1.0].into(),
        );
        debug_lines_component.add_direction(
            [0.0, 0.0, 0.0].into(),
            [0.0, 0.2, 0.0].into(),
            [0.5, 0.85, 0.1, 1.0].into(),
        );
        debug_lines_component.add_direction(
            [0.0, 0.0001, 0.0].into(),
            [0.0, 0.0, 0.2].into(),
            [0.2, 0.75, 0.93, 1.0].into(),
        );
        // 10x10 grid of main lines, centered on the origin.
        let width: u32 = 10;
        let depth: u32 = 10;
        let main_color = [0.4, 0.4, 0.4, 1.0].into();
        // Grid lines in X-axis
        for x in 0..=width {
            let (x, width, depth) = (x as f32, width as f32, depth as f32);
            let position = Point3::new(x - width / 2.0, 0.0, -depth / 2.0);
            let direction = Vector3::new(0.0, 0.0, depth);
            debug_lines_component.add_direction(position, direction, main_color);
            // Sub-grid lines
            // Nine fainter lines between each pair of main lines, nudged
            // slightly below y=0.
            if x != width {
                for sub_x in 1..10 {
                    let sub_offset = Vector3::new((1.0 / 10.0) * sub_x as f32, -0.001, 0.0);
                    debug_lines_component.add_direction(
                        position + sub_offset,
                        direction,
                        [0.1, 0.1, 0.1, 0.1].into(),
                    );
                }
            }
        }
        // Grid lines in Z-axis
        for z in 0..=depth {
            let (z, width, depth) = (z as f32, width as f32, depth as f32);
            let position = Point3::new(-width / 2.0, 0.0, z - depth / 2.0);
            let direction = Vector3::new(width, 0.0, 0.0);
            debug_lines_component.add_direction(position, direction, main_color);
            // Sub-grid lines
            // NOTE(review): these use alpha 0.0 while the X-axis sub-grid
            // above uses 0.1 — a zero alpha makes them invisible; confirm
            // this asymmetry is intended.
            if z != depth {
                for sub_z in 1..10 {
                    let sub_offset = Vector3::new(0.0, -0.001, (1.0 / 10.0) * sub_z as f32);
                    debug_lines_component.add_direction(
                        position + sub_offset,
                        direction,
                        [0.1, 0.1, 0.1, 0.0].into(),
                    );
                }
            }
        }
        data.world.register::<DebugLinesComponent>();
        data.world
            .create_entity()
            .with(debug_lines_component)
            .build();
        // Setup camera
        let mut local_transform = Transform::default();
        local_transform.set_position([0.0, 0.5, 2.0].into());
        data.world
            .create_entity()
            .with(FlyControlTag)
            .with(Camera::from(Projection::perspective(
                1.33333,
                std::f32::consts::FRAC_PI_2,
            )))
            .with(local_transform)
            .build();
    }
}
/// Entry point: builds the render pipeline plus input / fly-control /
/// transform bundles and runs the example application.
fn main() -> amethyst::Result<()> {
    amethyst::start_logger(Default::default());
    let app_root = application_root_dir()?;
    let display_config_path = app_root.join("examples/debug_lines/resources/display.ron");
    let key_bindings_path = app_root.join("examples/debug_lines/resources/input.ron");
    let resources = app_root.join("examples/assets/");
    // Single render stage: clear the backbuffer, then draw the debug lines.
    let pipe = Pipeline::build().with_stage(
        Stage::with_backbuffer()
            .clear_target([0.001, 0.005, 0.005, 1.0], 1.0)
            .with_pass(DrawDebugLines::<PosColorNorm>::new()),
    );
    let config = DisplayConfig::load(display_config_path);
    let fly_control_bundle = FlyControlBundle::<String, String>::new(
        Some(String::from("move_x")),
        Some(String::from("move_y")),
        Some(String::from("move_z")),
    )
    .with_sensitivity(0.1, 0.1);
    let game_data = GameDataBuilder::default()
        .with_bundle(
            InputBundle::<String, String>::new().with_bindings_from_file(&key_bindings_path)?,
        )?
        .with(ExampleLinesSystem, "example_lines_system", &[])
        .with_bundle(fly_control_bundle)?
        // Transforms run after the fly-control movement system (dependency).
        .with_bundle(TransformBundle::new().with_dep(&["fly_movement"]))?
        .with_bundle(RenderBundle::new(pipe, Some(config)))?;
    let mut game = Application::new(resources, ExampleState, game_data)?;
    game.run();
    Ok(())
}
| 32.88764 | 94 | 0.529211 |
8ffd62c0dd66f15204be6f442848837a7c6a6f6b | 13,437 | // This file is part of the uutils coreutils package.
//
// (c) Anthony Deschamps <[email protected]>
// (c) Sylvestre Ledru <[email protected]>
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (chrono) Datelike Timelike ; (format) DATEFILE MMDDhhmm ; (vars) datetime datetimes
use chrono::{DateTime, FixedOffset, Local, Offset, Utc};
#[cfg(windows)]
use chrono::{Datelike, Timelike};
use clap::{crate_version, App, Arg};
#[cfg(all(unix, not(target_os = "macos"), not(target_os = "redox")))]
use libc::{clock_settime, timespec, CLOCK_REALTIME};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use uucore::display::Quotable;
#[cfg(not(any(target_os = "macos", target_os = "redox")))]
use uucore::error::FromIo;
use uucore::error::{UResult, USimpleError};
use uucore::show_error;
#[cfg(windows)]
use winapi::{
shared::minwindef::WORD,
um::{minwinbase::SYSTEMTIME, sysinfoapi::SetSystemTime},
};
// Options
// Precision keywords shared by `-I`/`--iso-8601` and `--rfc-3339`
// (singular and plural forms are both accepted; see the `From<&str>` impls).
const DATE: &str = "date";
const HOURS: &str = "hours";
const MINUTES: &str = "minutes";
const SECONDS: &str = "seconds";
const HOUR: &str = "hour";
const MINUTE: &str = "minute";
const SECOND: &str = "second";
const NS: &str = "ns";
// Command name and help summary.
const NAME: &str = "date";
const ABOUT: &str = "print or set the system date and time";
// Long option names.
const OPT_DATE: &str = "date";
const OPT_FORMAT: &str = "format";
const OPT_FILE: &str = "file";
const OPT_DEBUG: &str = "debug";
const OPT_ISO_8601: &str = "iso-8601";
const OPT_RFC_EMAIL: &str = "rfc-email";
const OPT_RFC_3339: &str = "rfc-3339";
const OPT_SET: &str = "set";
const OPT_REFERENCE: &str = "reference";
const OPT_UNIVERSAL: &str = "universal";
const OPT_UNIVERSAL_2: &str = "utc";
// Help strings
static ISO_8601_HELP_STRING: &str = "output date/time in ISO 8601 format.
 FMT='date' for date only (the default),
 'hours', 'minutes', 'seconds', or 'ns'
 for date and time to the indicated precision.
 Example: 2006-08-14T02:34:56-06:00";
static RFC_5322_HELP_STRING: &str = "output date and time in RFC 5322 format.
 Example: Mon, 14 Aug 2006 02:34:56 -0600";
static RFC_3339_HELP_STRING: &str = "output date/time in RFC 3339 format.
 FMT='date', 'seconds', or 'ns'
 for date and time to the indicated precision.
 Example: 2006-08-14 02:34:56-06:00";
// `--set` help text varies per platform because setting the clock is not
// implemented on macOS/Redox (see the `set_system_datetime` stubs below).
#[cfg(not(any(target_os = "macos", target_os = "redox")))]
static OPT_SET_HELP_STRING: &str = "set time described by STRING";
#[cfg(target_os = "macos")]
static OPT_SET_HELP_STRING: &str = "set time described by STRING (not available on mac yet)";
#[cfg(target_os = "redox")]
static OPT_SET_HELP_STRING: &str = "set time described by STRING (not available on redox yet)";
/// Settings for this program, parsed from the command line
struct Settings {
    /// Output (or set) Coordinated Universal Time instead of local time
    /// (`-u`/`--universal`).
    utc: bool,
    /// How the date(s) should be rendered.
    format: Format,
    /// Where the date(s) to print come from.
    date_source: DateSource,
    /// If present, set the system clock to this value instead of printing.
    set_to: Option<DateTime<FixedOffset>>,
}
/// Various ways of displaying the date
enum Format {
    /// ISO 8601 output (`-I[FMT]`) at the given precision.
    Iso8601(Iso8601Format),
    /// RFC 5322 / email format (`-R`).
    Rfc5322,
    /// RFC 3339 output (`--rfc-3339 FMT`) at the given precision.
    Rfc3339(Rfc3339Format),
    /// User-supplied `+FORMAT` pattern (leading '+' already stripped).
    Custom(String),
    /// No format flag given; "%c" is used (see `make_format_string`).
    Default,
}
/// Various places that dates can come from
enum DateSource {
    /// The current system time.
    Now,
    /// A date string given with `-d`/`--date`.
    Custom(String),
    /// A file with one date string per line (`-f`/`--file`).
    File(PathBuf),
}
/// Precision selector for ISO 8601 output (the FMT in `-I[FMT]`).
enum Iso8601Format {
    Date,
    Hours,
    Minutes,
    Seconds,
    Ns,
}
impl<'a> From<&'a str> for Iso8601Format {
    /// Map a `-I FMT` precision keyword onto the matching variant.
    fn from(s: &str) -> Self {
        match s {
            DATE => Self::Date,
            HOURS | HOUR => Self::Hours,
            MINUTES | MINUTE => Self::Minutes,
            SECONDS | SECOND => Self::Seconds,
            NS => Self::Ns,
            // Should be caught by clap
            _ => panic!("Invalid format: {}", s),
        }
    }
}
/// Precision selector for RFC 3339 output (the FMT in `--rfc-3339 FMT`).
enum Rfc3339Format {
    Date,
    Seconds,
    Ns,
}
impl<'a> From<&'a str> for Rfc3339Format {
    /// Map a `--rfc-3339 FMT` precision keyword onto the matching variant.
    fn from(s: &str) -> Self {
        match s {
            NS => Self::Ns,
            SECONDS | SECOND => Self::Seconds,
            DATE => Self::Date,
            // Should be caught by clap
            _ => panic!("Invalid format: {}", s),
        }
    }
}
#[uucore_procs::gen_uumain]
/// Entry point: parse the command line, then either set the system clock
/// (`--set`) or format and print one or more dates.
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
    // Two usage forms: print with an optional +FORMAT, or set the clock
    // from an MMDDhhmm-style timestamp.
    let syntax = format!(
        "{0} [OPTION]... [+FORMAT]...
 {0} [OPTION]... [MMDDhhmm[[CC]YY][.ss]]",
        NAME
    );
    let matches = uu_app().override_usage(&syntax[..]).get_matches_from(args);
    // Resolve the output format; precedence: +FORMAT, then -I/--iso-8601,
    // then -R/--rfc-email, then --rfc-3339, else the default.
    let format = if let Some(form) = matches.value_of(OPT_FORMAT) {
        if !form.starts_with('+') {
            return Err(USimpleError::new(
                1,
                format!("invalid date {}", form.quote()),
            ));
        }
        // Strip the leading '+' marker; the rest is the strftime pattern.
        let form = form[1..].to_string();
        Format::Custom(form)
    } else if let Some(fmt) = matches
        .values_of(OPT_ISO_8601)
        .map(|mut iter| iter.next().unwrap_or(DATE).into())
    {
        Format::Iso8601(fmt)
    } else if matches.is_present(OPT_RFC_EMAIL) {
        Format::Rfc5322
    } else if let Some(fmt) = matches.value_of(OPT_RFC_3339).map(Into::into) {
        Format::Rfc3339(fmt)
    } else {
        Format::Default
    };
    // Where the date(s) come from: -d STRING, -f DATEFILE, or "now".
    let date_source = if let Some(date) = matches.value_of(OPT_DATE) {
        DateSource::Custom(date.into())
    } else if let Some(file) = matches.value_of(OPT_FILE) {
        DateSource::File(file.into())
    } else {
        DateSource::Now
    };
    // Parse --set up front so a bad value fails before any output.
    let set_to = match matches.value_of(OPT_SET).map(parse_date) {
        None => None,
        Some(Err((input, _err))) => {
            return Err(USimpleError::new(
                1,
                format!("invalid date {}", input.quote()),
            ));
        }
        Some(Ok(date)) => Some(date),
    };
    let settings = Settings {
        utc: matches.is_present(OPT_UNIVERSAL),
        format,
        date_source,
        set_to,
    };
    if let Some(date) = settings.set_to {
        // All set time functions expect UTC datetimes.
        let date: DateTime<Utc> = if settings.utc {
            date.with_timezone(&Utc)
        } else {
            date.into()
        };
        return set_system_datetime(date);
    } else {
        // Declare a file here because it needs to outlive the `dates` iterator.
        let file: File;
        // Get the current time, either in the local time zone or UTC.
        let now: DateTime<FixedOffset> = if settings.utc {
            let now = Utc::now();
            now.with_timezone(&now.offset().fix())
        } else {
            let now = Local::now();
            now.with_timezone(now.offset())
        };
        // Iterate over all dates - whether it's a single date or a file.
        let dates: Box<dyn Iterator<Item = _>> = match settings.date_source {
            DateSource::Custom(ref input) => {
                let date = parse_date(input.clone());
                let iter = std::iter::once(date);
                Box::new(iter)
            }
            DateSource::File(ref path) => {
                // One date per line; unreadable lines are skipped silently.
                // NOTE(review): `File::open(...).unwrap()` panics on a
                // missing DATEFILE instead of reporting a uutils error.
                file = File::open(path).unwrap();
                let lines = BufReader::new(file).lines();
                let iter = lines.filter_map(Result::ok).map(parse_date);
                Box::new(iter)
            }
            DateSource::Now => {
                let iter = std::iter::once(Ok(now));
                Box::new(iter)
            }
        };
        let format_string = make_format_string(&settings);
        // Format all the dates
        for date in dates {
            match date {
                Ok(date) => {
                    // GNU `date` uses `%N` for nano seconds, however crate::chrono uses `%f`
                    let format_string = &format_string.replace("%N", "%f");
                    let formatted = date.format(format_string).to_string().replace("%f", "%N");
                    println!("{}", formatted);
                }
                Err((input, _err)) => show_error!("invalid date {}", input.quote()),
            }
        }
    }
    Ok(())
}
/// Construct the clap command-line definition for `date`.
// NOTE(review): `--debug` and `--reference` are declared here but not read
// in `uumain` above — presumably not implemented yet; confirm.
pub fn uu_app<'a>() -> App<'a> {
    App::new(uucore::util_name())
        .version(crate_version!())
        .about(ABOUT)
        .arg(
            Arg::new(OPT_DATE)
                .short('d')
                .long(OPT_DATE)
                .takes_value(true)
                .help("display time described by STRING, not 'now'"),
        )
        .arg(
            Arg::new(OPT_FILE)
                .short('f')
                .long(OPT_FILE)
                .takes_value(true)
                .help("like --date; once for each line of DATEFILE"),
        )
        .arg(
            Arg::new(OPT_ISO_8601)
                .short('I')
                .long(OPT_ISO_8601)
                .takes_value(true)
                .help(ISO_8601_HELP_STRING),
        )
        .arg(
            Arg::new(OPT_RFC_EMAIL)
                .short('R')
                .long(OPT_RFC_EMAIL)
                .help(RFC_5322_HELP_STRING),
        )
        .arg(
            Arg::new(OPT_RFC_3339)
                .long(OPT_RFC_3339)
                .takes_value(true)
                .help(RFC_3339_HELP_STRING),
        )
        .arg(
            Arg::new(OPT_DEBUG)
                .long(OPT_DEBUG)
                .help("annotate the parsed date, and warn about questionable usage to stderr"),
        )
        .arg(
            Arg::new(OPT_REFERENCE)
                .short('r')
                .long(OPT_REFERENCE)
                .takes_value(true)
                .help("display the last modification time of FILE"),
        )
        .arg(
            Arg::new(OPT_SET)
                .short('s')
                .long(OPT_SET)
                .takes_value(true)
                .help(OPT_SET_HELP_STRING),
        )
        .arg(
            Arg::new(OPT_UNIVERSAL)
                .short('u')
                .long(OPT_UNIVERSAL)
                .alias(OPT_UNIVERSAL_2)
                .help("print or set Coordinated Universal Time (UTC)"),
        )
        .arg(Arg::new(OPT_FORMAT).multiple_occurrences(false))
}
/// Return the strftime-style pattern matching the chosen output `Format`.
fn make_format_string(settings: &Settings) -> &str {
    match &settings.format {
        Format::Default => "%c",
        Format::Rfc5322 => "%a, %d %h %Y %T %z",
        Format::Custom(fmt) => fmt.as_str(),
        Format::Iso8601(fmt) => match fmt {
            Iso8601Format::Date => "%F",
            Iso8601Format::Hours => "%FT%H%:z",
            Iso8601Format::Minutes => "%FT%H:%M%:z",
            Iso8601Format::Seconds => "%FT%T%:z",
            Iso8601Format::Ns => "%FT%T,%f%:z",
        },
        Format::Rfc3339(fmt) => match fmt {
            Rfc3339Format::Date => "%F",
            Rfc3339Format::Seconds => "%F %T%:z",
            Rfc3339Format::Ns => "%F %T.%f%:z",
        },
    }
}
/// Parse a `String` into a `DateTime`.
/// On failure, return the offending input together with its `ParseError`.
fn parse_date<S: AsRef<str> + Clone>(
    s: S,
) -> Result<DateTime<FixedOffset>, (String, chrono::format::ParseError)> {
    // TODO: The GNU date command can parse a wide variety of inputs.
    let text = s.as_ref();
    match text.parse() {
        Ok(datetime) => Ok(datetime),
        Err(err) => Err((text.to_owned(), err)),
    }
}
#[cfg(not(any(unix, windows)))]
/// Stub for targets where setting the clock is not implemented; panics.
fn set_system_datetime(_date: DateTime<Utc>) -> UResult<()> {
    unimplemented!("setting date not implemented (unsupported target)");
}
#[cfg(target_os = "macos")]
/// macOS: setting the date is unsupported; always returns exit-code-1 error.
fn set_system_datetime(_date: DateTime<Utc>) -> UResult<()> {
    Err(USimpleError::new(
        1,
        "setting the date is not supported by macOS".to_string(),
    ))
}
#[cfg(target_os = "redox")]
/// Redox: setting the date is unsupported; always returns exit-code-1 error.
fn set_system_datetime(_date: DateTime<Utc>) -> UResult<()> {
    Err(USimpleError::new(
        1,
        "setting the date is not supported by Redox".to_string(),
    ))
}
#[cfg(all(unix, not(target_os = "macos"), not(target_os = "redox")))]
/// System call to set date (unix).
/// See here for more:
/// https://doc.rust-lang.org/libc/i686-unknown-linux-gnu/libc/fn.clock_settime.html
/// https://linux.die.net/man/3/clock_settime
/// https://www.gnu.org/software/libc/manual/html_node/Time-Types.html
fn set_system_datetime(date: DateTime<Utc>) -> UResult<()> {
    // Split the UTC timestamp into whole seconds plus sub-second
    // nanoseconds, the shape `clock_settime(2)` expects.
    let timespec = timespec {
        tv_sec: date.timestamp() as _,
        tv_nsec: date.timestamp_subsec_nanos() as _,
    };
    // SAFETY: `timespec` is a fully-initialized stack value and
    // `clock_settime` only reads through the pointer for this call.
    let result = unsafe { clock_settime(CLOCK_REALTIME, &timespec) };
    if result != 0 {
        // Non-zero return signals failure; the cause is read from errno.
        Err(std::io::Error::last_os_error().map_err_context(|| "cannot set date".to_string()))
    } else {
        Ok(())
    }
}
#[cfg(windows)]
/// System call to set date (Windows).
/// See here for more:
/// https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-setsystemtime
/// https://docs.microsoft.com/en-us/windows/win32/api/minwinbase/ns-minwinbase-systemtime
fn set_system_datetime(date: DateTime<Utc>) -> UResult<()> {
    // Decompose the UTC timestamp into the broken-down SYSTEMTIME fields.
    let system_time = SYSTEMTIME {
        wYear: date.year() as WORD,
        wMonth: date.month() as WORD,
        // Ignored
        wDayOfWeek: 0,
        wDay: date.day() as WORD,
        wHour: date.hour() as WORD,
        wMinute: date.minute() as WORD,
        wSecond: date.second() as WORD,
        // TODO: be careful of leap seconds - valid range is [0, 999] - how to handle?
        wMilliseconds: ((date.nanosecond() / 1_000_000) % 1000) as WORD,
    };
    // SAFETY: `system_time` is fully initialized and `SetSystemTime` only
    // reads through the reference.
    let result = unsafe { SetSystemTime(&system_time) };
    // Note the convention is the opposite of unix: zero means failure here.
    if result == 0 {
        Err(std::io::Error::last_os_error().map_err_context(|| "cannot set date".to_string()))
    } else {
        Ok(())
    }
}
| 31.616471 | 107 | 0.566868 |
2923ab9ab244f71f71e095c65ee1333cc0fee53d | 11,528 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @generated SignedSource<<f02beeaca80acabc17b06041f998b548>>
*/
mod refetchable_fragment;
use refetchable_fragment::transform_fixture;
use fixture_tests::test_fixture;
// NOTE(review): this file is @generated (see the SignedSource header above);
// do not hand-edit the tests — regenerate them instead. Each test feeds a
// GraphQL fixture and its expected transform output (both embedded at compile
// time via `include_str!` from `refetchable_fragment/fixtures/`) through
// `test_fixture` with the `refetchable_fragment` transform.
#[test]
fn fragment_on_interface_which_implmentations_implement_node() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-implement-node.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-implement-node.expected");
    test_fixture(transform_fixture, "fragment-on-interface-which-implmentations-implement-node.graphql", "refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-implement-node.expected", input, expected);
}
#[test]
fn fragment_on_interface_which_implmentations_not_implement_node_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-not-implement-node.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-not-implement-node.invalid.expected");
    test_fixture(transform_fixture, "fragment-on-interface-which-implmentations-not-implement-node.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-interface-which-implmentations-not-implement-node.invalid.expected", input, expected);
}
#[test]
fn fragment_on_interface_without_id() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-without-id.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-without-id.expected");
    test_fixture(transform_fixture, "fragment-on-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-interface-without-id.expected", input, expected);
}
#[test]
fn fragment_on_node_interface() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface.expected");
    test_fixture(transform_fixture, "fragment-on-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface.expected", input, expected);
}
#[test]
fn fragment_on_node_interface_without_id() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface-without-id.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected");
    test_fixture(transform_fixture, "fragment-on-node-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected", input, expected);
}
#[test]
fn fragment_on_node_with_id_argument_used_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.expected");
    test_fixture(transform_fixture, "fragment-on-node-with-id-argument-used.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.expected", input, expected);
}
#[test]
fn fragment_on_non_node_fetchable_type() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected");
    test_fixture(transform_fixture, "fragment-on-non-node-fetchable-type.graphql", "refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected", input, expected);
}
#[test]
fn fragment_on_object_implementing_node_interface() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected");
    test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected);
}
#[test]
fn fragment_on_object_implementing_node_interface_with_alias_id() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected");
    test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface-with-alias-id.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected", input, expected);
}
#[test]
fn fragment_on_query() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-query.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query.expected");
    test_fixture(transform_fixture, "fragment-on-query.graphql", "refetchable_fragment/fixtures/fragment-on-query.expected", input, expected);
}
#[test]
fn fragment_on_query_with_cycle() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-query-with-cycle.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected");
    test_fixture(transform_fixture, "fragment-on-query-with-cycle.graphql", "refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected", input, expected);
}
#[test]
fn fragment_on_query_without_query_name_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.expected");
    test_fixture(transform_fixture, "fragment-on-query-without-query-name.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.expected", input, expected);
}
#[test]
fn fragment_on_viewer() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-on-viewer.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-on-viewer.expected");
    test_fixture(transform_fixture, "fragment-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-on-viewer.expected", input, expected);
}
#[test]
fn fragment_with_args_on_object_implementing_node_interface() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected");
    test_fixture(transform_fixture, "fragment-with-args-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected", input, expected);
}
#[test]
fn fragment_with_args_on_query() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-query.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-query.expected");
    test_fixture(transform_fixture, "fragment-with-args-on-query.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-query.expected", input, expected);
}
#[test]
fn fragment_with_args_on_viewer() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-viewer.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected");
    test_fixture(transform_fixture, "fragment-with-args-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected", input, expected);
}
#[test]
fn fragment_with_relay_plural_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.expected");
    test_fixture(transform_fixture, "fragment-with-relay-plural.invalid.graphql", "refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection_bidirectional() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection-bidirectional.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection_literal_count_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection-literal-count.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection_no_cursor_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection-no-cursor.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection_unstable_path_invalid() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection-unstable-path.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.expected", input, expected);
}
#[test]
fn refetchable_fragment_with_connection_with_stream() {
    let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.graphql");
    let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected");
    test_fixture(transform_fixture, "refetchable-fragment-with-connection-with-stream.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected", input, expected);
}
| 65.874286 | 246 | 0.807686 |
fcf0feb6e30d00652cfc4ca877bd6bd51fd51e7d | 1,731 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::cell::RefCell;
use std::rc::Rc;
use std::num::ToPrimitive;
/// Simple aggregate used below to exercise field mutation through
/// `Rc<RefCell<Point>>`. (`int` is pre-1.0 Rust syntax — kept as-is.)
#[derive(PartialEq, Debug)]
struct Point {
    x: int,
    y: int
}
/// Exercises auto-deref through `Rc` and `Rc<RefCell<_>>` for numbers,
/// structs, strings and arrays.
///
/// NOTE(review): this is a historical rustc run-pass test written in
/// pre-1.0 syntax (`box`, `5u`, `int`, `to_uint`, `String::from_str`);
/// it only compiles with a compiler of that era and must not be modernized
/// without losing its purpose.
pub fn main() {
    assert_eq!(Rc::new(5u).to_uint(), Some(5));
    // Deref chases through the nested `box`/`&`/`Rc` layers down to `5u`.
    assert_eq!((box &box &Rc::new(box box &box 5u)).to_uint(), Some(5));
    let point = Rc::new(Point {x: 2, y: 4});
    assert_eq!(point.x, 2);
    assert_eq!(point.y, 4);
    // `RefCell` provides interior mutability behind the shared `Rc` handle.
    let i = Rc::new(RefCell::new(2));
    let i_value = *i.borrow();
    *i.borrow_mut() = 5;
    assert_eq!((i_value, *i.borrow()), (2, 5));
    let s = Rc::new("foo".to_string());
    assert_eq!(&**s, "foo");
    let mut_s = Rc::new(RefCell::new(String::from_str("foo")));
    mut_s.borrow_mut().push_str("bar");
    // HACK assert_eq! would panic here because it stores the LHS and RHS in two locals.
    assert!(&**mut_s.borrow() == "foobar");
    assert!(&**mut_s.borrow_mut() == "foobar");
    // Field mutation through a shared struct handle.
    let p = Rc::new(RefCell::new(Point {x: 1, y: 2}));
    p.borrow_mut().x = 3;
    p.borrow_mut().y += 3;
    assert_eq!(*p.borrow(), Point {x: 3, y: 5});
    // Index mutation through a shared array handle.
    let v = Rc::new(RefCell::new([1, 2, 3]));
    v.borrow_mut()[0] = 3;
    v.borrow_mut()[1] += 3;
    assert_eq!((v.borrow()[0], v.borrow()[1], v.borrow()[2]), (3, 5, 3));
}
| 31.472727 | 88 | 0.611207 |
c1ad8226c27256282144c0c713518ba718f038ca | 411 | use crate::cmd::ProgResult;
use crate::man::MANUALS;
use std::io::{stdout, Write};
/// Print the manual page for each command named on the command line.
///
/// `cmds[0]` is the program's own invocation name, so it is skipped.
/// Commands with no entry in `MANUALS` are silently ignored.
pub fn man(cmds: &[String]) -> ProgResult {
    let out = stdout();
    let mut out = out.lock();
    for cmd in cmds.iter().skip(1) {
        // BUG FIX: the original guard was inverted (`!contains_key`), so the
        // page was looked up only for *unknown* commands — which made the
        // `MANUALS[...]` index panic — and known commands printed nothing.
        // `get` performs the membership test and the lookup in one step.
        if let Some(page) = MANUALS.get(cmd.as_str()) {
            out.write_all(page.as_bytes())
                .expect("Cannot show manual");
        }
    }
    Ok(())
}
| 24.176471 | 59 | 0.554745 |
d52fac604ba32625db43d80f529fc0ab98d0bc6a | 3,686 | use crate::alloc::boxed::Box;
use crate::base::RTTError::DeviceWriteFailed;
use crate::base::{CVoid, RTBaseError, RTTError};
use crate::device::common::*;
use core::mem;
/// A configured GPIO pin: RT-Thread pin-device index plus its I/O mode.
struct PIN {
    index: isize,
    mode: Mode,
}
/// An input pin with an optionally attached interrupt closure.
///
/// The double `Box` keeps the fat `dyn FnMut` pointer behind a thin pointer
/// that can cross the C callback boundary as `*mut CVoid` (see `_attach_irq`).
struct IRQPin {
    pin: PIN,
    irq_func: Option<Box<Box<dyn FnMut()>>>,
}
/// Pin I/O mode; the implicit discriminants (0..=4) are what `rt_pin_mode`
/// receives via the `as isize` cast in `Open::open`.
#[derive(Debug, PartialEq, Copy, Clone)]
enum Mode {
    Output,
    Input,
    InputPullUp,
    InputPullDown,
    OutputOD,
}
/// Interrupt trigger condition; passed to `rt_pin_attach_irq` via `as u32`.
#[derive(Debug)]
enum IRQMode {
    Rising,
    Falling,
    RisingFalling,
    HighLevel,
    LowLevel,
}
/// Logic level of a pin; `Low` = 0, `High` = 1 across the FFI boundary.
#[derive(Debug)]
enum PinState {
    Low,
    High,
}
/// Builder collecting the pin index and mode before the pin is opened.
struct PinBuilder {
    index: isize,
    mode: Mode,
}
// Raw bindings into RT-Thread's C pin-device driver.
extern "C" {
    fn rt_pin_write(pin: isize, val: isize);
    fn rt_pin_read(pin: isize) -> i32;
    // Registers `func` as the IRQ handler for `pin`; the driver is expected
    // to hand `arg` back on every invocation of `func`. Callers here treat a
    // zero return as success (see `IRQPin::_attach_irq`).
    fn rt_pin_attach_irq(
        pin: i32,
        mode: u32,
        func: extern "C" fn(arg: *mut CVoid),
        arg: *mut CVoid,
    ) -> RTBaseError;
    fn rt_pin_detach_irq(pin: i32) -> RTBaseError;
    fn rt_pin_mode(pin: isize, mode: isize);
    // `mode` acts as an enable flag: 1 = enable, 0 = disable (see `IRQPin`).
    fn rt_pin_irq_enable(pin: isize, mode: u32);
}
impl Open<PinBuilder> for PIN {
    /// Program the hardware pin with the builder's mode, then wrap it.
    fn open(builder: &PinBuilder) -> Result<Self, RTTError> {
        let index = builder.index;
        let mode = builder.mode;
        // SAFETY: plain FFI call into the RT-Thread pin driver; no pointers
        // are passed, only plain integers.
        unsafe { rt_pin_mode(index, mode as isize) };
        Ok(PIN { index, mode })
    }
}
impl PIN {
    /// Start building a pin configuration; the mode defaults to `Output`.
    fn new(pin: isize) -> PinBuilder {
        PinBuilder {
            index: pin,
            mode: Mode::Output,
        }
    }

    /// Sample the current logic level of the pin.
    fn pin_read(&self) -> Result<PinState, RTTError> {
        // SAFETY: plain FFI call passing only a plain integer.
        let raw = unsafe { rt_pin_read(self.index) };
        Ok(match raw {
            0 => PinState::Low,
            _ => PinState::High,
        })
    }

    /// Drive the pin to the given level.
    ///
    /// NOTE(review): only `Mode::Input` is rejected here; `InputPullUp` /
    /// `InputPullDown` pins remain writable — confirm that is intended.
    fn pin_write(&self, val: PinState) -> Result<(), RTTError> {
        if self.mode == Mode::Input {
            Err(DeviceWriteFailed)
        } else {
            // SAFETY: plain FFI call passing only plain integers.
            unsafe { rt_pin_write(self.index, val as isize) };
            Ok(())
        }
    }

    /// Convert this pin into an interrupt-capable pin.
    /// Output-mode pins cannot take interrupts and are rejected.
    fn irq(self) -> Result<IRQPin, RTTError> {
        match self.mode {
            Mode::Output | Mode::OutputOD => Err(RTTError::DeviceOpenFailed),
            _ => Ok(IRQPin {
                pin: self,
                irq_func: None,
            }),
        }
    }
}
impl IRQPin {
    /// Attach `func` as this pin's interrupt handler; builder-style, returns `self`.
    fn attach_irq<T>(&mut self, func: T, mode: IRQMode) -> &mut IRQPin
    where
        T: FnMut() + 'static,
    {
        Self::_attach_irq(self, Box::new(func), mode);
        self
    }
    /// Unmask the interrupt for this pin.
    fn enable(&self) {
        unsafe {
            rt_pin_irq_enable(self.pin.index, 1);
        }
    }
    /// Mask the interrupt for this pin.
    fn disable(&self) {
        unsafe {
            rt_pin_irq_enable(self.pin.index, 0);
        }
    }
    /// Register `func` with the driver.
    ///
    /// The closure is double-boxed: the outer `Box` (`p`) provides a thin
    /// pointer to the inner fat `Box<dyn FnMut()>`, which is what is smuggled
    /// through the C callback's `*mut CVoid` argument.
    fn _attach_irq(&mut self, func: Box<dyn FnMut()>, mode: IRQMode) {
        let p = Box::new(func);
        // Thin pointer to the inner box. Ownership stays with `p` (and later
        // `self.irq_func`); the callback must never free it.
        let param = &*p as *const _ as *mut _;
        // C-ABI trampoline the driver invokes for each interrupt.
        extern "C" fn f(arg: *mut CVoid) {
            unsafe {
                // Reconstruct the Box only to call through it; `mem::forget`
                // immediately relinquishes ownership again so the closure is
                // not dropped here (it is owned by `irq_func`).
                let mut run = Box::from_raw(arg as *mut Box<dyn FnMut()>);
                run();
                mem::forget(run);
            }
        }
        unsafe {
            // A non-zero return means the driver rejected the registration.
            // NOTE(review): on failure any *previous* handler's closure is
            // dropped here (`irq_func = None`) — confirm the driver has also
            // forgotten it by this point.
            if 0 != rt_pin_attach_irq(self.pin.index as i32, mode as u32, f, param) {
                self.irq_func = None;
                return;
            }
        }
        // Keep the closure alive for as long as the driver may call it;
        // `Drop` detaches the IRQ before this box is freed.
        self.irq_func = Some(p);
    }
}
impl Drop for IRQPin {
    /// Detach the interrupt handler (if one was registered) before the boxed
    /// closure backing it is freed, so the driver never calls into freed
    /// memory.
    fn drop(&mut self) {
        // Idiom fix: `if let Some(_) = ...` replaced with `is_some()`
        // (clippy::redundant_pattern_matching); behavior is unchanged.
        if self.irq_func.is_some() {
            unsafe {
                rt_pin_detach_irq(self.pin.index as i32);
            }
        }
    }
}
impl PinBuilder {
    /// Select the I/O mode used when the builder is opened.
    fn mode(&mut self, m: Mode) -> &mut Self {
        self.mode = m;
        self
    }

    /// Finish the builder and configure the hardware pin.
    fn open(&self) -> Result<PIN, RTTError> {
        PIN::open(self)
    }
}
| 21.183908 | 85 | 0.503527 |
2f2921498d1ae9e0301d836fb13872b1e6c7fdd8 | 1,882 | use super::show::ShowEthKeyCmd;
use crate::application::APP;
use abscissa_core::{clap::Parser, Application, Command, Runnable};
use k256::{pkcs8::ToPrivateKey, SecretKey};
///Import an Eth Key
// NOTE: the `///` line above doubles as this subcommand's help text through
// the clap derive — keep its wording stable.
#[derive(Command, Debug, Default, Parser)]
pub struct ImportEthKeyCmd {
    // Positional args: [0] = key name (required), [1] = private key (optional;
    // the user is prompted for it when absent — see `run`).
    pub args: Vec<String>,
    // `--overwrite`: replace an existing key with the same name.
    #[clap(short, long)]
    pub overwrite: bool,
    // `--show-private-key`: echo the private key after importing (forwarded
    // to `ShowEthKeyCmd` in `run`).
    #[clap(short, long)]
    show_private_key: bool,
}
// Entry point for `gorc keys eth import [name] (private-key)`
// - [name] required; key name
// - (private-key) optional; when absent the user will be prompted to enter it
impl Runnable for ImportEthKeyCmd {
fn run(&self) {
let config = APP.config();
let keystore = &config.keystore;
let name = self.args.get(0).expect("name is required");
let name = name.parse().expect("Could not parse name");
if let Ok(_info) = keystore.info(&name) {
if !self.overwrite {
eprintln!("Key already exists, exiting.");
return;
}
}
let key = match self.args.get(1) {
Some(private_key) => private_key.clone(),
None => rpassword::read_password_from_tty(Some("> Enter your private-key:\n"))
.expect("Could not read private-key"),
};
let key = key
.parse::<clarity::PrivateKey>()
.expect("Could not parse private-key");
let key = SecretKey::from_bytes(key.to_bytes()).expect("Could not convert private-key");
let key = key
.to_pkcs8_der()
.expect("Could not PKCS8 encod private key");
keystore.store(&name, &key).expect("Could not store key");
let show_cmd = ShowEthKeyCmd {
args: vec![name.to_string()],
show_private_key: self.show_private_key,
show_name: false,
};
show_cmd.run();
}
}
| 30.852459 | 96 | 0.589798 |
db872b1eb15933c0fb16a24560df33f38c594990 | 41,753 | use std::fmt::{Display, Formatter};
use std::io::Error;
use std::{
collections::{BTreeMap, BTreeSet},
fmt::Debug,
};
use async_trait::async_trait;
use dml::{DmlMeta, DmlOperation, DmlWrite};
use futures::stream::BoxStream;
/// Generic boxed error type that is used in this crate.
///
/// The dynamic boxing makes it easier to deal with error from different implementations.
#[derive(Debug)]
pub struct WriteBufferError {
    // The underlying, implementation-specific error.
    inner: Box<dyn std::error::Error + Sync + Send>,
    // Broad classification of `inner`; exposed via `kind()`.
    kind: WriteBufferErrorKind,
}
impl WriteBufferError {
    /// Build an error of the given `kind` wrapping `source`.
    pub fn new(
        kind: WriteBufferErrorKind,
        source: impl Into<Box<dyn std::error::Error + Sync + Send>>,
    ) -> Self {
        let inner = source.into();
        Self { inner, kind }
    }

    /// Shorthand for a [`WriteBufferErrorKind::InvalidData`] error.
    pub fn invalid_data(source: impl Into<Box<dyn std::error::Error + Sync + Send>>) -> Self {
        Self::new(WriteBufferErrorKind::InvalidData, source)
    }

    /// Shorthand for a [`WriteBufferErrorKind::InvalidInput`] error.
    pub fn invalid_input(source: impl Into<Box<dyn std::error::Error + Sync + Send>>) -> Self {
        Self::new(WriteBufferErrorKind::InvalidInput, source)
    }

    /// Shorthand for a [`WriteBufferErrorKind::UnknownSequenceNumber`] error.
    pub fn unknown_sequence_number(
        source: impl Into<Box<dyn std::error::Error + Sync + Send>>,
    ) -> Self {
        Self::new(WriteBufferErrorKind::UnknownSequenceNumber, source)
    }

    /// Returns the kind of error this was
    pub fn kind(&self) -> WriteBufferErrorKind {
        self.kind
    }

    /// Returns the inner error
    pub fn inner(&self) -> &dyn std::error::Error {
        self.inner.as_ref()
    }
}
impl Display for WriteBufferError {
    /// Renders as `WriteBufferError(<Kind>): <source message>`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let Self { inner, kind } = self;
        write!(f, "WriteBufferError({:?}): {}", kind, inner)
    }
}
impl std::error::Error for WriteBufferError {}
// An `std::io::Error` is always classified as a fatal I/O failure.
impl From<std::io::Error> for WriteBufferError {
    fn from(e: Error) -> Self {
        Self {
            inner: Box::new(e),
            kind: WriteBufferErrorKind::IO,
        }
    }
}
// Errors surfaced by the rskafka client are likewise classified as I/O.
impl From<rskafka::client::error::Error> for WriteBufferError {
    fn from(e: rskafka::client::error::Error) -> Self {
        Self {
            inner: Box::new(e),
            kind: WriteBufferErrorKind::IO,
        }
    }
}
// Same mapping for rskafka producer errors.
impl From<rskafka::client::producer::Error> for WriteBufferError {
    fn from(e: rskafka::client::producer::Error) -> Self {
        Self {
            inner: Box::new(e),
            kind: WriteBufferErrorKind::IO,
        }
    }
}
// Free-form message strings carry no classification, hence `Unknown`.
impl From<String> for WriteBufferError {
    fn from(e: String) -> Self {
        Self {
            inner: e.into(),
            kind: WriteBufferErrorKind::Unknown,
        }
    }
}
// Same as `From<String>`, for string literals.
impl From<&'static str> for WriteBufferError {
    fn from(e: &'static str) -> Self {
        Self {
            inner: e.into(),
            kind: WriteBufferErrorKind::Unknown,
        }
    }
}
/// Coarse classification of a [`WriteBufferError`], retrievable via
/// [`WriteBufferError::kind`].
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum WriteBufferErrorKind {
    /// This operation failed for an unknown reason
    Unknown,
    /// This operation was provided with invalid input data
    InvalidInput,
    /// This operation encountered invalid data
    InvalidData,
    /// A fatal IO error occurred - non-fatal errors should be retried internally
    IO,
    /// The sequence number that we are trying to read is unknown.
    UnknownSequenceNumber,
}
/// Writing to a Write Buffer takes a [`DmlWrite`] and returns the [`DmlMeta`] for the
/// payload that was written
#[async_trait]
pub trait WriteBufferWriting: Sync + Send + Debug + 'static {
    /// List all known sequencers.
    ///
    /// This set is never empty.
    fn sequencer_ids(&self) -> BTreeSet<u32>;
    /// Send a [`DmlOperation`] to the write buffer using the specified sequencer ID.
    ///
    /// The [`dml::DmlMeta`] will be propagated where applicable
    ///
    /// This call may "async block" (i.e. be in a pending state) to accumulate multiple operations into a single batch.
    /// After this method returns the operation was actually written (i.e. it is NOT buffered any longer). You may use
    /// [`flush`](Self::flush) to trigger an early submission (e.g. before some linger time expired), which can be
    /// helpful for controlled shutdown.
    ///
    /// Returns the metadata that was written.
    async fn store_operation(
        &self,
        sequencer_id: u32,
        operation: &DmlOperation,
    ) -> Result<DmlMeta, WriteBufferError>;
    /// Sends line protocol to the write buffer - primarily intended for testing
    async fn store_lp(
        &self,
        sequencer_id: u32,
        lp: &str,
        default_time: i64,
    ) -> Result<DmlMeta, WriteBufferError> {
        // Parse the line protocol into per-table batches; parse failures are
        // the caller's fault, hence `InvalidInput`.
        let tables = mutable_batch_lp::lines_to_batches(lp, default_time)
            .map_err(WriteBufferError::invalid_input)?;
        // Wrap the batches in a write against a fixed test database name with
        // default (unsequenced) metadata and delegate to `store_operation`.
        self.store_operation(
            sequencer_id,
            &DmlOperation::Write(DmlWrite::new("test_db", tables, Default::default())),
        )
        .await
    }
    /// Flush all currently blocking store operations ([`store_operation`](Self::store_operation) /
    /// [`store_lp`](Self::store_lp)).
    ///
    /// This call is pending while outstanding data is being submitted and will return AFTER the flush completed.
    /// However you still need to poll the store operations to get the metadata for every write.
    async fn flush(&self);
    /// Return type (like `"mock"` or `"kafka"`) of this writer.
    fn type_name(&self) -> &'static str;
}
/// Handles a stream of a specific sequencer.
///
/// This can be used to consume data via a stream or to seek the stream to a given sequence number.
///
/// Instances are obtained from [`WriteBufferReading::stream_handler`].
#[async_trait]
pub trait WriteBufferStreamHandler: Sync + Send + Debug + 'static {
    /// Stream that produces DML operations.
    ///
    /// Note that due to the mutable borrow, it is not possible to have multiple streams from the same
    /// [`WriteBufferStreamHandler`] instance at the same time. If all streams are dropped and requested again, the last
    /// sequence number of the old streams will be the start sequence number for the new streams. If you want to
    /// prevent that either create a new [`WriteBufferStreamHandler`] or use [`seek`](Self::seek).
    ///
    /// If the sequence number that the stream wants to read is unknown (either because it is in the future or because
    /// some retention policy removed it already), the stream will return an error with
    /// [`WriteBufferErrorKind::UnknownSequenceNumber`] and will end immediately.
    async fn stream(&mut self) -> BoxStream<'_, Result<DmlOperation, WriteBufferError>>;
    /// Seek sequencer to given sequence number. The next output of related streams will be an entry with at least
    /// the given sequence number (the actual sequence number might be skipped due to "holes" in the stream).
    ///
    /// Note that due to the mutable borrow, it is not possible to seek while streams exists.
    async fn seek(&mut self, sequence_number: u64) -> Result<(), WriteBufferError>;
}
/// Produce streams (one per sequencer) of [`DmlWrite`]s.
#[async_trait]
pub trait WriteBufferReading: Sync + Send + Debug + 'static {
    /// List all known sequencers.
    ///
    /// This set is never empty.
    fn sequencer_ids(&self) -> BTreeSet<u32>;
    /// Get stream handler for a dedicated sequencer.
    ///
    /// Handlers do NOT share any state (e.g. last sequence number).
    async fn stream_handler(
        &self,
        sequencer_id: u32,
    ) -> Result<Box<dyn WriteBufferStreamHandler>, WriteBufferError>;
    /// Get stream handlers for all streams.
    async fn stream_handlers(
        &self,
    ) -> Result<BTreeMap<u32, Box<dyn WriteBufferStreamHandler>>, WriteBufferError> {
        // Create one independent handler per known sequencer.
        let mut handlers = BTreeMap::new();
        for sequencer_id in self.sequencer_ids() {
            handlers.insert(sequencer_id, self.stream_handler(sequencer_id).await?);
        }
        Ok(handlers)
    }
    /// Get high watermark (= what we believe is the next sequence number to be added).
    ///
    /// Can be used to calculate lag. Note that since the watermark is "next sequence ID number to be added", it starts
    /// at 0 and after the entry with sequence number 0 is added to the buffer, it is 1.
    async fn fetch_high_watermark(&self, sequencer_id: u32) -> Result<u64, WriteBufferError>;
    /// Return type (like `"mock"` or `"kafka"`) of this reader.
    fn type_name(&self) -> &'static str;
}
pub mod test_utils {
//! Generic tests for all write buffer implementations.
use crate::core::WriteBufferErrorKind;
use super::{
WriteBufferError, WriteBufferReading, WriteBufferStreamHandler, WriteBufferWriting,
};
use async_trait::async_trait;
use dml::{test_util::assert_write_op_eq, DmlMeta, DmlOperation, DmlWrite};
use futures::{stream::FuturesUnordered, Stream, StreamExt, TryStreamExt};
use std::{
collections::{BTreeSet, HashSet},
convert::TryFrom,
num::NonZeroU32,
sync::Arc,
time::Duration,
};
use time::{Time, TimeProvider};
use trace::{ctx::SpanContext, span::Span, RingBufferTraceCollector};
use uuid::Uuid;
/// Generated random topic name for testing.
pub fn random_topic_name() -> String {
format!("test_topic_{}", Uuid::new_v4())
}
    /// Adapter to make a concrete write buffer implementation work w/ [`perform_generic_tests`].
    #[async_trait]
    pub trait TestAdapter: Send + Sync {
        /// The context type that is used.
        type Context: TestContext;
        /// Create a new context.
        ///
        /// This will be called multiple times during the test suite. Each resulting context must represent an isolated
        /// environment.
        async fn new_context(&self, n_sequencers: NonZeroU32) -> Self::Context {
            // Default to the real system clock; implementations only need to
            // provide `new_context_with_time`.
            self.new_context_with_time(n_sequencers, Arc::new(time::SystemProvider::new()))
                .await
        }
        /// Create a new context that uses the given time provider.
        async fn new_context_with_time(
            &self,
            n_sequencers: NonZeroU32,
            time_provider: Arc<dyn TimeProvider>,
        ) -> Self::Context;
    }
    /// Context used during testing.
    ///
    /// Represents an isolated environment. Actions like sequencer creations and writes must not leak across context boundaries.
    #[async_trait]
    pub trait TestContext: Send + Sync {
        /// Write buffer writer implementation specific to this context and adapter.
        type Writing: WriteBufferWriting;
        /// Write buffer reader implementation specific to this context and adapter.
        type Reading: WriteBufferReading;
        /// Create new writer.
        ///
        /// NOTE(review): `creation_config` presumably controls auto-creation of
        /// the underlying resource — confirm against the implementations.
        async fn writing(&self, creation_config: bool) -> Result<Self::Writing, WriteBufferError>;
        /// Create new reader.
        async fn reading(&self, creation_config: bool) -> Result<Self::Reading, WriteBufferError>;
        /// Trace collector that is used in this context.
        fn trace_collector(&self) -> Arc<RingBufferTraceCollector>;
    }
    /// Generic test suite that must be passed by all proper write buffer implementations.
    ///
    /// See [`TestAdapter`] for how to make a concrete write buffer implementation work with this test suite.
    ///
    /// Note that you might need more tests on top of this to assert specific implementation behaviors, edge cases, and
    /// error handling.
    pub async fn perform_generic_tests<T>(adapter: T)
    where
        T: TestAdapter,
    {
        // Run every sub-test sequentially; each obtains its own context(s)
        // from the adapter (see e.g. `test_single_stream_io`).
        test_single_stream_io(&adapter).await;
        test_multi_stream_io(&adapter).await;
        test_multi_sequencer_io(&adapter).await;
        test_multi_writer_multi_reader(&adapter).await;
        test_seek(&adapter).await;
        test_watermark(&adapter).await;
        test_timestamp(&adapter).await;
        test_sequencer_auto_creation(&adapter).await;
        test_sequencer_ids(&adapter).await;
        test_span_context(&adapter).await;
        test_unknown_sequencer_write(&adapter).await;
        test_multi_namespaces(&adapter).await;
        test_flush(&adapter).await;
    }
    /// Writes line protocol and returns the [`DmlWrite`] that was written
    pub async fn write(
        namespace: &str,
        writer: &impl WriteBufferWriting,
        lp: &str,
        sequencer_id: u32,
        span_context: Option<&SpanContext>,
    ) -> DmlWrite {
        // Parse the LP (timestamps default to 0) into an unsequenced write.
        let tables = mutable_batch_lp::lines_to_batches(lp, 0).unwrap();
        let write = DmlWrite::new(
            namespace,
            tables,
            DmlMeta::unsequenced(span_context.cloned()),
        );
        let operation = DmlOperation::Write(write);
        let meta = writer
            .store_operation(sequencer_id, &operation)
            .await
            .unwrap();
        // Recover the concrete write from the operation we just built; it was
        // constructed as `Write` above, so other variants are impossible.
        let mut write = match operation {
            DmlOperation::Write(write) => write,
            _ => unreachable!(),
        };
        // Replace the write's metadata with the (sequenced) metadata the
        // store returned, so callers can compare against what readers see.
        write.set_meta(meta);
        write
    }
    /// Test IO with a single writer and single reader stream.
    ///
    /// This tests that:
    /// - streams process data in order
    /// - readers can handle the "pending" state w/o erroring
    /// - readers unblock after being in "pending" state
    async fn test_single_stream_io<T>(adapter: &T)
    where
        T: TestAdapter,
    {
        // Context with exactly one sequencer, so there is a single stream.
        let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
        let entry_1 = "upc user=1 100";
        let entry_2 = "upc user=2 200";
        let entry_3 = "upc user=3 300";
        let writer = context.writing(true).await.unwrap();
        let reader = context.reading(true).await.unwrap();
        // Grab the handler for the only sequencer in this context.
        let sequencer_id = set_pop_first(&mut reader.sequencer_ids()).unwrap();
        let mut stream_handler = reader.stream_handler(sequencer_id).await.unwrap();
        let mut stream = stream_handler.stream().await;
        // empty stream is pending
        assert_stream_pending(&mut stream).await;
        // adding content allows us to get results
        let w1 = write("namespace", &writer, entry_1, sequencer_id, None).await;
        assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w1);
        // stream is pending again
        assert_stream_pending(&mut stream).await;
        // adding more data unblocks the stream
        let w2 = write("namespace", &writer, entry_2, sequencer_id, None).await;
        let w3 = write("namespace", &writer, entry_3, sequencer_id, None).await;
        assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w2);
        assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w3);
        // stream is pending again
        assert_stream_pending(&mut stream).await;
    }
/// Tests multiple subsequently created streams from a single [`WriteBufferStreamHandler`].
///
/// This tests that:
/// - readers remember their sequence number (and "pending" state) even when streams are dropped
/// - state is not shared between handlers
async fn test_multi_stream_io<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    let entry_1 = "upc user=1 100";
    let entry_2 = "upc user=2 200";
    let entry_3 = "upc user=3 300";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    // Use the sequencer ID actually reported by the reader for the writes as well,
    // instead of hard-coding `0` (consistent with the other tests; does not assume
    // that sequencer IDs start at 0).
    let sequencer_id = set_pop_first(&mut reader.sequencer_ids()).unwrap();
    let w1 = write("namespace", &writer, entry_1, sequencer_id, None).await;
    let w2 = write("namespace", &writer, entry_2, sequencer_id, None).await;
    let w3 = write("namespace", &writer, entry_3, sequencer_id, None).await;
    // creating stream, drop stream, re-create it => still starts at first entry
    let mut stream_handler = reader.stream_handler(sequencer_id).await.unwrap();
    let stream = stream_handler.stream();
    drop(stream);
    let mut stream = stream_handler.stream().await;
    assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w1);
    // re-creating stream after reading remembers sequence number, but wait a bit to provoke the stream to buffer
    // some entries
    tokio::time::sleep(Duration::from_millis(10)).await;
    drop(stream);
    let mut stream = stream_handler.stream().await;
    assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w2);
    assert_write_op_eq(&stream.next().await.unwrap().unwrap(), &w3);
    // re-creating stream after reading everything makes it pending
    drop(stream);
    let mut stream = stream_handler.stream().await;
    assert_stream_pending(&mut stream).await;
    // use a different handler => stream starts from beginning
    let mut stream_handler2 = reader.stream_handler(sequencer_id).await.unwrap();
    let mut stream2 = stream_handler2.stream().await;
    assert_write_op_eq(&stream2.next().await.unwrap().unwrap(), &w1);
    assert_stream_pending(&mut stream).await;
}
/// Test single reader-writer IO w/ multiple sequencers.
///
/// This tests that:
/// - writes go to and reads come from the right sequencer, aka that sequencers provide a namespace-like isolation
/// - "pending" states are specific to a sequencer
async fn test_multi_sequencer_io<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(2).unwrap()).await;
    let entry_1 = "upc user=1 100";
    let entry_2 = "upc user=2 200";
    let entry_3 = "upc user=3 300";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    // check that we have two different sequencer IDs
    let mut sequencer_ids = reader.sequencer_ids();
    assert_eq!(sequencer_ids.len(), 2);
    let sequencer_id_1 = set_pop_first(&mut sequencer_ids).unwrap();
    let sequencer_id_2 = set_pop_first(&mut sequencer_ids).unwrap();
    assert_ne!(sequencer_id_1, sequencer_id_2);
    let mut stream_handler_1 = reader.stream_handler(sequencer_id_1).await.unwrap();
    let mut stream_handler_2 = reader.stream_handler(sequencer_id_2).await.unwrap();
    let mut stream_1 = stream_handler_1.stream().await;
    let mut stream_2 = stream_handler_2.stream().await;
    // empty streams are pending
    assert_stream_pending(&mut stream_1).await;
    assert_stream_pending(&mut stream_2).await;
    // entries arrive at the right target stream; the other stream stays pending
    let w1 = write("namespace", &writer, entry_1, sequencer_id_1, None).await;
    assert_write_op_eq(&stream_1.next().await.unwrap().unwrap(), &w1);
    assert_stream_pending(&mut stream_2).await;
    let w2 = write("namespace", &writer, entry_2, sequencer_id_2, None).await;
    assert_stream_pending(&mut stream_1).await;
    assert_write_op_eq(&stream_2.next().await.unwrap().unwrap(), &w2);
    let w3 = write("namespace", &writer, entry_3, sequencer_id_1, None).await;
    assert_stream_pending(&mut stream_2).await;
    assert_write_op_eq(&stream_1.next().await.unwrap().unwrap(), &w3);
    // streams are pending again
    assert_stream_pending(&mut stream_1).await;
    assert_stream_pending(&mut stream_2).await;
}
/// Test multiple writers and multiple readers on multiple sequencers.
///
/// This tests that:
/// - writers retrieve consistent sequencer IDs
/// - writes go to and reads come from the right sequencer, similar
///   to [`test_multi_sequencer_io`] but less detailed
/// - multiple writers can write to a single sequencer
async fn test_multi_writer_multi_reader<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(2).unwrap()).await;
    let entry_east_1 = "upc,region=east user=1 100";
    let entry_east_2 = "upc,region=east user=2 200";
    let entry_west_1 = "upc,region=west user=1 200";
    let writer_1 = context.writing(true).await.unwrap();
    let writer_2 = context.writing(true).await.unwrap();
    let reader_1 = context.reading(true).await.unwrap();
    let reader_2 = context.reading(true).await.unwrap();
    // both writers must report the same set of sequencer IDs
    let mut sequencer_ids_1 = writer_1.sequencer_ids();
    let sequencer_ids_2 = writer_2.sequencer_ids();
    assert_eq!(sequencer_ids_1, sequencer_ids_2);
    assert_eq!(sequencer_ids_1.len(), 2);
    let sequencer_id_1 = set_pop_first(&mut sequencer_ids_1).unwrap();
    let sequencer_id_2 = set_pop_first(&mut sequencer_ids_1).unwrap();
    // both writers write to sequencer 1; only writer 1 writes to sequencer 2
    let w_east_1 = write("namespace", &writer_1, entry_east_1, sequencer_id_1, None).await;
    let w_west_1 = write("namespace", &writer_1, entry_west_1, sequencer_id_2, None).await;
    let w_east_2 = write("namespace", &writer_2, entry_east_2, sequencer_id_1, None).await;
    // both readers observe identical, correctly-routed content
    let mut handler_1_1 = reader_1.stream_handler(sequencer_id_1).await.unwrap();
    let mut handler_1_2 = reader_1.stream_handler(sequencer_id_2).await.unwrap();
    let mut handler_2_1 = reader_2.stream_handler(sequencer_id_1).await.unwrap();
    let mut handler_2_2 = reader_2.stream_handler(sequencer_id_2).await.unwrap();
    assert_reader_content(&mut handler_1_1, &[&w_east_1, &w_east_2]).await;
    assert_reader_content(&mut handler_1_2, &[&w_west_1]).await;
    assert_reader_content(&mut handler_2_1, &[&w_east_1, &w_east_2]).await;
    assert_reader_content(&mut handler_2_2, &[&w_west_1]).await;
}
/// Test seek implementation of readers.
///
/// This tests that:
/// - seeking is specific to the reader AND sequencer
/// - forward and backwards seeking works
/// - seeking past the end of the known content works (results in "pending" status and remembers sequence number and
///   not just "next entry")
async fn test_seek<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(2).unwrap()).await;
    let entry_east_1 = "upc,region=east user=1 100";
    let entry_east_2 = "upc,region=east user=2 200";
    let entry_east_3 = "upc,region=east user=3 300";
    let entry_west_1 = "upc,region=west user=1 200";
    let writer = context.writing(true).await.unwrap();
    let mut sequencer_ids = writer.sequencer_ids();
    let sequencer_id_1 = set_pop_first(&mut sequencer_ids).unwrap();
    let sequencer_id_2 = set_pop_first(&mut sequencer_ids).unwrap();
    let w_east_1 = write("namespace", &writer, entry_east_1, sequencer_id_1, None).await;
    let w_east_2 = write("namespace", &writer, entry_east_2, sequencer_id_1, None).await;
    let w_west_1 = write("namespace", &writer, entry_west_1, sequencer_id_2, None).await;
    let reader_1 = context.reading(true).await.unwrap();
    let reader_2 = context.reading(true).await.unwrap();
    let mut handler_1_1_a = reader_1.stream_handler(sequencer_id_1).await.unwrap();
    let mut handler_1_2_a = reader_1.stream_handler(sequencer_id_2).await.unwrap();
    let mut handler_1_1_b = reader_1.stream_handler(sequencer_id_1).await.unwrap();
    let mut handler_1_2_b = reader_1.stream_handler(sequencer_id_2).await.unwrap();
    let mut handler_2_1 = reader_2.stream_handler(sequencer_id_1).await.unwrap();
    let mut handler_2_2 = reader_2.stream_handler(sequencer_id_2).await.unwrap();
    // forward seek; only the seeked handler (`handler_1_1_a`) is affected
    handler_1_1_a
        .seek(w_east_2.meta().sequence().unwrap().number)
        .await
        .unwrap();
    assert_reader_content(&mut handler_1_1_a, &[&w_east_2]).await;
    assert_reader_content(&mut handler_1_2_a, &[&w_west_1]).await;
    assert_reader_content(&mut handler_1_1_b, &[&w_east_1, &w_east_2]).await;
    assert_reader_content(&mut handler_1_2_b, &[&w_west_1]).await;
    assert_reader_content(&mut handler_2_1, &[&w_east_1, &w_east_2]).await;
    assert_reader_content(&mut handler_2_2, &[&w_west_1]).await;
    // backward seek
    handler_1_1_a.seek(0).await.unwrap();
    assert_reader_content(&mut handler_1_1_a, &[&w_east_1, &w_east_2]).await;
    // seek to far end and then add data
    // The affected stream should error and then stop. The other streams should still be pending.
    handler_1_1_a.seek(1_000_000).await.unwrap();
    // BUGFIX: write to the sequencer ID reported by the writer instead of the hard-coded `0`,
    // which silently assumed that sequencer IDs start at 0.
    let w_east_3 = write("namespace", &writer, entry_east_3, sequencer_id_1, None).await;
    let err = handler_1_1_a
        .stream()
        .await
        .next()
        .await
        .expect("stream not ended")
        .unwrap_err();
    assert_eq!(err.kind(), WriteBufferErrorKind::UnknownSequenceNumber);
    assert!(handler_1_1_a.stream().await.next().await.is_none());
    assert_stream_pending(&mut handler_1_2_a.stream().await).await;
    assert_reader_content(&mut handler_1_1_b, &[&w_east_3]).await;
    assert_stream_pending(&mut handler_1_2_b.stream().await).await;
    assert_reader_content(&mut handler_2_1, &[&w_east_3]).await;
    assert_stream_pending(&mut handler_2_2.stream().await).await;
    // seeking again should recover the stream
    handler_1_1_a.seek(0).await.unwrap();
    assert_reader_content(&mut handler_1_1_a, &[&w_east_1, &w_east_2, &w_east_3]).await;
}
/// Test watermark fetching.
///
/// This tests that:
/// - watermarks for empty sequencers is 0
/// - watermarks for non-empty sequencers is "last sequence ID plus 1"
async fn test_watermark<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(2).unwrap()).await;
    let entry_east_1 = "upc,region=east user=1 100";
    let entry_east_2 = "upc,region=east user=2 200";
    let entry_west_1 = "upc,region=west user=1 200";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    let mut sequencer_ids = writer.sequencer_ids();
    let sequencer_id_1 = set_pop_first(&mut sequencer_ids).unwrap();
    let sequencer_id_2 = set_pop_first(&mut sequencer_ids).unwrap();
    // start at watermark 0
    assert_eq!(
        reader.fetch_high_watermark(sequencer_id_1).await.unwrap(),
        0
    );
    assert_eq!(
        reader.fetch_high_watermark(sequencer_id_2).await.unwrap(),
        0
    );
    // high water mark moves: it must equal the sequence number of the last
    // write to that sequencer plus one
    write("namespace", &writer, entry_east_1, sequencer_id_1, None).await;
    let w1 = write("namespace", &writer, entry_east_2, sequencer_id_1, None).await;
    let w2 = write("namespace", &writer, entry_west_1, sequencer_id_2, None).await;
    assert_eq!(
        reader.fetch_high_watermark(sequencer_id_1).await.unwrap(),
        w1.meta().sequence().unwrap().number + 1
    );
    assert_eq!(
        reader.fetch_high_watermark(sequencer_id_2).await.unwrap(),
        w2.meta().sequence().unwrap().number + 1
    );
}
/// Test that timestamps reported by the readers are sane.
async fn test_timestamp<T>(adapter: &T)
where
    T: TestAdapter,
{
    // Note: Roundtrips are only guaranteed for millisecond-precision
    let t0 = Time::from_timestamp_millis(129);
    let time = Arc::new(time::MockProvider::new(t0));
    let context = adapter
        .new_context_with_time(
            NonZeroU32::try_from(1).unwrap(),
            Arc::<time::MockProvider>::clone(&time),
        )
        .await;
    let entry = "upc user=1 100";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    let mut sequencer_ids = writer.sequencer_ids();
    assert_eq!(sequencer_ids.len(), 1);
    let sequencer_id = set_pop_first(&mut sequencer_ids).unwrap();
    let write = write("namespace", &writer, entry, sequencer_id, None).await;
    let reported_ts = write.meta().producer_ts().unwrap();
    // advance time
    time.inc(Duration::from_secs(10));
    // check that the timestamp records the ingestion time, not the read time
    let mut handler = reader.stream_handler(sequencer_id).await.unwrap();
    let sequenced_entry = handler.stream().await.next().await.unwrap().unwrap();
    let ts_entry = sequenced_entry.meta().producer_ts().unwrap();
    assert_eq!(ts_entry, t0);
    assert_eq!(reported_ts, t0);
}
/// Test that sequencer auto-creation works.
///
/// This tests that:
/// - both writer and reader cannot be constructed when sequencers are missing
/// - both writer and reader can auto-create sequencers
async fn test_sequencer_auto_creation<T>(adapter: &T)
where
    T: TestAdapter,
{
    // fail when sequencers are missing (creation_config = false)
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    context.writing(false).await.unwrap_err();
    context.reading(false).await.unwrap_err();
    // writer can create sequencers
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    context.writing(true).await.unwrap();
    context.writing(false).await.unwrap();
    context.reading(false).await.unwrap();
    // reader can create sequencers
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    context.reading(true).await.unwrap();
    context.reading(false).await.unwrap();
    context.writing(false).await.unwrap();
}
/// Test sequencer IDs reporting of readers and writers.
///
/// This tests that:
/// - all sequencers are reported
async fn test_sequencer_ids<T>(adapter: &T)
where
    T: TestAdapter,
{
    let n_sequencers = 10;
    let context = adapter
        .new_context(NonZeroU32::try_from(n_sequencers).unwrap())
        .await;
    let writer_1 = context.writing(true).await.unwrap();
    let writer_2 = context.writing(true).await.unwrap();
    let reader_1 = context.reading(true).await.unwrap();
    let reader_2 = context.reading(true).await.unwrap();
    // all writers and readers must agree on the full ID set
    let sequencer_ids_1 = writer_1.sequencer_ids();
    let sequencer_ids_2 = writer_2.sequencer_ids();
    let sequencer_ids_3 = reader_1.sequencer_ids();
    let sequencer_ids_4 = reader_2.sequencer_ids();
    assert_eq!(sequencer_ids_1.len(), n_sequencers as usize);
    assert_eq!(sequencer_ids_1, sequencer_ids_2);
    assert_eq!(sequencer_ids_1, sequencer_ids_3);
    assert_eq!(sequencer_ids_1, sequencer_ids_4);
}
/// Test that span contexts are propagated through the system.
async fn test_span_context<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    let entry = "upc user=1 100";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    let mut sequencer_ids = writer.sequencer_ids();
    assert_eq!(sequencer_ids.len(), 1);
    let sequencer_id = set_pop_first(&mut sequencer_ids).unwrap();
    let mut handler = reader.stream_handler(sequencer_id).await.unwrap();
    let mut stream = handler.stream().await;
    // 1: no context
    write("namespace", &writer, entry, sequencer_id, None).await;
    // check write 1
    let write_1 = stream.next().await.unwrap().unwrap();
    assert!(write_1.meta().span_context().is_none());
    // no spans emitted yet
    let collector = context.trace_collector();
    assert!(collector.spans().is_empty());
    // 2: some context
    let span_context_1 = SpanContext::new(Arc::clone(&collector) as Arc<_>);
    write(
        "namespace",
        &writer,
        entry,
        sequencer_id,
        Some(&span_context_1),
    )
    .await;
    // 3: another context, this time a child of a parent context
    let span_context_parent = SpanContext::new(Arc::clone(&collector) as Arc<_>);
    let span_context_2 = span_context_parent.child("foo").ctx;
    write(
        "namespace",
        &writer,
        entry,
        sequencer_id,
        Some(&span_context_2),
    )
    .await;
    // check write 2
    let write_2 = stream.next().await.unwrap().unwrap();
    let actual_context_1 = write_2.meta().span_context().unwrap();
    assert_span_context_eq_or_linked(&span_context_1, actual_context_1, collector.spans());
    // check write 3
    let write_3 = stream.next().await.unwrap().unwrap();
    let actual_context_2 = write_3.meta().span_context().unwrap();
    assert_span_context_eq_or_linked(&span_context_2, actual_context_2, collector.spans());
    // check that links / parents make sense
    assert_span_relations_closed(&collector.spans(), &[span_context_1, span_context_2]);
}
/// Test that writing to an unknown sequencer produces an error
async fn test_unknown_sequencer_write<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    let tables = mutable_batch_lp::lines_to_batches("upc user=1 100", 0).unwrap();
    let write = DmlWrite::new("foo", tables, Default::default());
    let operation = DmlOperation::Write(write);
    let writer = context.writing(true).await.unwrap();
    // flip bits to get an unknown sequencer (guaranteed to differ from the known ID)
    let sequencer_id = !set_pop_first(&mut writer.sequencer_ids()).unwrap();
    writer
        .store_operation(sequencer_id, &operation)
        .await
        .unwrap_err();
}
/// Test usage w/ multiple namespaces.
///
/// Tests that:
/// - namespace names are propagated correctly from writer to reader
/// - all namespaces end up in a single stream
async fn test_multi_namespaces<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    let entry_1 = "upc,region=east user=1 100";
    let entry_2 = "upc,region=east user=2 200";
    let writer = context.writing(true).await.unwrap();
    let reader = context.reading(true).await.unwrap();
    let mut sequencer_ids = writer.sequencer_ids();
    assert_eq!(sequencer_ids.len(), 1);
    let sequencer_id = set_pop_first(&mut sequencer_ids).unwrap();
    let w1 = write("namespace_1", &writer, entry_2, sequencer_id, None).await;
    let w2 = write("namespace_2", &writer, entry_1, sequencer_id, None).await;
    // both writes arrive in order in the same stream
    let mut handler = reader.stream_handler(sequencer_id).await.unwrap();
    assert_reader_content(&mut handler, &[&w1, &w2]).await;
}
/// Dummy test to ensure that flushing somewhat works.
async fn test_flush<T>(adapter: &T)
where
    T: TestAdapter,
{
    let context = adapter.new_context(NonZeroU32::try_from(1).unwrap()).await;
    let writer = Arc::new(context.writing(true).await.unwrap());
    let mut sequencer_ids = writer.sequencer_ids();
    assert_eq!(sequencer_ids.len(), 1);
    let sequencer_id = set_pop_first(&mut sequencer_ids).unwrap();
    // start 20 concurrent writes that share the writer
    let mut write_tasks: FuturesUnordered<_> = (0..20)
        .map(|i| {
            let writer = Arc::clone(&writer);
            async move {
                let entry = format!("upc,region=east user={} {}", i, i);
                write("ns", writer.as_ref(), &entry, sequencer_id, None).await;
            }
        })
        .collect();
    let write_tasks = tokio::spawn(async move { while write_tasks.next().await.is_some() {} });
    // give the write tasks a chance to start before flushing
    tokio::time::sleep(Duration::from_millis(1)).await;
    writer.flush().await;
    // after the flush all writes must complete within the timeout
    tokio::time::timeout(Duration::from_millis(1_000), write_tasks)
        .await
        .unwrap()
        .unwrap();
}
/// Assert that the content of the reader is as expected.
///
/// This will read `expected_writes.len()` from the reader and then ensures that the stream is pending.
async fn assert_reader_content(
    actual_stream_handler: &mut Box<dyn WriteBufferStreamHandler>,
    expected_writes: &[&DmlWrite],
) {
    let actual_stream = actual_stream_handler.stream().await;
    // we need to limit the stream to `expected_writes.len()` elements, otherwise it might be pending forever
    let actual_writes: Vec<_> = actual_stream
        .take(expected_writes.len())
        .try_collect()
        .await
        .unwrap();
    assert_eq!(actual_writes.len(), expected_writes.len());
    for (actual, expected) in actual_writes.iter().zip(expected_writes.iter()) {
        assert_write_op_eq(actual, expected);
    }
    // Ensure that stream is pending (no extra entries beyond the expected ones)
    let mut actual_stream = actual_stream_handler.stream().await;
    assert_stream_pending(&mut actual_stream).await;
}
/// Asserts that the given span contexts are the same or that `second` links back to `first`.
///
/// "Same" means:
/// - identical trace ID
/// - identical span ID
/// - identical parent span ID
pub(crate) fn assert_span_context_eq_or_linked(
    first: &SpanContext,
    second: &SpanContext,
    spans: Vec<Span>,
) {
    // If `second` was emitted as a span, accept it when one of its links
    // points back at `first`.
    let links_back = spans
        .iter()
        .filter(|span| span.ctx.trace_id == second.trace_id && span.ctx.span_id == second.span_id)
        .any(|span| {
            span.ctx
                .links
                .iter()
                .any(|link| link.0 == first.trace_id && link.1 == first.span_id)
        });
    if links_back {
        return;
    }

    // no link found => the two contexts must be identical
    assert_eq!(first.trace_id, second.trace_id);
    assert_eq!(first.span_id, second.span_id);
    assert_eq!(first.parent_span_id, second.parent_span_id);
}
/// Assert that all span relations (parents, links) resolve to either another span in `spans` or
/// one of the root contexts in `roots`.
fn assert_span_relations_closed(spans: &[Span], roots: &[SpanContext]) {
    // Collect the (trace ID, span ID) pairs of everything we know about.
    let known: HashSet<_> = spans
        .iter()
        .map(|s| (s.ctx.trace_id, s.ctx.span_id))
        .chain(roots.iter().map(|c| (c.trace_id, c.span_id)))
        .collect();

    for span in spans {
        // The parent (if any) must be known ...
        if let Some(parent) = span.ctx.parent_span_id {
            assert!(known.contains(&(span.ctx.trace_id, parent)));
        }
        // ... and so must every link target.
        for link in &span.ctx.links {
            assert!(known.contains(link));
        }
    }
}
/// Assert that given stream is pending.
///
/// This will will try to poll the stream for a bit to ensure that async IO has a chance to catch up.
async fn assert_stream_pending<S>(stream: &mut S)
where
S: Stream + Send + Unpin,
S::Item: std::fmt::Debug,
{
tokio::select! {
e = stream.next() => panic!("stream is not pending, yielded: {e:?}"),
_ = tokio::time::sleep(Duration::from_millis(10)) => {},
};
}
/// Pops the first (smallest) entry from the set.
///
/// Helper until <https://github.com/rust-lang/rust/issues/62924> is stable.
pub(crate) fn set_pop_first<T>(set: &mut BTreeSet<T>) -> Option<T>
where
    T: Clone + Ord,
{
    // Clone the smallest key first (the set cannot be borrowed immutably and
    // mutably at the same time), then remove & return the matching entry.
    // `and_then` replaces the original `map(..).flatten()` (clippy: map_flatten).
    let first = set.iter().next().cloned();
    first.and_then(|k| set.take(&k))
}
/// Get the testing Kafka connection string or return current scope.
///
/// If `TEST_INTEGRATION` and `KAFKA_CONNECT` are set, return the Kafka connection URL to the
/// caller.
///
/// If `TEST_INTEGRATION` is set but `KAFKA_CONNECT` is not set, fail the tests and provide
/// guidance for setting `KAFKA_CONNECT`.
///
/// If `TEST_INTEGRATION` is not set, skip the calling test by returning early.
#[macro_export]
macro_rules! maybe_skip_kafka_integration {
    () => {{
        use std::env;
        dotenv::dotenv().ok();
        match (
            env::var("TEST_INTEGRATION").is_ok(),
            env::var("KAFKA_CONNECT").ok(),
        ) {
            (true, Some(kafka_connection)) => kafka_connection,
            (true, None) => {
                panic!(
                    "TEST_INTEGRATION is set which requires running integration tests, but \
                    KAFKA_CONNECT is not set. Please run Kafka, perhaps by using the command \
                    `docker-compose -f docker/ci-kafka-docker-compose.yml up kafka`, then \
                    set KAFKA_CONNECT to the host and port where Kafka is accessible. If \
                    running the `docker-compose` command and the Rust tests on the host, the \
                    value for `KAFKA_CONNECT` should be `localhost:9093`. If running the Rust \
                    tests in another container in the `docker-compose` network as on CI, \
                    `KAFKA_CONNECT` should be `kafka:9092`."
                )
            }
            (false, Some(_)) => {
                eprintln!("skipping Kafka integration tests - set TEST_INTEGRATION to run");
                return;
            }
            (false, None) => {
                eprintln!(
                    "skipping Kafka integration tests - set TEST_INTEGRATION and KAFKA_CONNECT to \
                    run"
                );
                return;
            }
        }
    }};
}
}
| 38.84 | 128 | 0.62841 |
f5ff8f3381ddf2ac87e968c54c91bcc2ff8e4ac2 | 17,734 | use std::mem::take;
use serde::{Deserialize, Serialize};
use swc_atoms::{js_word, JsWord};
use swc_common::{util::take::Take, Mark, Spanned, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::{
contains_this_expr, prepend, private_ident, quote_ident, quote_str, ExprFactory, StmtLike,
};
use swc_ecma_visit::{
as_folder, noop_visit_mut_type, noop_visit_type, Fold, Visit, VisitMut, VisitMutWith, VisitWith,
};
use swc_trace_macro::swc_trace;
use self::{case::CaseHandler, hoist::hoist};
mod case;
mod hoist;
mod leap;
/// Configuration for the [`regenerator`] pass.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
    /// Import path used instead of `regenerator-runtime` when injecting the
    /// runtime import/require.
    #[serde(default)]
    pub import_path: Option<JsWord>,
}
/// Creates the `regenerator` pass, which rewrites generator functions into
/// `regeneratorRuntime.wrap(..)`-based state machines.
#[tracing::instrument(level = "info", skip_all)]
pub fn regenerator(config: Config, top_level_mark: Mark) -> impl Fold + VisitMut {
    as_folder(Regenerator {
        config,
        top_level_mark,
        regenerator_runtime: Default::default(),
        top_level_vars: Default::default(),
    })
}
#[derive(Debug)]
struct Regenerator {
    config: Config,
    /// Mark used when emitting the `require(..)` call for scripts.
    top_level_mark: Mark,
    /// [Some] if used.
    regenerator_runtime: Option<Ident>,
    /// Declarators (e.g. the `_marked` helpers for generator function
    /// declarations) that get hoisted into a top-level `var` statement.
    top_level_vars: Vec<VarDeclarator>,
}
/// Builds `var <rt> = require("<src>");`, defaulting `src` to
/// `"regenerator-runtime"` when no custom import path is configured.
fn require_rt(global_mark: Mark, rt: Ident, src: Option<JsWord>) -> Stmt {
    let src = src.unwrap_or_else(|| "regenerator-runtime".into());

    // require("<src>")
    let require_call = Expr::Call(CallExpr {
        span: DUMMY_SP,
        callee: quote_ident!(DUMMY_SP.apply_mark(global_mark), "require").as_callee(),
        args: vec![src.as_arg()],
        type_args: Default::default(),
    });

    // var <rt> = <require_call>;
    let declarator = VarDeclarator {
        span: DUMMY_SP,
        name: rt.into(),
        init: Some(Box::new(require_call)),
        definite: false,
    };
    Stmt::Decl(Decl::Var(VarDecl {
        span: DUMMY_SP,
        kind: VarDeclKind::Var,
        declare: false,
        decls: vec![declarator],
    }))
}
#[swc_trace]
impl Regenerator {
    /// Shared logic for statement and module-item lists: visits every item
    /// and, whenever the visit accumulated hoisted `top_level_vars`, prepends
    /// a single `var` declaration for them to the rebuilt list.
    fn visit_mut_stmt_like<T>(&mut self, items: &mut Vec<T>)
    where
        T: VisitMutWith<Self> + StmtLike,
        Vec<T>: VisitMutWith<Self> + VisitWith<Finder>,
    {
        // Fast path: nothing to do if the list contains no generator function.
        if !Finder::find(items) {
            return;
        }
        let mut new = Vec::with_capacity(items.len() + 2);
        for mut item in items.drain(..) {
            item.visit_mut_children_with(self);
            if !self.top_level_vars.is_empty() {
                // `take` drains the accumulated declarators so they are
                // emitted exactly once.
                prepend(
                    &mut new,
                    T::from_stmt(Stmt::Decl(Decl::Var(VarDecl {
                        span: DUMMY_SP,
                        kind: VarDeclKind::Var,
                        declare: false,
                        decls: take(&mut self.top_level_vars),
                    }))),
                );
            }
            new.push(item);
        }
        *items = new;
    }
}
#[swc_trace]
impl VisitMut for Regenerator {
    noop_visit_mut_type!();

    // Generator function expressions become
    // `regeneratorRuntime.mark(function <name>$(..) { .. })`.
    fn visit_mut_expr(&mut self, e: &mut Expr) {
        if !Finder::find(e) {
            return;
        }
        e.visit_mut_children_with(self);
        if let Expr::Fn(FnExpr {
            ident, function, ..
        }) = e
        {
            if function.is_generator {
                let marked = ident.clone().unwrap_or_else(|| private_ident!("_callee"));
                let ident = self.visit_mut_fn(
                    Some(ident.take().unwrap_or_else(|| marked.clone())),
                    marked,
                    function,
                );
                // Wrap the rewritten function in `regeneratorRuntime.mark(..)`.
                *e = Expr::Call(CallExpr {
                    span: DUMMY_SP,
                    callee: self
                        .regenerator_runtime
                        .clone()
                        .unwrap()
                        .make_member(quote_ident!("mark"))
                        .as_callee(),
                    args: vec![FnExpr {
                        ident,
                        function: function.take(),
                    }
                    .as_arg()],
                    type_args: None,
                });
            }
        }
    }

    // Generator function declarations keep their name; the `mark` call is
    // hoisted into a `var _marked = regeneratorRuntime.mark(<name>);`
    // declarator collected in `top_level_vars`.
    fn visit_mut_fn_decl(&mut self, f: &mut FnDecl) {
        if !Finder::find(f) {
            return;
        }
        if self.regenerator_runtime.is_none() {
            self.regenerator_runtime = Some(private_ident!("regeneratorRuntime"));
        }
        f.visit_mut_children_with(self);
        if f.function.is_generator {
            let marked = private_ident!("_marked");
            self.top_level_vars.push(VarDeclarator {
                span: DUMMY_SP,
                name: marked.clone().into(),
                init: Some(Box::new(Expr::Call(CallExpr {
                    span: DUMMY_SP,
                    callee: self
                        .regenerator_runtime
                        .clone()
                        .unwrap()
                        .make_member(quote_ident!("mark"))
                        .as_callee(),
                    args: vec![f.ident.clone().as_arg()],
                    type_args: None,
                }))),
                definite: false,
            });
            let i = self.visit_mut_fn(Some(f.ident.take()), marked, &mut f.function);
            f.ident = i.unwrap();
        }
    }

    // If the runtime was used anywhere in the module, inject
    // `import regeneratorRuntime from "<import_path>";` at the top.
    fn visit_mut_module(&mut self, m: &mut Module) {
        m.visit_mut_children_with(self);
        if let Some(rt_ident) = self.regenerator_runtime.take() {
            let specifier = ImportSpecifier::Default(ImportDefaultSpecifier {
                span: DUMMY_SP,
                local: rt_ident,
            });
            prepend(
                &mut m.body,
                ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {
                    span: DUMMY_SP,
                    specifiers: vec![specifier],
                    src: quote_str!(self
                        .config
                        .import_path
                        .clone()
                        .unwrap_or_else(|| "regenerator-runtime".into())),
                    type_only: Default::default(),
                    asserts: Default::default(),
                })),
            );
        }
    }

    // `export default function* () { .. }` is handled like a function
    // expression (the wrapping `mark(..)` happens via `visit_mut_fn`).
    fn visit_mut_module_decl(&mut self, i: &mut ModuleDecl) {
        if !Finder::find(i) {
            return;
        }
        i.visit_mut_children_with(self);
        if let ModuleDecl::ExportDefaultDecl(ExportDefaultDecl {
            span,
            decl: DefaultDecl::Fn(FnExpr {
                ident, function, ..
            }),
        }) = i
        {
            let marked = ident.clone().unwrap_or_else(|| private_ident!("_callee"));
            let ident = self.visit_mut_fn(
                Some(ident.take().unwrap_or_else(|| marked.clone())),
                marked,
                function,
            );
            *i = ModuleDecl::ExportDefaultDecl(ExportDefaultDecl {
                span: *span,
                decl: DefaultDecl::Fn(FnExpr {
                    ident,
                    function: function.take(),
                }),
            });
        }
    }

    // Generator object methods are replaced by a plain method whose body
    // returns `regeneratorRuntime.mark(<rewritten fn>)()`.
    fn visit_mut_prop(&mut self, p: &mut Prop) {
        p.visit_mut_children_with(self);
        if let Prop::Method(p) = p {
            if !p.function.is_generator {
                return;
            }
            let marked = private_ident!("_callee");
            let ident = self.visit_mut_fn(Some(marked.clone()), marked, &mut p.function);
            let mark_expr = Expr::Call(CallExpr {
                span: DUMMY_SP,
                callee: self
                    .regenerator_runtime
                    .clone()
                    .unwrap()
                    .make_member(quote_ident!("mark"))
                    .as_callee(),
                args: vec![FnExpr {
                    ident,
                    function: p.function.take(),
                }
                .as_arg()],
                type_args: None,
            });
            p.function = Function {
                span: DUMMY_SP,
                params: vec![],
                decorators: vec![],
                body: Some(BlockStmt {
                    span: DUMMY_SP,
                    stmts: vec![ReturnStmt {
                        span: DUMMY_SP,
                        arg: Some(Box::new(
                            CallExpr {
                                span: DUMMY_SP,
                                callee: mark_expr.as_callee(),
                                args: vec![],
                                type_args: Default::default(),
                            }
                            .into(),
                        )),
                    }
                    .into()],
                }),
                is_generator: false,
                is_async: false,
                type_params: None,
                return_type: None,
            };
        }
    }

    /// Injects `var _regeneratorRuntime = require('regenerator-runtime');`
    fn visit_mut_script(&mut self, s: &mut Script) {
        s.visit_mut_children_with(self);
        if let Some(rt_ident) = self.regenerator_runtime.take() {
            prepend(
                &mut s.body,
                require_rt(
                    self.top_level_mark,
                    rt_ident,
                    self.config.import_path.clone(),
                ),
            );
        }
    }

    /// Delegates to [`Regenerator::visit_mut_stmt_like`] so hoisted vars get injected.
    fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
        self.visit_mut_stmt_like(n);
    }

    /// Delegates to [`Regenerator::visit_mut_stmt_like`] so hoisted vars get injected.
    fn visit_mut_stmts(&mut self, n: &mut Vec<Stmt>) {
        self.visit_mut_stmt_like(n);
    }
}
#[swc_trace]
impl Regenerator {
    /// Rewrites a generator function body into a `regeneratorRuntime.wrap(..)`
    /// state machine and clears `is_generator` on `f`.
    ///
    /// Returns the identifier to use for the (outer) function, unchanged from
    /// `i`. Returns early without touching `f` when it is not a generator or
    /// has no body.
    fn visit_mut_fn(
        &mut self,
        i: Option<Ident>,
        marked_ident: Ident,
        f: &mut Function,
    ) -> Option<Ident> {
        if !f.is_generator || f.body.is_none() {
            return i;
        }
        if self.regenerator_runtime.is_none() {
            self.regenerator_runtime = Some(private_ident!("regeneratorRuntime"));
        }
        let body_span = f.body.span();
        // Inner state-machine function: `<name>$` (or a fresh `ref$`).
        let inner_name = i
            .as_ref()
            .map(|i| Ident::new(format!("{}$", i.sym).into(), i.span))
            .unwrap_or_else(|| private_ident!("ref$"));
        let ctx = private_ident!("_ctx");
        let mut handler = CaseHandler::new(&ctx);
        // f.body
        //     .visit_mut_with(&mut FnSentVisitor { ctx: ctx.clone() });
        let uses_this = contains_this_expr(&f.body);
        // Hoist `var`s, `arguments` usage and function declarations out of the
        // body; they become declarators of the outer function.
        let (body, hoister) = hoist(f.body.take().unwrap());
        let mut outer_fn_vars = vec![];
        outer_fn_vars.extend(hoister.vars.into_iter().map(|id| VarDeclarator {
            span: DUMMY_SP,
            name: id.into(),
            init: None,
            definite: false,
        }));
        outer_fn_vars.extend(hoister.arguments.into_iter().map(|id| {
            VarDeclarator {
                span: DUMMY_SP,
                name: id.clone().into(),
                init: Some(Box::new(
                    Ident {
                        sym: js_word!("arguments"),
                        ..id
                    }
                    .into(),
                )),
                definite: false,
            }
        }));
        // Explode the statements into switch cases keyed by machine state.
        handler.explode_stmts(hoister.functions);
        handler.explode_stmts(body.stmts);
        let mut cases = vec![];
        handler.extend_cases(&mut cases);
        let try_locs_list = handler.get_try_locs_list();
        // Intentionally fall through to the "end" case...
        cases.push(SwitchCase {
            span: DUMMY_SP,
            test: Some(handler.final_loc().into()),
            // fallthrough
            cons: vec![],
        });
        cases.push(SwitchCase {
            span: DUMMY_SP,
            test: Some("end".into()),
            cons: vec![ReturnStmt {
                span: DUMMY_SP,
                // _ctx.stop()
                arg: Some(Box::new(Expr::Call(CallExpr {
                    span: DUMMY_SP,
                    callee: ctx.clone().make_member(quote_ident!("stop")).as_callee(),
                    args: vec![],
                    type_args: Default::default(),
                }))),
            }
            .into()],
        });
        // while (1) switch (_ctx.prev = _ctx.next) { <cases> }
        let stmts = vec![Stmt::While(WhileStmt {
            span: DUMMY_SP,
            test: 1.0.into(),
            body: Box::new(
                SwitchStmt {
                    span: DUMMY_SP,
                    // _ctx.prev = _ctx.next
                    discriminant: Box::new(
                        AssignExpr {
                            span: DUMMY_SP,
                            op: op!("="),
                            left: PatOrExpr::Expr(Box::new(
                                ctx.clone().make_member(quote_ident!("prev")),
                            )),
                            right: Box::new(ctx.clone().make_member(quote_ident!("next"))),
                        }
                        .into(),
                    ),
                    cases,
                }
                .into(),
            ),
        })];
        // New outer body: hoisted vars + `return regeneratorRuntime.wrap(..)`.
        f.body = Some(BlockStmt {
            span: body_span,
            stmts: {
                let mut buf = vec![];
                if !outer_fn_vars.is_empty() {
                    buf.push(Stmt::Decl(Decl::Var(VarDecl {
                        span: DUMMY_SP,
                        kind: VarDeclKind::Var,
                        decls: outer_fn_vars,
                        declare: false,
                    })));
                }
                buf.push(
                    ReturnStmt {
                        span: DUMMY_SP,
                        arg: Some(Box::new(Expr::Call(CallExpr {
                            span: DUMMY_SP,
                            callee: self
                                .regenerator_runtime
                                .clone()
                                .unwrap()
                                .make_member(quote_ident!("wrap"))
                                .as_callee(),
                            args: {
                                // wrap(function <name>$(_ctx) { <stmts> }, <marked>?, <this>?, <tryLocs>?)
                                let mut args = vec![FnExpr {
                                    ident: Some(inner_name),
                                    function: Function {
                                        params: vec![Param {
                                            span: DUMMY_SP,
                                            decorators: Default::default(),
                                            pat: ctx.clone().into(),
                                        }],
                                        decorators: Default::default(),
                                        span: DUMMY_SP,
                                        body: Some(BlockStmt {
                                            span: DUMMY_SP,
                                            stmts,
                                        }),
                                        is_generator: false,
                                        is_async: false,
                                        type_params: None,
                                        return_type: None,
                                    },
                                }
                                .as_arg()];
                                if f.is_generator {
                                    args.push(marked_ident.as_arg());
                                } else if uses_this || try_locs_list.is_some() {
                                    // Async functions that are not generators
                                    // don't care about the
                                    // outer function because they don't need it
                                    // to be marked and don't
                                    // inherit from its .prototype.
                                    args.push(Lit::Null(Null { span: DUMMY_SP }).as_arg());
                                }
                                if uses_this {
                                    args.push(ThisExpr { span: DUMMY_SP }.as_arg())
                                } else if try_locs_list.is_some() {
                                    args.push(Null { span: DUMMY_SP }.as_arg());
                                }
                                if let Some(try_locs_list) = try_locs_list {
                                    args.push(try_locs_list.as_arg())
                                }
                                args
                            },
                            type_args: None,
                        }))),
                    }
                    .into(),
                );
                buf
            },
        });
        f.is_generator = false;
        i
    }
}
// function sent is still stage 2, we good
// struct FnSentVisitor {
// ctx: Ident,
// }
// impl VisitMut for FnSentVisitor {
// noop_visit_mut_type!();
// fn visit_mut_expr(&mut self, e: &mut Expr) {
// e.visit_mut_children_with(self);
// if let Expr::MetaProp(MetaPropExpr { meta, prop }) = e {
// if meta.sym == *"function" && prop.sym == *"sent" {
// *e = self.ctx.clone().make_member(quote_ident!("_sent"));
// }
// }
// }
// }
/// AST visitor that detects whether the visited subtree contains a
/// generator function (see [`Finder::find`]).
struct Finder {
    /// Set to `true` once a generator function has been seen.
    found: bool,
}
impl Finder {
    /// Walks `node` and reports whether a generator function occurs
    /// anywhere within it.
    fn find<T: VisitWith<Self>>(node: &T) -> bool {
        let mut finder = Finder { found: false };
        node.visit_with(&mut finder);
        finder.found
    }
}
impl Visit for Finder {
    noop_visit_type!();
    /// Short-circuits on the first generator; non-generator functions are
    /// searched recursively for nested generators.
    fn visit_function(&mut self, node: &Function) {
        if node.is_generator {
            self.found = true;
        } else {
            node.visit_children_with(self);
        }
    }
}
| 31.895683 | 100 | 0.424495 |
/*
* MIT License
*
* Copyright (c) 2020 Reto Achermann
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* SPDX-License-Identifier: MIT
*/
/***********************************************************************************************
* ***
*
* !!!! WARNING: THIS FILE IS AUTO GENERATED. ANY CHANGES MAY BE OVERWRITTEN !!!!
*
* Generated on: 2020-10-05T16:49:32.052820
* Version: Armv8.7-A-2020-09
* Source: https://developer.arm.com/-/media/developer/products/architecture/armv8-a-architecture/2020-09/SysReg_xml_v87A-2020-09.tar.gz
*
* !!!! WARNING: THIS FILE IS AUTO GENERATED. ANY CHANGES MAY BE OVERWRITTEN !!!!
*
**********************************************************************************************
* * */
/*
* ================================================================================================
* Register Information
* ================================================================================================
*
* Register: AArch64 Processor Feature Register 1 (id_aa64pfr1_el1)
* Group: Identification registers
* Type: 64-bit Register
* Description: Reserved for future expansion of information about implemented PE features in
* AArch64 state. File: AArch64-id_aa64pfr1_el1.xml
*/
/*
* ================================================================================================
* Register Read/Write Functions
* ================================================================================================
*/
/// Reads the raw 64-bit value of the AArch64 Processor Feature Register 1
/// (id_aa64pfr1_el1) using the `mrs` instruction.
pub fn reg_rawrd() -> u64 {
    let mut regval: u64;
    unsafe {
        // MRS <Xt>, ID_AA64PFR1_EL1
        llvm_asm!("mrs $0, id_aa64pfr1_el1" : "=r"(regval));
    }
    return regval;
}
// register is not writable. not emitting write accessor
/*
* ================================================================================================
* Register Fields Read/Write Functions
* ================================================================================================
*/
/// Reads the MPAM_frac field (bits [19:16]) of id_aa64pfr1_el1.
pub fn mpam_frac_read() -> u64 {
    (reg_rawrd() >> 16) & 0xf
}
// register is not writable, omitting writing to field
/// Reads the RAS_frac field (bits [15:12]) of id_aa64pfr1_el1.
pub fn ras_frac_read() -> u64 {
    (reg_rawrd() >> 12) & 0xf
}
// register is not writable, omitting writing to field
/// Reads the MTE field (bits [11:8]) of id_aa64pfr1_el1.
pub fn mte_read() -> u64 {
    (reg_rawrd() >> 8) & 0xf
}
// register is not writable, omitting writing to field
/// Reads the SSBS field (bits [7:4]) of id_aa64pfr1_el1.
pub fn ssbs_read() -> u64 {
    (reg_rawrd() >> 4) & 0xf
}
// register is not writable, omitting writing to field
/// Reads the BT field (bits [3:0]) of id_aa64pfr1_el1.
pub fn bt_read() -> u64 {
    reg_rawrd() & 0xf
}
// register is not writable, omitting writing to field
/*
* ================================================================================================
* Data Structure Definitions
* ================================================================================================
*/
/// struct holding a copy of the AArch64 Processor Feature Register 1 value in memory
pub struct RegVal {
    // Cached register value; only the low 20 bits (the defined fields) are kept.
    val: u64,
}
/// struct implementation for accessing the fields of register id_aa64pfr1_el1
impl RegVal {
    // creates a new value with all fields zeroed
    pub fn default() -> RegVal {
        RegVal { val: 0 }
    }
    /// Returns a fresh RegVal populated from the hardware register.
    /// NOTE(review): `self` is unused here and `&mut self` looks like a
    /// code-generator artifact — confirm against the generator template.
    pub fn current(&mut self) -> RegVal {
        let curval = reg_rawrd() & 0xfffff;
        RegVal { val: curval }
    }
    /// Refreshes the cached value from the hardware register.
    pub fn read(&mut self) {
        self.val = reg_rawrd() & 0xfffff
    }
    // no write() method as it is read only
    // sets the value of the struct, masked to the 20 defined bits (0xfffff)
    pub fn set(&mut self, newval: u64) {
        self.val = newval & 1048575;
    }
    // gets the value of the struct
    pub fn get(&self) -> u64 {
        self.val
    }
    /// extracts the MPAM_frac field from the cached value
    pub fn mpam_frac_extract(&mut self) -> u64 {
        // bits 16..19
        (self.val >> 16) & 0xf
    }
    // no insert() method for field mpam_frac
    /// extracts the RAS_frac field from the cached value
    pub fn ras_frac_extract(&mut self) -> u64 {
        // bits 12..15
        (self.val >> 12) & 0xf
    }
    // no insert() method for field ras_frac
    /// extracts the MTE field from the cached value
    pub fn mte_extract(&mut self) -> u64 {
        // bits 8..11
        (self.val >> 8) & 0xf
    }
    // no insert() method for field mte
    /// extracts the SSBS field from the cached value
    pub fn ssbs_extract(&mut self) -> u64 {
        // bits 4..7
        (self.val >> 4) & 0xf
    }
    // no insert() method for field ssbs
    /// extracts the BT field from the cached value
    pub fn bt_extract(&mut self) -> u64 {
        // bits 0..3
        (self.val >> 0) & 0xf
    }
    // no insert() method for field bt
}
| 29.392344 | 136 | 0.541104 |
use criterion::black_box;
// Queue depth: number of entries pushed per submit round.
const N: usize = 8;
// Number of submit+drain rounds each benchmark performs.
const ITER: usize = 8;
/// Baseline benchmark of the `io-uring` crate: push N no-op SQEs one at a
/// time, submit, then drain the completion queue.
fn bench_io_uring() {
    use io_uring::{opcode, IoUring};
    let mut ring = IoUring::new(N as _).unwrap();
    for _ in 0..ITER {
        // Queue N no-op submissions, tagging each with its index.
        {
            let mut sq = ring.submission();
            for idx in 0..N {
                let sqe = opcode::Nop::new().build().user_data(black_box(idx as _));
                unsafe {
                    sq.push(&sqe).ok().unwrap();
                }
            }
        } // submission-queue borrow ends here
        ring.submit_and_wait(N).unwrap();
        // Drain and discard every completion.
        ring.completion().map(black_box).for_each(drop);
    }
}
#[cfg(feature = "unstable")]
/// Batch variant of [`bench_io_uring`]: pushes all N SQEs with one call and
/// collects completions into a stack buffer via `fill`.
fn bench_io_uring_batch() {
    use io_uring::{opcode, IoUring};
    use std::mem;
    let mut io_uring = IoUring::new(N as _).unwrap();
    // Pre-built batch of N no-op submission entries, tagged 0..N.
    let sqes = [
        opcode::Nop::new().build().user_data(black_box(0)),
        opcode::Nop::new().build().user_data(black_box(1)),
        opcode::Nop::new().build().user_data(black_box(2)),
        opcode::Nop::new().build().user_data(black_box(3)),
        opcode::Nop::new().build().user_data(black_box(4)),
        opcode::Nop::new().build().user_data(black_box(5)),
        opcode::Nop::new().build().user_data(black_box(6)),
        opcode::Nop::new().build().user_data(black_box(7)),
    ];
    // Uninitialized completion buffer; `fill` below writes into it.
    let mut cqes = [
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
        mem::MaybeUninit::uninit(),
    ];
    for _ in 0..ITER {
        unsafe {
            // Push the whole batch with a single queue operation.
            io_uring.submission().push_multiple(&sqes).ok().unwrap();
        }
        io_uring.submit_and_wait(N).unwrap();
        // Collect all completions into the stack buffer instead of iterating.
        let cqes = io_uring.completion().fill(&mut cqes);
        assert_eq!(cqes.len(), N);
        cqes.iter().map(black_box).for_each(drop);
    }
}
/// Same workload as `bench_io_uring`, but through the `iou` crate's API.
fn bench_iou() {
    use iou::IoUring;
    let mut ring = IoUring::new(N as _).unwrap();
    for _ in 0..ITER {
        // Prepare N no-op SQEs, tagging each with its index.
        let mut sq = ring.sq();
        for idx in 0..N {
            unsafe {
                let mut sqe = sq.prepare_sqe().unwrap();
                sqe.prep_nop();
                sqe.set_user_data(black_box(idx as _));
            }
        }
        sq.submit_and_wait(N as _).unwrap();
        // Drain and discard every completion event.
        ring.cqes().map(black_box).for_each(drop);
    }
}
/// Same workload through the raw liburing C bindings (`uring-sys`),
/// popping completions one at a time.
fn bench_uring_sys() {
    use std::{mem, ptr};
    use uring_sys::*;
    // Initialize the ring in place via the C API.
    let mut io_uring = mem::MaybeUninit::<io_uring>::uninit();
    unsafe {
        if io_uring_queue_init(N as _, io_uring.as_mut_ptr(), 0) != 0 {
            panic!()
        }
    }
    // SAFETY-relevant: queue_init returned success, so the ring is initialized.
    let mut io_uring = unsafe { io_uring.assume_init() };
    for _ in 0..ITER {
        for i in 0..N {
            unsafe {
                let sqe = io_uring_get_sqe(&mut io_uring);
                // Null means the submission queue is full.
                if sqe.is_null() {
                    panic!()
                }
                io_uring_prep_nop(sqe);
                io_uring_sqe_set_data(sqe, i as _);
            }
        }
        unsafe {
            if io_uring_submit_and_wait(&mut io_uring, N as _) < 0 {
                panic!()
            }
        }
        // Pop completions one at a time until the queue is empty.
        loop {
            unsafe {
                let mut cqe = ptr::null_mut();
                io_uring_peek_cqe(&mut io_uring, &mut cqe);
                if cqe.is_null() {
                    break;
                }
                black_box(cqe);
                io_uring_cq_advance(&mut io_uring, 1);
            }
        }
    }
}
/// Batch variant of [`bench_uring_sys`]: peeks all completions at once.
fn bench_uring_sys_batch() {
    use std::{mem, ptr};
    use uring_sys::*;
    let mut io_uring = mem::MaybeUninit::<io_uring>::uninit();
    unsafe {
        if io_uring_queue_init(N as _, io_uring.as_mut_ptr(), 0) != 0 {
            panic!()
        }
    }
    // SAFETY-relevant: queue_init returned success, so the ring is initialized.
    let mut io_uring = unsafe { io_uring.assume_init() };
    // Output buffer for the batched completion peek.
    // NOTE(review): the literal 8s below assume N == 8; consider using N.
    let mut cqes = [ptr::null_mut(); 8];
    for _ in 0..ITER {
        for i in 0..N {
            unsafe {
                let sqe = io_uring_get_sqe(&mut io_uring);
                if sqe.is_null() {
                    panic!()
                }
                io_uring_prep_nop(sqe);
                io_uring_sqe_set_data(sqe, i as _);
            }
        }
        unsafe {
            if io_uring_submit_and_wait(&mut io_uring, N as _) < 0 {
                panic!()
            }
        }
        unsafe {
            // Expect exactly one completion per submitted entry.
            if io_uring_peek_batch_cqe(&mut io_uring, cqes.as_mut_ptr(), cqes.len() as _) != 8 {
                panic!()
            }
            io_uring_cq_advance(&mut io_uring, 8);
        }
        cqes.iter().copied().map(black_box).for_each(drop);
    }
}
// Register the benchmarks with the iai harness. The io-uring batch
// benchmark needs APIs gated behind this crate's "unstable" feature.
#[cfg(feature = "unstable")]
iai::main!(
    bench_io_uring,
    bench_io_uring_batch,
    bench_iou,
    bench_uring_sys,
    bench_uring_sys_batch
);
// Without the feature: the same set minus the io-uring batch benchmark.
#[cfg(not(feature = "unstable"))]
iai::main!(
    bench_io_uring,
    bench_iou,
    bench_uring_sys,
    bench_uring_sys_batch
);
| 23.676329 | 96 | 0.507651 |
// Unless explicitly stated otherwise all files in this repository are licensed
// under the MIT/Apache-2.0 License, at your convenience
//
// This product includes software developed at Datadog (https://www.datadoghq.com/). Copyright 2020
// Datadog, Inc.
#[cfg(test)]
mod tests {
    use std::process::Command;
    /// Verifies the tree is formatted. glommio formats with the nightly
    /// toolchain, so `rustfmt` must be invoked via `+nightly`.
    // NOTE(review): the name has a typo ("formating"); kept unchanged so
    // existing `cargo test` name filters keep working.
    #[test]
    fn check_formating() {
        let status = Command::new("cargo")
            .args(["+nightly", "fmt", "--all", "--", "--check"])
            .status()
            .unwrap();
        assert!(
            status.success(),
            "cargo fmt failed. Note that glommio uses nightly for formatting, so please invoke \
             cargo with +nightly"
        );
    }
    /// Runs clippy on all targets with warnings promoted to errors.
    #[test]
    fn check_clippy() {
        let status = Command::new("cargo")
            .args(["+stable", "clippy", "--all-targets", "--", "-D", "warnings"])
            .status()
            .unwrap();
        // A diagnostic message keeps this assert consistent with its siblings.
        assert!(
            status.success(),
            "cargo clippy reported warnings; fix them before merging"
        );
    }
    /// Checks that Cargo.toml dependency tables are sorted (requires cargo-sort).
    #[test]
    fn check_dependencies_sorted() {
        let status = Command::new("cargo")
            .args(["sort", "-w", "-c"])
            .status()
            .unwrap();
        assert!(
            status.success(),
            "cargo-sort not installed or cargo.toml dependencies not sorted"
        );
    }
}
| 29.511628 | 99 | 0.527975 |
1df48d24501120ea030f4574747290647281f95e | 219 | pub fn add(x: i32, y: i32) -> i32 {
// 一个模块总是可以访问其父级作用域(通过 super::)
// 即便是是父级作用域的私有变量、私有函数等。 DEBUG 是私有的,但我们可以在 add 模块中使用它
if super::DEBUG {
println!("[DEBUG]: add({}, {})", x, y);
}
x + y
}
| 24.333333 | 58 | 0.52968 |
use std::marker::PhantomData;
use std::os::raw::c_int;
use error::Result;
use ffi;
use types::{Integer, LuaRef};
use util::{assert_stack, protect_lua, protect_lua_closure, StackGuard};
use value::{FromLua, Nil, ToLua, Value};
/// Handle to an internal Lua table.
///
/// Wraps a `LuaRef` that keeps the table alive within its owning Lua state.
#[derive(Clone, Debug)]
pub struct Table<'lua>(pub(crate) LuaRef<'lua>);
impl<'lua> Table<'lua> {
    /// Sets a key-value pair in the table.
    ///
    /// If the value is `nil`, this will effectively remove the pair.
    ///
    /// This might invoke the `__newindex` metamethod. Use the [`raw_set`] method if that is not
    /// desired.
    ///
    /// # Examples
    ///
    /// Export a value as a global to make it usable from Lua:
    ///
    /// ```
    /// # extern crate rlua;
    /// # use rlua::{Lua, Result};
    /// # fn main() -> Result<()> {
    /// # Lua::new().context(|lua_context| {
    /// let globals = lua_context.globals();
    ///
    /// globals.set("assertions", cfg!(debug_assertions))?;
    ///
    /// lua_context.exec::<_, ()>(r#"
    /// if assertions == true then
    /// -- ...
    /// elseif assertions == false then
    /// -- ...
    /// else
    /// error("assertions neither on nor off?")
    /// end
    /// "#, None)?;
    /// # Ok(())
    /// # })
    /// # }
    /// ```
    ///
    /// [`raw_set`]: #method.raw_set
    pub fn set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> Result<()> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        let value = value.to_lua(lua)?;
        unsafe {
            // Guard restores the Lua stack top when this scope exits.
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 6);
            // Stack after these pushes: table, key, value.
            lua.push_ref(&self.0);
            lua.push_value(key);
            lua.push_value(value);
            // `lua_settable` may invoke a `__newindex` metamethod, which can
            // raise a Lua error, so it must run inside `protect_lua`.
            unsafe extern "C" fn set_table(state: *mut ffi::lua_State) -> c_int {
                ffi::lua_settable(state, -3);
                1
            }
            protect_lua(lua.state, 3, set_table)
        }
    }
    /// Gets the value associated to `key` from the table.
    ///
    /// If no value is associated to `key`, returns the `nil` value.
    ///
    /// This might invoke the `__index` metamethod. Use the [`raw_get`] method if that is not
    /// desired.
    ///
    /// # Examples
    ///
    /// Query the version of the Lua interpreter:
    ///
    /// ```
    /// # extern crate rlua;
    /// # use rlua::{Lua, Result};
    /// # fn main() -> Result<()> {
    /// # Lua::new().context(|lua_context| {
    /// let globals = lua_context.globals();
    ///
    /// let version: String = globals.get("_VERSION")?;
    /// println!("Lua version: {}", version);
    /// # Ok(())
    /// # })
    /// # }
    /// ```
    ///
    /// [`raw_get`]: #method.raw_get
    pub fn get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> Result<V> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        let value = unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 5);
            lua.push_ref(&self.0);
            lua.push_value(key);
            // `lua_gettable` may invoke an `__index` metamethod, which can
            // raise a Lua error, so it must run inside `protect_lua`.
            unsafe extern "C" fn get_table(state: *mut ffi::lua_State) -> c_int {
                ffi::lua_gettable(state, -2);
                1
            }
            protect_lua(lua.state, 2, get_table)?;
            lua.pop_value()
        };
        V::from_lua(value, lua)
    }
    /// Checks whether the table contains a non-nil value for `key`.
    pub fn contains_key<K: ToLua<'lua>>(&self, key: K) -> Result<bool> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 5);
            lua.push_ref(&self.0);
            lua.push_value(key);
            unsafe extern "C" fn get_table(state: *mut ffi::lua_State) -> c_int {
                ffi::lua_gettable(state, -2);
                1
            }
            protect_lua(lua.state, 2, get_table)?;
            // A non-nil result means the key is present.
            let has = ffi::lua_isnil(lua.state, -1) == 0;
            Ok(has)
        }
    }
    /// Sets a key-value pair without invoking metamethods.
    pub fn raw_set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> Result<()> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        let value = value.to_lua(lua)?;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 6);
            lua.push_ref(&self.0);
            lua.push_value(key);
            lua.push_value(value);
            // Raw set: bypasses any `__newindex` metamethod.
            unsafe extern "C" fn raw_set(state: *mut ffi::lua_State) -> c_int {
                ffi::lua_rawset(state, -3);
                0
            }
            protect_lua(lua.state, 3, raw_set)?;
            Ok(())
        }
    }
    /// Gets the value associated to `key` without invoking metamethods.
    pub fn raw_get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> Result<V> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        let value = unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 3);
            lua.push_ref(&self.0);
            lua.push_value(key);
            // Raw get: bypasses `__index`, called without `protect_lua`.
            ffi::lua_rawget(lua.state, -2);
            lua.pop_value()
        };
        V::from_lua(value, lua)
    }
    /// Returns the result of the Lua `#` operator.
    ///
    /// This might invoke the `__len` metamethod. Use the [`raw_len`] method if that is not desired.
    ///
    /// [`raw_len`]: #method.raw_len
    pub fn len(&self) -> Result<Integer> {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 4);
            lua.push_ref(&self.0);
            // `luaL_len` may invoke `__len`, which can raise, so run protected.
            protect_lua_closure(lua.state, 1, 0, |state| ffi::luaL_len(state, -1))
        }
    }
    /// Returns the result of the Lua `#` operator, without invoking the `__len` metamethod.
    pub fn raw_len(&self) -> Integer {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 1);
            lua.push_ref(&self.0);
            let len = ffi::lua_rawlen(lua.state, -1);
            len as Integer
        }
    }
    /// Returns a reference to the metatable of this table, or `None` if no metatable is set.
    ///
    /// Unlike the `getmetatable` Lua function, this method ignores the `__metatable` field.
    pub fn get_metatable(&self) -> Option<Table<'lua>> {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 1);
            lua.push_ref(&self.0);
            // `lua_getmetatable` returns 0 (and pushes nothing) when unset.
            if ffi::lua_getmetatable(lua.state, -1) == 0 {
                None
            } else {
                let table = Table(lua.pop_ref());
                Some(table)
            }
        }
    }
    /// Sets or removes the metatable of this table.
    ///
    /// If `metatable` is `None`, the metatable is removed (if no metatable is set, this does
    /// nothing).
    pub fn set_metatable(&self, metatable: Option<Table<'lua>>) {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 1);
            lua.push_ref(&self.0);
            // Pushing nil instead of a table removes the metatable.
            if let Some(metatable) = metatable {
                lua.push_ref(&metatable.0);
            } else {
                ffi::lua_pushnil(lua.state);
            }
            ffi::lua_setmetatable(lua.state, -2);
        }
    }
    /// Consume this table and return an iterator over the pairs of the table.
    ///
    /// This works like the Lua `pairs` function, but does not invoke the `__pairs` metamethod.
    ///
    /// The pairs are wrapped in a [`Result`], since they are lazily converted to `K` and `V` types.
    ///
    /// # Note
    ///
    /// While this method consumes the `Table` object, it can not prevent code from mutating the
    /// table while the iteration is in progress. Refer to the [Lua manual] for information about
    /// the consequences of such mutation.
    ///
    /// # Examples
    ///
    /// Iterate over all globals:
    ///
    /// ```
    /// # extern crate rlua;
    /// # use rlua::{Lua, Result, Value};
    /// # fn main() -> Result<()> {
    /// # Lua::new().context(|lua_context| {
    /// let globals = lua_context.globals();
    ///
    /// for pair in globals.pairs::<Value, Value>() {
    /// let (key, value) = pair?;
    /// # let _ = (key, value); // used
    /// // ...
    /// }
    /// # Ok(())
    /// # })
    /// # }
    /// ```
    ///
    /// [`Result`]: type.Result.html
    /// [Lua manual]: http://www.lua.org/manual/5.3/manual.html#pdf-next
    pub fn pairs<K: FromLua<'lua>, V: FromLua<'lua>>(self) -> TablePairs<'lua, K, V> {
        // Iteration starts by feeding nil to `lua_next` (see TablePairs).
        TablePairs {
            table: self.0,
            next_key: Some(Nil),
            _phantom: PhantomData,
        }
    }
    /// Consume this table and return an iterator over all values in the sequence part of the table.
    ///
    /// The iterator will yield all values `t[1]`, `t[2]`, and so on, until a `nil` value is
    /// encountered. This mirrors the behaviour of Lua's `ipairs` function and will invoke the
    /// `__index` metamethod according to the usual rules. However, the deprecated `__ipairs`
    /// metatable will not be called.
    ///
    /// Just like [`pairs`], the values are wrapped in a [`Result`].
    ///
    /// # Note
    ///
    /// While this method consumes the `Table` object, it can not prevent code from mutating the
    /// table while the iteration is in progress. Refer to the [Lua manual] for information about
    /// the consequences of such mutation.
    ///
    /// # Examples
    ///
    /// ```
    /// # extern crate rlua;
    /// # use rlua::{Lua, Result, Table};
    /// # fn main() -> Result<()> {
    /// # Lua::new().context(|lua_context| {
    /// let my_table: Table = lua_context.eval("{ [1] = 4, [2] = 5, [4] = 7, key = 2 }", None)?;
    ///
    /// let expected = [4, 5];
    /// for (&expected, got) in expected.iter().zip(my_table.sequence_values::<u32>()) {
    /// assert_eq!(expected, got?);
    /// }
    /// # Ok(())
    /// # })
    /// # }
    /// ```
    ///
    /// [`pairs`]: #method.pairs
    /// [`Result`]: type.Result.html
    /// [Lua manual]: http://www.lua.org/manual/5.3/manual.html#pdf-next
    pub fn sequence_values<V: FromLua<'lua>>(self) -> TableSequence<'lua, V> {
        // Lua sequences are 1-based, so iteration starts at index 1.
        TableSequence {
            table: self.0,
            index: Some(1),
            _phantom: PhantomData,
        }
    }
}
/// An iterator over the pairs of a Lua table.
///
/// This struct is created by the [`Table::pairs`] method.
///
/// [`Table::pairs`]: struct.Table.html#method.pairs
pub struct TablePairs<'lua, K, V> {
    // Reference keeping the iterated table alive.
    table: LuaRef<'lua>,
    // Key to feed to `lua_next` on the next step; `None` once exhausted.
    next_key: Option<Value<'lua>>,
    _phantom: PhantomData<(K, V)>,
}
impl<'lua, K, V> Iterator for TablePairs<'lua, K, V>
where
    K: FromLua<'lua>,
    V: FromLua<'lua>,
{
    type Item = Result<(K, V)>;
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(next_key) = self.next_key.take() {
            let lua = self.table.lua;
            let res = (|| {
                let res = unsafe {
                    let _sg = StackGuard::new(lua.state);
                    assert_stack(lua.state, 6);
                    lua.push_ref(&self.table);
                    lua.push_value(next_key);
                    // `lua_next` pops the key and, on success, pushes the
                    // next key/value pair; it may raise, hence the protection.
                    if protect_lua_closure(lua.state, 2, ffi::LUA_MULTRET, |state| {
                        ffi::lua_next(state, -2) != 0
                    })? {
                        // Stack: table, key, value. Duplicate the key so one
                        // copy can be returned and the original kept as the
                        // cursor for the following `lua_next` call.
                        ffi::lua_pushvalue(lua.state, -2);
                        let key = lua.pop_value();
                        let value = lua.pop_value();
                        self.next_key = Some(lua.pop_value());
                        Some((key, value))
                    } else {
                        // `lua_next` returned 0: iteration is finished.
                        None
                    }
                };
                Ok(if let Some((key, value)) = res {
                    Some((K::from_lua(key, lua)?, V::from_lua(value, lua)?))
                } else {
                    None
                })
            })();
            match res {
                Ok(Some((key, value))) => Some(Ok((key, value))),
                Ok(None) => None,
                Err(e) => Some(Err(e)),
            }
        } else {
            // `next_key` was consumed by a previous error or exhaustion.
            None
        }
    }
}
/// An iterator over the sequence part of a Lua table.
///
/// This struct is created by the [`Table::sequence_values`] method.
///
/// [`Table::sequence_values`]: struct.Table.html#method.sequence_values
pub struct TableSequence<'lua, V> {
    // Reference keeping the iterated table alive.
    table: LuaRef<'lua>,
    // Next 1-based index to fetch; `None` once a nil value or an error
    // has ended the iteration.
    index: Option<Integer>,
    _phantom: PhantomData<V>,
}
impl<'lua, V> Iterator for TableSequence<'lua, V>
where
    V: FromLua<'lua>,
{
    type Item = Result<V>;
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(index) = self.index.take() {
            let lua = self.table.lua;
            let res = unsafe {
                let _sg = StackGuard::new(lua.state);
                assert_stack(lua.state, 5);
                lua.push_ref(&self.table);
                // `lua_geti` may go through `__index`, so it runs protected;
                // it returns the Lua type tag of the fetched value.
                match protect_lua_closure(lua.state, 1, 1, |state| ffi::lua_geti(state, -1, index))
                {
                    // A nil value terminates the sequence.
                    Ok(ffi::LUA_TNIL) => None,
                    Ok(_) => {
                        let value = lua.pop_value();
                        // Only advance the cursor on success.
                        self.index = Some(index + 1);
                        Some(Ok(value))
                    }
                    Err(err) => Some(Err(err)),
                }
            };
            match res {
                Some(Ok(r)) => Some(V::from_lua(r, lua)),
                Some(Err(err)) => Some(Err(err)),
                None => None,
            }
        } else {
            None
        }
    }
}
| 31.408163 | 100 | 0.496787 |
90f5a8c4ecb9a15316b20e7ade1cc14f79de1428 | 103 | // rustfmt-force_explicit_abi: true
// Force explicit abi
extern "C" {
pub static lorem: c_int;
}
| 14.714286 | 35 | 0.68932 |
2646b8482b8d811c759464eff8d7fc12b442a700 | 2,955 | use std::env;
use std::ffi::OsString;
use structopt::StructOpt;
use which::which_in;
use volta_core::error::ErrorDetails;
use volta_core::platform::System;
use volta_core::run::DefaultBinary;
use volta_core::session::{ActivityKind, Session};
use volta_fail::{ExitCode, Fallible, ResultExt};
use crate::command::Command;
// `volta which`: resolves the path of the binary that would execute for the
// given tool name. (Plain comments here on purpose — a `///` doc comment on
// a StructOpt struct would change the generated CLI help text.)
#[derive(StructOpt)]
pub(crate) struct Which {
    /// The binary to find, e.g. `node` or `npm`
    binary: String,
}
impl Command for Which {
    // 1. Start by checking if the user has a tool installed in the project or
    // as a user default. If so, we're done.
    // 2. Otherwise, use the platform image and/or the system environment to
    // determine a lookup path to run `which` in.
    fn run(self, session: &mut Session) -> Fallible<ExitCode> {
        session.add_event_start(ActivityKind::Which);
        let bin = OsString::from(self.binary.as_str());
        // User-default tool installed under Volta's management, if any.
        let user_tool = DefaultBinary::from_name(&bin, session)?;
        // Bin directory of the current project, if it declares this binary
        // directly in its package manifest.
        let project_bin_path =
            session
                .project()?
                .and_then(|project| match project.has_direct_bin(&bin) {
                    Ok(true) => Some(project.local_bin_dir()),
                    _ => None,
                });
        // A project-local binary shadows the user default tool.
        let tool_path = match (user_tool, project_bin_path) {
            (Some(_), Some(project_bin_dir)) => Some(project_bin_dir.join(&bin)),
            (Some(tool), _) => Some(tool.bin_path),
            _ => None,
        };
        if let Some(path) = tool_path {
            println!("{}", path.to_string_lossy());
            let exit_code = ExitCode::Success;
            session.add_event_end(ActivityKind::Which, exit_code);
            return Ok(exit_code);
        }
        // Treat any error with obtaining the current platform image as if the image doesn't exist
        // However, errors in obtaining the current working directory or the System path should
        // still be treated as errors.
        let path = match session
            .current_platform()
            .unwrap_or(None)
            .and_then(|platform| platform.checkout(session).ok())
            .and_then(|image| image.path().ok())
        {
            Some(path) => path,
            None => System::path()?,
        };
        let cwd = env::current_dir().with_context(|_| ErrorDetails::CurrentDirError)?;
        let exit_code = match which_in(&bin, Some(path), cwd) {
            Ok(result) => {
                println!("{}", result.to_string_lossy());
                ExitCode::Success
            }
            Err(_) => {
                // `which_in` Will return an Err if it can't find the binary in the path
                // In that case, we don't want to print anything out, but we want to return
                // Exit Code 1 (ExitCode::UnknownError)
                ExitCode::UnknownError
            }
        };
        session.add_event_end(ActivityKind::Which, exit_code);
        Ok(exit_code)
    }
}
| 34.764706 | 98 | 0.579357 |
fcea83aac9b0172882a7349d0616c9245006428e | 4,781 | #![cfg(feature = "test-bpf")]
mod utils;
use audius_reward_manager::instruction;
use borsh::BorshSerialize;
use solana_program::program_option::COption;
use solana_program::program_pack::IsInitialized;
use solana_sdk::signature::Keypair;
use utils::program_test;
use solana_program::instruction::InstructionError;
use solana_program::{program_pack::Pack, pubkey::Pubkey};
use solana_program_test::*;
use solana_sdk::{
account::Account,
signature::Signer,
system_instruction::create_account,
transaction::{Transaction, TransactionError},
};
#[tokio::test]
/// Test reward manager successfully initializes with expected state
async fn success_init_reward_manager() {
    let mut program_test = program_test();
    let manager = Pubkey::new_unique();
    let reward_manager = Keypair::new();
    let token_account = Keypair::new();
    let mint = Pubkey::new_unique();
    let min_votes = 3;
    // Pre-populate the mint account with packed SPL-token mint state.
    let mut data = vec![0u8; spl_token::state::Mint::LEN];
    let mint_data = spl_token::state::Mint {
        mint_authority: COption::None,
        supply: 100,
        decimals: 4,
        is_initialized: true,
        freeze_authority: COption::None,
    };
    mint_data.pack_into_slice(data.as_mut_slice());
    // NOTE(review): the mint account is owned by the reward-manager program
    // here, not by spl_token — confirm this is intentional for the fixture.
    program_test.add_account(
        mint,
        Account {
            lamports: 9000,
            data,
            owner: audius_reward_manager::id(),
            executable: false,
            rent_epoch: 0,
        },
    );
    let mut context = program_test.start_with_context().await;
    let rent = context.banks_client.get_rent().await.unwrap();
    // Create the (rent-exempt) state and token accounts, then initialize
    // the reward manager, all in one transaction.
    let tx = Transaction::new_signed_with_payer(
        &[
            create_account(
                &context.payer.pubkey(),
                &reward_manager.pubkey(),
                rent.minimum_balance(audius_reward_manager::state::RewardManager::LEN),
                audius_reward_manager::state::RewardManager::LEN as _,
                &audius_reward_manager::id(),
            ),
            create_account(
                &context.payer.pubkey(),
                &token_account.pubkey(),
                rent.minimum_balance(spl_token::state::Account::LEN),
                spl_token::state::Account::LEN as _,
                &spl_token::id(),
            ),
            instruction::init(
                &audius_reward_manager::id(),
                &reward_manager.pubkey(),
                &token_account.pubkey(),
                &mint,
                &manager,
                min_votes,
            )
            .unwrap(),
        ],
        Some(&context.payer.pubkey()),
        &[&context.payer, &reward_manager, &token_account],
        context.last_blockhash,
    );
    context.banks_client.process_transaction(tx).await.unwrap();
    // The reward-manager state account must hold the values passed to init.
    assert_eq!(
        audius_reward_manager::state::RewardManager::new(
            token_account.pubkey(),
            manager,
            min_votes
        ),
        context
            .banks_client
            .get_account_data_with_borsh(reward_manager.pubkey())
            .await
            .unwrap()
    );
    // init must also have initialized the SPL token account.
    let token_data: spl_token::state::Account = context
        .banks_client
        .get_packed_account_data(token_account.pubkey())
        .await
        .unwrap();
    assert!(token_data.is_initialized());
}
#[tokio::test]
/// Test rewards manager fails to initialized if already initialized
async fn fail_already_initialized() {
    let mut program_test = program_test();
    let reward_manager = Pubkey::new_unique();
    let token_account = Pubkey::new_unique();
    let mint = Pubkey::new_unique();
    let manager = Pubkey::new_unique();
    // Pre-populate the reward-manager account with already-initialized
    // state so a subsequent init instruction must be rejected.
    let mut data = Vec::<u8>::with_capacity(audius_reward_manager::state::RewardManager::LEN);
    audius_reward_manager::state::RewardManager::new(token_account, manager, 3)
        .serialize(&mut data)
        .unwrap();
    program_test.add_account(
        reward_manager,
        Account {
            lamports: 9000,
            data,
            owner: audius_reward_manager::id(),
            executable: false,
            rent_epoch: 0,
        },
    );
    let mut context = program_test.start_with_context().await;
    let tx = Transaction::new_signed_with_payer(
        &[instruction::init(
            &audius_reward_manager::id(),
            &reward_manager,
            &token_account,
            &mint,
            &manager,
            3,
        )
        .unwrap()],
        Some(&context.payer.pubkey()),
        &[&context.payer],
        context.last_blockhash,
    );
    // The program must reject the second initialization attempt.
    assert_eq!(
        context
            .banks_client
            .process_transaction(tx)
            .await
            .unwrap_err()
            .unwrap(),
        TransactionError::InstructionError(0, InstructionError::AccountAlreadyInitialized)
    );
}
| 29.88125 | 94 | 0.592345 |
f72d578c55fe4e2b19d6a084b38c4942c8d03066 | 29,022 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
access_path::AccessPath,
account_address::AccountAddress,
account_config::AccountResource,
account_state_blob::AccountStateBlob,
block_metadata::BlockMetaData,
byte_array::ByteArray,
contract_event::ContractEvent,
event::{EventHandle, EventKey},
get_with_proof::{ResponseItem, UpdateToLatestLedgerResponse},
ledger_info::{LedgerInfo, LedgerInfoWithSignatures},
proof::AccumulatorProof,
transaction::{
Module, Program, RawTransaction, Script, SignatureCheckedTransaction, SignedTransaction,
TransactionArgument, TransactionInfo, TransactionListWithProof, TransactionPayload,
TransactionStatus, TransactionToCommit, Version,
},
validator_change::ValidatorChangeEventWithProof,
vm_error::{StatusCode, VMStatus},
write_set::{WriteOp, WriteSet, WriteSetMut},
};
use crypto::{
ed25519::{compat::keypair_strategy, *},
hash::CryptoHash,
traits::*,
HashValue,
};
use proptest::{
collection::{vec, SizeRange},
option,
prelude::*,
};
use proptest_derive::Arbitrary;
use proptest_helpers::Index;
use std::time::Duration;
prop_compose! {
    // Strategy producing `ByteArray`s of 1 to 10 arbitrary bytes.
    #[inline]
    pub fn arb_byte_array()(byte_array in vec(any::<u8>(), 1..=10)) -> ByteArray {
        ByteArray::new(byte_array)
    }
}
impl Arbitrary for ByteArray {
    type Parameters = ();
    // Delegates to the `arb_byte_array` strategy defined above.
    #[inline]
    fn arbitrary_with(_args: ()) -> Self::Strategy {
        arb_byte_array().boxed()
    }
    type Strategy = BoxedStrategy<Self>;
}
impl WriteOp {
    /// Strategy producing `WriteOp::Value` ops with 0 to 63 arbitrary bytes.
    pub fn value_strategy() -> impl Strategy<Value = Self> {
        vec(any::<u8>(), 0..64).prop_map(WriteOp::Value)
    }
    /// Strategy producing only `WriteOp::Deletion`.
    pub fn deletion_strategy() -> impl Strategy<Value = Self> {
        Just(WriteOp::Deletion)
    }
}
impl Arbitrary for WriteOp {
    type Parameters = ();
    // Picks between the deletion and value strategies.
    fn arbitrary_with(_args: ()) -> Self::Strategy {
        prop_oneof![Self::deletion_strategy(), Self::value_strategy()].boxed()
    }
    type Strategy = BoxedStrategy<Self>;
}
impl WriteSet {
    /// Strategy for genesis-style write sets: only `Value` ops (no
    /// deletions), 0 to 63 entries.
    fn genesis_strategy() -> impl Strategy<Value = Self> {
        vec((any::<AccessPath>(), WriteOp::value_strategy()), 0..64).prop_map(|write_set| {
            let write_set_mut = WriteSetMut::new(write_set);
            write_set_mut
                .freeze()
                .expect("generated write sets should always be valid")
        })
    }
}
impl Arbitrary for WriteSet {
    type Parameters = ();
    fn arbitrary_with(_args: ()) -> Self::Strategy {
        // XXX there's no checking for repeated access paths here, nor in write_set. Is that
        // important? Not sure.
        // Unlike `genesis_strategy`, this allows both Value and Deletion ops.
        vec((any::<AccessPath>(), any::<WriteOp>()), 0..64)
            .prop_map(|write_set| {
                let write_set_mut = WriteSetMut::new(write_set);
                write_set_mut
                    .freeze()
                    .expect("generated write sets should always be valid")
            })
            .boxed()
    }
    type Strategy = BoxedStrategy<Self>;
}
#[derive(Debug)]
struct AccountInfo {
address: AccountAddress,
private_key: Ed25519PrivateKey,
public_key: Ed25519PublicKey,
sequence_number: u64,
sent_event_handle: EventHandle,
received_event_handle: EventHandle,
}
impl AccountInfo {
pub fn new(private_key: Ed25519PrivateKey, public_key: Ed25519PublicKey) -> Self {
let address = AccountAddress::from_public_key(&public_key);
Self {
address,
private_key,
public_key,
sequence_number: 0,
sent_event_handle: EventHandle::new_from_address(&address, 0),
received_event_handle: EventHandle::new_from_address(&address, 1),
}
}
}
#[derive(Debug)]
pub struct AccountInfoUniverse {
accounts: Vec<AccountInfo>,
}
impl AccountInfoUniverse {
fn new(keypairs: Vec<(Ed25519PrivateKey, Ed25519PublicKey)>) -> Self {
let accounts = keypairs
.into_iter()
.map(|(private_key, public_key)| AccountInfo::new(private_key, public_key))
.collect();
Self { accounts }
}
fn get_account_info(&self, account_index: Index) -> &AccountInfo {
account_index.get(&self.accounts)
}
fn get_account_info_mut(&mut self, account_index: Index) -> &mut AccountInfo {
account_index.get_mut(self.accounts.as_mut_slice())
}
}
impl Arbitrary for AccountInfoUniverse {
type Parameters = usize;
fn arbitrary_with(num_accounts: Self::Parameters) -> Self::Strategy {
vec(keypair_strategy(), num_accounts)
.prop_map(Self::new)
.boxed()
}
fn arbitrary() -> Self::Strategy {
unimplemented!("Size of the universe must be provided explicitly (use any_with instead).")
}
type Strategy = BoxedStrategy<Self>;
}
#[derive(Arbitrary, Debug)]
pub struct RawTransactionGen {
payload: TransactionPayload,
max_gas_amount: u64,
gas_unit_price: u64,
expiration_time_secs: u64,
}
impl RawTransactionGen {
pub fn materialize(
self,
sender_index: Index,
universe: &mut AccountInfoUniverse,
) -> RawTransaction {
let mut sender_info = universe.get_account_info_mut(sender_index);
let sequence_number = sender_info.sequence_number;
sender_info.sequence_number += 1;
new_raw_transaction(
sender_info.address,
sequence_number,
self.payload,
self.max_gas_amount,
self.gas_unit_price,
self.expiration_time_secs,
)
}
}
impl RawTransaction {
fn strategy_impl(
address_strategy: impl Strategy<Value = AccountAddress>,
payload_strategy: impl Strategy<Value = TransactionPayload>,
) -> impl Strategy<Value = Self> {
// XXX what other constraints do these need to obey?
(
address_strategy,
any::<u64>(),
payload_strategy,
any::<u64>(),
any::<u64>(),
any::<u64>(),
)
.prop_map(
|(
sender,
sequence_number,
payload,
max_gas_amount,
gas_unit_price,
expiration_time_secs,
)| {
new_raw_transaction(
sender,
sequence_number,
payload,
max_gas_amount,
gas_unit_price,
expiration_time_secs,
)
},
)
}
}
fn new_raw_transaction(
sender: AccountAddress,
sequence_number: u64,
payload: TransactionPayload,
max_gas_amount: u64,
gas_unit_price: u64,
expiration_time_secs: u64,
) -> RawTransaction {
match payload {
TransactionPayload::Program(program) => RawTransaction::new(
sender,
sequence_number,
TransactionPayload::Program(program),
max_gas_amount,
gas_unit_price,
Duration::from_secs(expiration_time_secs),
),
TransactionPayload::Module(module) => RawTransaction::new_module(
sender,
sequence_number,
module,
max_gas_amount,
gas_unit_price,
Duration::from_secs(expiration_time_secs),
),
TransactionPayload::Script(script) => RawTransaction::new_script(
sender,
sequence_number,
script,
max_gas_amount,
gas_unit_price,
Duration::from_secs(expiration_time_secs),
),
TransactionPayload::WriteSet(write_set) => {
// It's a bit unfortunate that max_gas_amount etc is generated but
// not used, but it isn't a huge deal.
RawTransaction::new_write_set(sender, sequence_number, write_set)
}
}
}
impl Arbitrary for RawTransaction {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
Self::strategy_impl(any::<AccountAddress>(), any::<TransactionPayload>()).boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl SignatureCheckedTransaction {
// This isn't an Arbitrary impl because this doesn't generate *any* possible SignedTransaction,
// just one kind of them.
pub fn program_strategy(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
) -> impl Strategy<Value = Self> {
Self::strategy_impl(keypair_strategy, TransactionPayload::program_strategy())
}
pub fn script_strategy(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
) -> impl Strategy<Value = Self> {
Self::strategy_impl(keypair_strategy, TransactionPayload::script_strategy())
}
pub fn module_strategy(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
) -> impl Strategy<Value = Self> {
Self::strategy_impl(keypair_strategy, TransactionPayload::module_strategy())
}
pub fn write_set_strategy(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
) -> impl Strategy<Value = Self> {
Self::strategy_impl(keypair_strategy, TransactionPayload::write_set_strategy())
}
pub fn genesis_strategy(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
) -> impl Strategy<Value = Self> {
Self::strategy_impl(keypair_strategy, TransactionPayload::genesis_strategy())
}
fn strategy_impl(
keypair_strategy: impl Strategy<Value = (Ed25519PrivateKey, Ed25519PublicKey)>,
payload_strategy: impl Strategy<Value = TransactionPayload>,
) -> impl Strategy<Value = Self> {
(keypair_strategy, payload_strategy)
.prop_flat_map(|(keypair, payload)| {
let address = AccountAddress::from_public_key(&keypair.1);
(
Just(keypair),
RawTransaction::strategy_impl(Just(address), Just(payload)),
)
})
.prop_map(|((private_key, public_key), raw_txn)| {
raw_txn
.sign(&private_key, public_key)
.expect("signing should always work")
})
}
}
#[derive(Arbitrary, Debug)]
pub struct SignatureCheckedTransactionGen {
raw_transaction_gen: RawTransactionGen,
}
impl SignatureCheckedTransactionGen {
pub fn materialize(
self,
sender_index: Index,
universe: &mut AccountInfoUniverse,
) -> SignatureCheckedTransaction {
let raw_txn = self.raw_transaction_gen.materialize(sender_index, universe);
let account_info = universe.get_account_info(sender_index);
raw_txn
.sign(&account_info.private_key, account_info.public_key.clone())
.expect("Signing raw transaction should work.")
}
}
impl Arbitrary for SignatureCheckedTransaction {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
Self::strategy_impl(keypair_strategy(), any::<TransactionPayload>()).boxed()
}
type Strategy = BoxedStrategy<Self>;
}
/// This `Arbitrary` impl only generates valid signed transactions. TODO: maybe add invalid ones?
impl Arbitrary for SignedTransaction {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
any::<SignatureCheckedTransaction>()
.prop_map(|txn| txn.into_inner())
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl TransactionPayload {
pub fn program_strategy() -> impl Strategy<Value = Self> {
any::<Program>().prop_map(TransactionPayload::Program)
}
pub fn script_strategy() -> impl Strategy<Value = Self> {
any::<Script>().prop_map(TransactionPayload::Script)
}
pub fn module_strategy() -> impl Strategy<Value = Self> {
any::<Module>().prop_map(TransactionPayload::Module)
}
pub fn write_set_strategy() -> impl Strategy<Value = Self> {
any::<WriteSet>().prop_map(TransactionPayload::WriteSet)
}
/// Similar to `write_set_strategy` except generates a valid write set for the genesis block.
pub fn genesis_strategy() -> impl Strategy<Value = Self> {
WriteSet::genesis_strategy().prop_map(TransactionPayload::WriteSet)
}
}
/// The `Arbitrary` impl only generates validation statuses since the full enum is too large.
impl Arbitrary for StatusCode {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
prop_oneof![
Just(StatusCode::UNKNOWN_VALIDATION_STATUS),
Just(StatusCode::INVALID_SIGNATURE),
Just(StatusCode::INVALID_AUTH_KEY),
Just(StatusCode::SEQUENCE_NUMBER_TOO_OLD),
Just(StatusCode::SEQUENCE_NUMBER_TOO_NEW),
Just(StatusCode::INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE),
Just(StatusCode::TRANSACTION_EXPIRED),
Just(StatusCode::SENDING_ACCOUNT_DOES_NOT_EXIST),
Just(StatusCode::REJECTED_WRITE_SET),
Just(StatusCode::INVALID_WRITE_SET),
Just(StatusCode::EXCEEDED_MAX_TRANSACTION_SIZE),
Just(StatusCode::UNKNOWN_SCRIPT),
Just(StatusCode::UNKNOWN_MODULE),
Just(StatusCode::MAX_GAS_UNITS_EXCEEDS_MAX_GAS_UNITS_BOUND),
Just(StatusCode::MAX_GAS_UNITS_BELOW_MIN_TRANSACTION_GAS_UNITS),
Just(StatusCode::GAS_UNIT_PRICE_BELOW_MIN_BOUND),
Just(StatusCode::GAS_UNIT_PRICE_ABOVE_MAX_BOUND),
]
.boxed()
}
}
prop_compose! {
fn arb_transaction_status()(vm_status in any::<VMStatus>()) -> TransactionStatus {
vm_status.into()
}
}
impl Arbitrary for TransactionStatus {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
arb_transaction_status().boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl Arbitrary for TransactionPayload {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
// Most transactions in practice will be programs, but other parts of the system should
// at least not choke on write set strategies so introduce them with decent probability.
// The figures below are probability weights.
prop_oneof![
4 => Self::program_strategy(),
4 => Self::script_strategy(),
1 => Self::module_strategy(),
1 => Self::write_set_strategy(),
]
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl Arbitrary for Program {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
// XXX This should eventually be an actually valid program, maybe?
// How should we generate random modules?
// The vector sizes are picked out of thin air.
(
vec(any::<u8>(), 0..100),
vec(any::<Vec<u8>>(), 0..100),
vec(any::<TransactionArgument>(), 0..10),
)
.prop_map(|(code, modules, args)| Program::new(code, modules, args))
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl Arbitrary for Script {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
// XXX This should eventually be an actually valid program, maybe?
// The vector sizes are picked out of thin air.
(
vec(any::<u8>(), 0..100),
vec(any::<TransactionArgument>(), 0..10),
)
.prop_map(|(code, args)| Script::new(code, args))
.boxed()
}
}
impl Arbitrary for Module {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
// XXX How should we generate random modules?
// The vector sizes are picked out of thin air.
vec(any::<u8>(), 0..100).prop_map(Module::new).boxed()
}
}
impl Arbitrary for TransactionArgument {
type Parameters = ();
fn arbitrary_with(_args: ()) -> Self::Strategy {
prop_oneof![
any::<u64>().prop_map(TransactionArgument::U64),
any::<AccountAddress>().prop_map(TransactionArgument::Address),
any::<ByteArray>().prop_map(TransactionArgument::ByteArray),
".*".prop_map(TransactionArgument::String),
]
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
prop_compose! {
fn arb_validator_signature_for_hash(hash: HashValue)(
hash in Just(hash),
(private_key, public_key) in keypair_strategy(),
) -> (AccountAddress, Ed25519Signature) {
let signature = private_key.sign_message(&hash);
(AccountAddress::from_public_key(&public_key), signature)
}
}
impl Arbitrary for LedgerInfoWithSignatures<Ed25519Signature> {
type Parameters = SizeRange;
fn arbitrary_with(num_validators_range: Self::Parameters) -> Self::Strategy {
(any::<LedgerInfo>(), Just(num_validators_range))
.prop_flat_map(|(ledger_info, num_validators_range)| {
let hash = ledger_info.hash();
(
Just(ledger_info),
prop::collection::vec(
arb_validator_signature_for_hash(hash),
num_validators_range,
),
)
})
.prop_map(|(ledger_info, signatures)| {
LedgerInfoWithSignatures::new(ledger_info, signatures.into_iter().collect())
})
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
prop_compose! {
fn arb_update_to_latest_ledger_response()(
response_items in vec(any::<ResponseItem>(), 0..10),
ledger_info_with_sigs in any::<LedgerInfoWithSignatures<Ed25519Signature>>(),
validator_change_events in vec(any::<ValidatorChangeEventWithProof<Ed25519Signature>>(), 0..10),
) -> UpdateToLatestLedgerResponse<Ed25519Signature> {
UpdateToLatestLedgerResponse::new(
response_items, ledger_info_with_sigs, validator_change_events)
}
}
impl Arbitrary for UpdateToLatestLedgerResponse<Ed25519Signature> {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
arb_update_to_latest_ledger_response().boxed()
}
type Strategy = BoxedStrategy<Self>;
}
#[derive(Arbitrary, Debug)]
pub struct ContractEventGen {
payload: Vec<u8>,
use_sent_key: bool,
}
impl ContractEventGen {
pub fn materialize(
self,
account_index: Index,
universe: &mut AccountInfoUniverse,
) -> ContractEvent {
let account_info = universe.get_account_info_mut(account_index);
let event_handle = if self.use_sent_key {
&mut account_info.sent_event_handle
} else {
&mut account_info.received_event_handle
};
let sequence_number = event_handle.count();
*event_handle.count_mut() += 1;
let event_key = event_handle.key();
ContractEvent::new(*event_key, sequence_number, self.payload)
}
}
#[derive(Arbitrary, Debug)]
struct AccountResourceGen {
balance: u64,
delegated_key_rotation_capability: bool,
delegated_withdrawal_capability: bool,
}
impl AccountResourceGen {
pub fn materialize(
self,
account_index: Index,
universe: &AccountInfoUniverse,
) -> AccountResource {
let account_info = universe.get_account_info(account_index);
AccountResource::new(
self.balance,
account_info.sequence_number,
ByteArray::new(account_info.public_key.to_bytes().to_vec()),
self.delegated_key_rotation_capability,
self.delegated_withdrawal_capability,
account_info.sent_event_handle.clone(),
account_info.received_event_handle.clone(),
)
}
}
#[derive(Arbitrary, Debug)]
struct AccountStateBlobGen {
account_resource_gen: AccountResourceGen,
}
impl AccountStateBlobGen {
pub fn materialize(
self,
account_index: Index,
universe: &AccountInfoUniverse,
) -> AccountStateBlob {
let account_resource = self
.account_resource_gen
.materialize(account_index, universe);
AccountStateBlob::from(account_resource)
}
}
impl ContractEvent {
pub fn strategy_impl(
event_key_strategy: impl Strategy<Value = EventKey>,
) -> impl Strategy<Value = Self> {
(event_key_strategy, any::<u64>(), vec(any::<u8>(), 1..10)).prop_map(
|(event_key, seq_num, event_data)| ContractEvent::new(event_key, seq_num, event_data),
)
}
}
impl EventHandle {
pub fn strategy_impl(
event_key_strategy: impl Strategy<Value = EventKey>,
) -> impl Strategy<Value = Self> {
// We only generate small counters so that it won't overflow.
(event_key_strategy, 0..std::u64::MAX / 2)
.prop_map(|(event_key, counter)| EventHandle::new(event_key, counter))
}
}
impl Arbitrary for EventHandle {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
EventHandle::strategy_impl(any::<EventKey>()).boxed()
}
}
impl Arbitrary for ContractEvent {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
ContractEvent::strategy_impl(any::<EventKey>()).boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl Arbitrary for TransactionToCommit {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
(
any_with::<AccountInfoUniverse>(1),
any::<TransactionToCommitGen>(),
)
.prop_map(|(mut universe, gen)| gen.materialize(&mut universe))
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
/// Represents information already determined for generating a `TransactionToCommit`, along with
/// to be determined information that needs to settle upon `materialize()`, for example a to be
/// determined account can be represented by an `Index` which will be materialized to an entry in
/// the `AccountInfoUniverse`.
///
/// See `TransactionToCommitGen::materialize()` and supporting types.
#[derive(Debug)]
pub struct TransactionToCommitGen {
/// Transaction sender and the transaction itself.
transaction_gen: (Index, SignatureCheckedTransactionGen),
/// Events: account and event content.
event_gens: Vec<(Index, ContractEventGen)>,
/// State updates: account and the blob.
/// N.B. the transaction sender and event owners must be updated to reflect information such as
/// sequence numbers so that test data generated through this is more realistic and logical.
account_state_gens: Vec<(Index, AccountStateBlobGen)>,
/// Gas used.
gas_used: u64,
/// Transaction status
major_status: StatusCode,
}
impl TransactionToCommitGen {
/// Materialize considering current states in the universe.
pub fn materialize(self, universe: &mut AccountInfoUniverse) -> TransactionToCommit {
let (sender_index, txn_gen) = self.transaction_gen;
let signed_txn = txn_gen.materialize(sender_index, universe).into_inner();
let events = self
.event_gens
.into_iter()
.map(|(index, event_gen)| event_gen.materialize(index, universe))
.collect();
// Account states must be materialized last, to reflect the latest account and event
// sequence numbers.
let account_states = self
.account_state_gens
.into_iter()
.map(|(index, blob_gen)| {
(
universe.get_account_info(index).address,
blob_gen.materialize(index, universe),
)
})
.collect();
TransactionToCommit::new(
signed_txn,
account_states,
events,
self.gas_used,
self.major_status,
)
}
}
impl Arbitrary for TransactionToCommitGen {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
(
(
any::<Index>(),
any::<AccountStateBlobGen>(),
any::<SignatureCheckedTransactionGen>(),
),
vec(
(
any::<Index>(),
any::<AccountStateBlobGen>(),
any::<ContractEventGen>(),
),
0..=2,
),
vec((any::<Index>(), any::<AccountStateBlobGen>()), 0..=1),
any::<u64>(),
any::<StatusCode>(),
)
.prop_map(
|(sender, event_emitters, mut touched_accounts, gas_used, major_status)| {
// To reflect change of account/event sequence numbers, txn sender account and
// event emitter accounts must be updated.
let (sender_index, sender_blob_gen, txn_gen) = sender;
touched_accounts.push((sender_index, sender_blob_gen));
let mut event_gens = Vec::new();
for (index, blob_gen, event_gen) in event_emitters {
touched_accounts.push((index, blob_gen));
event_gens.push((index, event_gen));
}
Self {
transaction_gen: (sender_index, txn_gen),
event_gens,
account_state_gens: touched_accounts,
gas_used,
major_status,
}
},
)
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
fn arb_transaction_list_with_proof() -> impl Strategy<Value = TransactionListWithProof> {
vec(
(
any::<SignedTransaction>(),
any::<TransactionInfo>(),
vec(any::<ContractEvent>(), 0..10),
),
0..10,
)
.prop_flat_map(|transaction_and_infos_and_events| {
let transaction_and_infos: Vec<_> = transaction_and_infos_and_events
.clone()
.into_iter()
.map(|(transaction, info, _event)| (transaction, info))
.collect();
let events: Vec<_> = transaction_and_infos_and_events
.into_iter()
.map(|(_transaction, _info, event)| event)
.collect();
(
Just(transaction_and_infos),
option::of(Just(events)),
any::<Version>(),
any::<AccumulatorProof>(),
any::<AccumulatorProof>(),
)
})
.prop_map(
|(
transaction_and_infos,
events,
first_txn_version,
proof_of_first_txn,
proof_of_last_txn,
)| {
match transaction_and_infos.len() {
0 => TransactionListWithProof::new_empty(),
1 => TransactionListWithProof::new(
transaction_and_infos,
events,
Some(first_txn_version),
Some(proof_of_first_txn),
None,
),
_ => TransactionListWithProof::new(
transaction_and_infos,
events,
Some(first_txn_version),
Some(proof_of_first_txn),
Some(proof_of_last_txn),
),
}
},
)
}
impl Arbitrary for TransactionListWithProof {
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
arb_transaction_list_with_proof().boxed()
}
type Strategy = BoxedStrategy<Self>;
}
impl Arbitrary for BlockMetaData {
type Parameters = SizeRange;
fn arbitrary_with(num_validators_range: Self::Parameters) -> Self::Strategy {
let signature_strategy = (any::<HashValue>(), Just(num_validators_range)).prop_flat_map(
|(hash, num_validators_range)| {
prop::collection::vec(arb_validator_signature_for_hash(hash), num_validators_range)
},
);
(
any::<HashValue>(),
any::<u64>(),
signature_strategy,
any::<AccountAddress>(),
)
.prop_map(|(id, timestamp, signatures, proposer)| {
BlockMetaData::new(id, timestamp, signatures.into_iter().collect(), proposer)
})
.boxed()
}
type Strategy = BoxedStrategy<Self>;
}
| 32.535874 | 104 | 0.605954 |
e9f12c6dd72c1ac64acdec47cb66ff08e316e3a9 | 678 | use crate::{BotResult, CommandData, Context};
use std::sync::Arc;
#[command]
#[short_desc("https://youtu.be/0jgrCKhxE1s?t=77")]
#[bucket("songs")]
#[no_typing()]
async fn fireandflames(ctx: Arc<Context>, data: CommandData) -> BotResult<()> {
let (lyrics, delay) = _fireandflames();
super::song_send(lyrics, delay, ctx, data).await
}
pub fn _fireandflames() -> (&'static [&'static str], u64) {
let lyrics = &[
"So far away we wait for the day-yay",
"For the lives all so wasted and gooone",
"We feel the pain of a lifetime lost in a thousand days",
"Through the fire and the flames we carry ooooooon",
];
(lyrics, 3000)
}
| 27.12 | 79 | 0.635693 |
4b3a45acf038286d82bc5a071ccb6c6886c4685b | 1,046 | mod opt;
mod options;
use crate::options::Options;
use anyhow::{bail, Context, Result};
use std::env;
use std::{fs, process::Command};
use structopt::StructOpt;
fn main() -> Result<()> {
let opts = Options::from_args();
let wizen = env::var("JAVY_WIZEN");
if wizen.eq(&Ok("1".into())) {
let wasm: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/engine.wasm"));
opt::Optimizer::new(wasm)
.optimize(true)
.write_optimized_wasm(opts.output)?;
env::remove_var("JAVY_WIZEN");
return Ok(());
}
let contents = fs::File::open(&opts.input)
.with_context(|| format!("Failed to open input file {}", opts.input.display()))?;
let self_cmd = env::args().next().unwrap();
env::set_var("JAVY_WIZEN", "1");
let status = Command::new(self_cmd)
.arg(&opts.input)
.arg("-o")
.arg(&opts.output)
.stdin(contents)
.status()?;
if !status.success() {
bail!("Couldn't create wasm from input");
}
Ok(())
}
| 24.325581 | 89 | 0.561185 |
bb569091c82895a1754d1a4318f1569351b93cac | 6,373 | use bitcoins::prelude::*;
use crate::esplora::*;
use crate::{provider::ProviderError, reqwest_utils};
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct MerkleProof {
pub block_height: usize,
pub merkle: Vec<String>,
pub pos: usize,
}
impl MerkleProof {
pub(crate) async fn fetch_by_txid(
client: &reqwest::Client,
api_root: &str,
txid: TXID,
) -> Result<Self, FetchError> {
let url = format!("{}/tx/{}/MerkleProof", api_root, txid.to_be_hex());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct BlockStatus {
pub in_best_chain: bool,
#[serde(default = "String::new")]
pub next_best: String,
}
impl BlockStatus {
pub(crate) async fn fetch_by_digest(
client: &reqwest::Client,
api_root: &str,
digest: BlockHash,
) -> Result<Self, FetchError> {
let url = format!("{}/block/{}/status", api_root, digest.to_be_hex());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct EsploraTxStatus {
pub confirmed: bool,
#[serde(default = "usize::min_value")]
pub block_height: usize,
#[serde(default = "String::new")]
pub block_hash: String,
}
impl EsploraTxStatus {
pub(crate) async fn fetch_by_txid(
client: &reqwest::Client,
api_root: &str,
txid: TXID,
) -> Result<Self, FetchError> {
let url = format!("{}/tx/{}/status", api_root, txid.to_be_hex());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct EsploraTx {
pub status: EsploraTxStatus,
pub txid: String,
}
impl EsploraTx {
pub(crate) async fn fetch_by_txid(
client: &reqwest::Client,
api_root: &str,
txid: TXID,
) -> Result<Self, FetchError> {
let url = format!("{}/tx/{}", api_root, txid.to_be_hex());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct EsploraUTXO {
/// TXID in BE format
pub txid: String,
/// Index in vout
pub vout: usize,
/// UTXO value
pub value: usize,
}
impl EsploraUTXO {
pub(crate) async fn fetch_by_address(
client: &reqwest::Client,
api_root: &str,
addr: &Address,
) -> Result<Vec<EsploraUTXO>, FetchError> {
let url = format!("{}/address/{}/utxo", api_root, addr.as_string());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
pub(crate) fn into_utxo(self, addr: &Address) -> Result<UTXO, ProviderError> {
let script_pubkey = bitcoins::Net::decode_address(addr);
let outpoint = BitcoinOutpoint::from_explorer_format(
TXID::deserialize_hex(&self.txid)?,
self.vout as u32,
);
let spend_script = SpendScript::from_script_pubkey(&script_pubkey);
Ok(UTXO::new(
outpoint,
self.value as u64,
script_pubkey,
spend_script,
))
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct Outspend {
/// Whether the output has been spent
pub spent: bool,
/// The TXID that spend it
#[serde(default = "String::new")]
pub txid_be: String,
/// The index of the spending input in that transaction's Vin
#[serde(default = "usize::max_value")]
pub vin: usize,
/// The status of the spending TX
pub status: EsploraTxStatus,
}
impl Outspend {
/// Fetch an Outspend by an outpoint referencing it
pub(crate) async fn fetch_by_outpoint(
client: &reqwest::Client,
api_root: &str,
outpoint: &BitcoinOutpoint,
) -> Result<Option<Outspend>, FetchError> {
let txid_be = outpoint.txid_be_hex();
let idx = outpoint.idx;
Outspend::fetch_one(client, api_root, &txid_be, idx).await
}
/// Fetch the outspend at a specific index. If this index does not exist, an error will be
/// returned.
pub(crate) async fn fetch_one(
client: &reqwest::Client,
api_root: &str,
txid_be_hex: &str,
idx: u32,
) -> Result<Option<Outspend>, FetchError> {
let url = format!("{}/tx/{}/outspend/{}", api_root, txid_be_hex, idx);
let o: Outspend = reqwest_utils::ez_fetch_json(client, &url).await?;
if o.txid_be.is_empty() {
Ok(None)
} else {
Ok(Some(o))
}
}
}
#[derive(serde::Deserialize, Clone, Debug)]
pub(crate) struct EsploraBlock {
pub(crate) id: String,
pub(crate) height: usize,
pub(crate) version: u32,
pub(crate) timestamp: u32,
pub(crate) bits: u32,
pub(crate) nonce: u32,
// difficulty
pub(crate) merkle_root: String,
pub(crate) tx_count: usize,
pub(crate) size: usize,
pub(crate) weight: usize,
pub(crate) previousblockhash: String,
}
impl EsploraBlock {
pub(crate) fn serialize(&self) -> RawHeader {
let mut h = [0u8; 80];
h[0..4].copy_from_slice(&self.version.to_le_bytes());
h[4..36].copy_from_slice(
BlockHash::from_be_hex(&self.previousblockhash)
.expect("no malformed from API")
.as_slice(),
);
h[36..68].copy_from_slice(
BlockHash::from_be_hex(&self.merkle_root)
.expect("no malformed from API")
.as_slice(),
);
h[68..72].copy_from_slice(&self.timestamp.to_le_bytes());
h[72..76].copy_from_slice(&self.bits.to_le_bytes());
h[76..80].copy_from_slice(&self.nonce.to_le_bytes());
h.into()
}
pub(crate) async fn fetch_by_digest(
client: &reqwest::Client,
api_root: &str,
digest: BlockHash,
) -> Result<Self, FetchError> {
let url = format!("{}/block/{}", api_root, digest.to_be_hex());
Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
}
// pub(crate) async fn fetch_from_height(
// client: &reqwest::Client,
// api_root: &str,
// height: usize,
// ) -> Result<[Self; 10], FetchError> {
// let url = format!("{}/block/{}", api_root, height);
// Ok(reqwest_utils::ez_fetch_json(client, &url).await?)
// }
}
| 29.920188 | 94 | 0.600031 |
fca2298fc836233dccd75c87986a3b3904c2f112 | 372 | use extras::Get;
#[test]
fn test_blank() {
let extra = Get::default();
let raw: &[u8] = extra.as_ref();
assert_eq!(0, extra.flags());
assert_eq!([0; 4], raw);
}
#[test]
fn test_flags() {
let extra = Get::new(0xdeadbeef);
let raw: &[u8] = extra.as_ref();
assert_eq!(0xdeadbeef, extra.flags());
assert_eq!([0xde, 0xad, 0xbe, 0xef], raw);
}
| 20.666667 | 46 | 0.577957 |
50ac47a67e3f213d7e979ecc717e243dd13f8a8d | 11,771 | //! Contains definitions of the settings schema that are used to generate the configuration directory
//! and to format the output files.
use misc::{SettingsPage, SettingRow, SettingType};
/// All settings pages, in the order they are generated/rendered.
pub const PAGE_LIST: &'static [&'static SettingsPage] = &[
    &POSTGRES_SETTINGS,
    &REDIS_SETTINGS,
    &MM_SETTINGS,
    &FXCM_SETTINGS,
    &GENERAL_SETTINGS,
    &COMMANDSERVER_QUERYSERVER_SETTINGS,
    &RUNTIME_SETTINGS,
    &FUZZER_SETTINGS,
    &DATA_DOWNLOADER_SETTINGS,
];
/// Connection settings for the platform's PostgreSQL database.
///
/// Value is unchanged from before; the rows have been reformatted to the
/// one-field-per-line style used by every other page in this file (several
/// rows previously mixed inline braces and stray line breaks).
pub const POSTGRES_SETTINGS: SettingsPage = SettingsPage {
    name: "Postgres",
    rows: &[
        SettingRow {
            id: "postgres_host",
            name: "Host",
            default: Some("localhost"),
            setting_type: SettingType::String,
            comment: None,
        },
        SettingRow {
            id: "postgres_port",
            name: "Port",
            default: Some("5432"),
            setting_type: SettingType::Usize,
            comment: None,
        },
        SettingRow {
            id: "postgres_user",
            name: "Username",
            default: None,
            setting_type: SettingType::String,
            comment: None,
        },
        SettingRow {
            id: "postgres_password",
            name: "Password",
            default: None,
            setting_type: SettingType::String,
            comment: None,
        },
        SettingRow {
            id: "postgres_db",
            name: "Database",
            default: None,
            setting_type: SettingType::String,
            comment: None,
        },
    ],
    comment: Some(&["PostgreSQL Settings"]),
};
/// Settings page for the Redis connection used by the platform.
pub const REDIS_SETTINGS: SettingsPage = SettingsPage {
    name: "Redis",
    rows: &[
        SettingRow {
            id: "redis_host",
            name: "Host",
            default: Some("redis://localhost:6379/"),
            setting_type: SettingType::String,
            comment: Some("In this format: redis://hostname:port/"),
        },
    ],
    comment: Some(&["Redis Settings"]),
};
/// Settings page for the Management/Monitoring web interface caches.
pub const MM_SETTINGS: SettingsPage = SettingsPage {
    name: "Management/Monitoring Web Interface",
    rows: &[
        SettingRow {
            id: "mm_cache_size",
            name: "Message Cache Size",
            default: Some("2500"),
            setting_type: SettingType::Usize,
            comment: Some("How many of each Command, Response, and Log Line to keep in memory"),
        },
        SettingRow {
            id: "store_buffer_size",
            name: "Tantivy Document Store Buffer Size",
            default: Some("50000000"),
            setting_type: SettingType::Usize,
            comment: Some("How large (in bytes) to make the Tantivy writer buffer for the document store"),
        },
    ],
    comment: Some(&["Settings for configuring the Management/Monitoring Web Interface."]),
};
/// Settings page for FXCM broker credentials and API endpoint.
pub const FXCM_SETTINGS: SettingsPage = SettingsPage {
    name: "FXCM",
    rows: &[
        SettingRow {
            id: "fxcm_username",
            name: "Username",
            default: None,
            setting_type: SettingType::String,
            comment: None,
        },
        SettingRow {
            id: "fxcm_password",
            name: "Password",
            default: None,
            setting_type: SettingType::String,
            comment: None,
        },
        SettingRow {
            id: "fxcm_url",
            name: "URL",
            default: Some("http://www.fxcorporate.com/Hosts.jsp"),
            setting_type: SettingType::String,
            comment: Some("Path to the `Hosts.jsp` file for the FXCM API."),
        },
        SettingRow {
            id: "fxcm_pin",
            name: "PIN (Optional)",
            default: Some(""),
            setting_type: SettingType::OptionString,
            comment: None,
        },
    ],
    // NOTE: the embedded "// " prefixes are part of the emitted config-file text.
    comment: Some(&[
        "FXCM Broker Settings. Should be valid credentials for a FXCM broker account. You can get",
        "// a practice account that is compatible with the platform here for free with no account creation",
        "// or registration required: https://www.fxcm.com/forex-trading-demo/",
    ])
};
/// Miscellaneous platform-wide settings: Redis channels, paths, and ports.
pub const GENERAL_SETTINGS: SettingsPage = SettingsPage {
    name: "General",
    rows: &[
        SettingRow {
            id: "redis_responses_channel",
            name: "Responses Channel",
            default: Some("responses"),
            setting_type: SettingType::String,
            comment: Some("Changing this will currently break the platform; it's just here for backwards compatibility."),
        },
        SettingRow {
            id: "redis_control_channel",
            name: "Control Channel",
            default: Some("control"),
            setting_type: SettingType::String,
            comment: Some("Changing this will currently break the platform; it's just here for backwards compatibility."),
        },
        SettingRow {
            id: "redis_log_channel",
            name: "Log Channel",
            default: Some("log"),
            setting_type: SettingType::String,
            comment: Some("The redis pub/sub channel on which log messages will be sent."),
        },
        SettingRow {
            id: "data_dir",
            name: "Data Directory",
            default: None,
            setting_type: SettingType::String,
            comment: Some("Data directory for the platform where things like historical ticks and settings are stored."),
        },
        SettingRow {
            id: "websocket_port",
            name: "MM Websocket Port",
            default: Some("7037"),
            setting_type: SettingType::Usize,
            comment: Some("This port must be open on the host in order for the client to communicate over it."),
        },
        SettingRow {
            id: "mm_port",
            name: "MM Port",
            default: Some("8002"),
            setting_type: SettingType::Usize,
            comment: Some("The port the MM web GUI will listen on. Deprecated."),
        },
        SettingRow {
            id: "node_binary_path",
            name: "NodeJS Binary Path",
            default: None,
            setting_type: SettingType::String,
            comment: Some("The absolute path to the `node` binary."),
        },
        SettingRow {
            id: "redis_server_binary_path",
            name: "Redis Server Path",
            default: Some(""),
            setting_type: SettingType::OptionString,
            comment: Some("The absolute path to the `redis-server` executable. Empty if Redis is installed remotely."),
        },
        SettingRow {
            id: "logger_persistance_table",
            name: "Logger Table Name",
            default: Some("logs"),
            setting_type: SettingType::String,
            comment: None,
        },
    ],
    comment: None,
};
/// Tuning knobs for the CommandServer/QueryServer messaging layer.
pub const COMMANDSERVER_QUERYSERVER_SETTINGS: SettingsPage = SettingsPage {
    name: "CommandServer + QueryServer Settings",
    rows: &[
        SettingRow {
            id: "cs_timeout",
            name: "CommandServer Timeout",
            default: Some("399"),
            setting_type: SettingType::Usize,
            // NOTE(review): "recieved" typo is inside emitted config text; left as-is here.
            comment: Some(indoc!(
                "The timeout of commands sent in ms. If a response isn't recieved within the timeout window, \
                the command is re-sent."
            )),
        },
        SettingRow {
            id: "conn_senders",
            name: "CommandServer Worker Count",
            default: Some("4"),
            setting_type: SettingType::Usize,
            comment: None,
        },
        SettingRow {
            id: "cs_max_retries",
            name: "Max CommandServer message retransmit attempts",
            default: Some("3"),
            setting_type: SettingType::Usize,
            comment: None,
        },
        SettingRow {
            id: "qs_connections",
            name: "QueryServer Worker Count",
            default: Some("4"),
            setting_type: SettingType::Usize,
            comment: None,
        },
        SettingRow {
            id: "database_conns",
            name: "QueryServer DB Connection Count",
            default: Some("10"),
            setting_type: SettingType::Usize,
            comment: None,
        },
    ],
    comment: Some(&["CommandServer/QueryServer settings. You can leave these at defaults safely."]),
};
/// Runtime behavior toggles for spawned platform instances.
pub const RUNTIME_SETTINGS: SettingsPage = SettingsPage {
    name: "Runtime Settings",
    rows: &[
        SettingRow {
            id: "kill_stragglers",
            name: "Kill Stragglers",
            default: Some("true"),
            setting_type: SettingType::Boolean,
            comment: Some("If instances from a previous spawner are detected when the spawner spawns, kill them?"),
        },
        SettingRow {
            id: "reset_db_on_load",
            name: "Reset DB On Load",
            default: Some("false"),
            setting_type: SettingType::Boolean,
            comment: Some("If true, entire PostgreSQL database will be wiped every time a Tick Processor is spawned."),
        },
    ],
    comment: None,
};
/// Settings for the broker-shim fuzzer strategy (RNG determinism and seed).
pub const FUZZER_SETTINGS: SettingsPage = SettingsPage {
    name: "Fuzzer Settings",
    comment: Some(&[
        "Settings for configuring the fuzzer strategy used to test broker shims.",
        "// For more info, see README.md in /private/strategies/fuzzer",
    ]),
    rows: &[
        SettingRow {
            id: "fuzzer_deterministic_rng",
            name: "Use Deterministic RNG",
            default: Some("true"),
            setting_type: SettingType::Boolean,
            comment: Some("Set if the RNG used to generate the actions of the fuzzer should be seeded with the same seed every run."),
        },
        SettingRow {
            id: "fuzzer_seed",
            name: "Seed String",
            default: Some("S0 R4nD0m"),
            setting_type: SettingType::String,
            comment: Some("The string from which the fuzzer's RNG is seeded from (if the option is enabled)."),
        },
    ],
};
/// Endpoints and buffer sizes for the external data downloaders (IEX, Poloniex).
pub const DATA_DOWNLOADER_SETTINGS: SettingsPage = SettingsPage {
    name: "Data Downloader Settings",
    comment: Some(&["Settings pertaining to the platform's data downloaders used to record or retreive data from external sources"]),
    rows: &[
        SettingRow {
            id: "iex_data_downloader_tops_url",
            name: "IEX TOPS API URL",
            default: Some("https://ws-api.iextrading.com/1.0/tops"),
            setting_type: SettingType::String,
            comment: Some("The API endpoint for the IEX top-of-book API. Should be good default unless they change it. If they do, please open an issue."),
        },
        SettingRow {
            id: "poloniex_ws_api_url",
            name: "Poloniex WS API URL",
            default: Some("wss://api.poloniex.com"),
            setting_type: SettingType::String,
            comment: Some("The API endpoint for the Poloniex websocket API. Should be good as default; if it's changed please open an issue."),
        },
        SettingRow {
            id: "poloniex_http_api_url",
            name: "Poloniex HTTP API URL",
            default: Some("https://poloniex.com/public"),
            setting_type: SettingType::String,
            comment: Some("The API endpoint for the Poloniex HTTP API. Should be good as default; if it's changed please open an issue."),
        },
        SettingRow {
            id: "poloniex_ws_cache_size",
            name: "Poloniex WebSocket Message Cache Size",
            default: Some("500"),
            setting_type: SettingType::Usize,
            comment: Some("MUST BE MULTIPLE OF 10! How many messages to buffer before flushing into the sink. The buffer is used to catch unordered messages."),
        },
    ],
};
| 35.778116 | 162 | 0.5607 |
f9c10761ae24aa4550da1eafdfbb2e05ddbd4156 | 18,993 | use crate::network::{IoEvent, Network};
use crate::user_config::UserConfig;
use super::util::{Flag, Format, FormatType, JumpDirection, Type};
use anyhow::{anyhow, Result};
use rand::{thread_rng, Rng};
use rspotify::model::{context::CurrentlyPlaybackContext, PlayingItem};
/// Non-interactive (CLI) front-end state: a network handle plus user config.
pub struct CliApp<'a> {
  // Handle used to dispatch IoEvents against the Spotify API.
  pub net: Network<'a>,
  // User-level configuration (used e.g. when formatting output).
  pub config: UserConfig,
}
// Non-concurrent functions
// I feel that async in a cli is not working
// I just .await all processes and directly interact
// by calling network.handle_network_event
impl<'a> CliApp<'a> {
pub fn new(net: Network<'a>, config: UserConfig) -> Self {
Self { net, config }
}
  /// Return whether the track with `id` is in the user's saved ("liked") tracks,
  /// refreshing `liked_song_ids_set` from the API first.
  async fn is_a_saved_track(&mut self, id: &str) -> bool {
    // Update the liked_song_ids_set
    self
      .net
      .handle_network_event(IoEvent::CurrentUserSavedTracksContains(
        vec![id.to_string()],
      ))
      .await;
    self.net.app.lock().await.liked_song_ids_set.contains(id)
  }
pub fn format_output(&self, mut format: String, values: Vec<Format>) -> String {
for val in values {
format = format.replace(val.get_placeholder(), &val.inner(self.config.clone()));
}
// Replace unsupported flags with 'None'
for p in &["%a", "%b", "%t", "%p", "%h", "%u", "%d", "%v", "%f", "%s"] {
format = format.replace(p, "None");
}
format.trim().to_string()
}
  // spt playback -t
  /// Toggle play/pause: pause if currently playing, otherwise (re)start playback.
  pub async fn toggle_playback(&mut self) {
    let context = self.net.app.lock().await.current_playback_context.clone();
    if let Some(c) = context {
      if c.is_playing {
        self.net.handle_network_event(IoEvent::PausePlayback).await;
        return;
      }
    }
    // Not playing (or no playback context at all): resume/start playback.
    self
      .net
      .handle_network_event(IoEvent::StartPlayback(None, None, None))
      .await;
  }
  // spt pb --share-track (share the current playing song)
  // Basically copy-pasted the 'copy_song_url' function
  /// Build an open.spotify.com URL for the currently playing track or episode.
  ///
  /// Errors when nothing is playing (no playback context / no item).
  pub async fn share_track_or_episode(&mut self) -> Result<String> {
    let app = self.net.app.lock().await;
    if let Some(CurrentlyPlaybackContext {
      item: Some(item), ..
    }) = &app.current_playback_context
    {
      match item {
        PlayingItem::Track(track) => Ok(format!(
          "https://open.spotify.com/track/{}",
          track.id.to_owned().unwrap_or_default()
        )),
        PlayingItem::Episode(episode) => Ok(format!(
          "https://open.spotify.com/episode/{}",
          episode.id.to_owned()
        )),
      }
    } else {
      Err(anyhow!(
        "failed to generate a shareable url for the current song"
      ))
    }
  }
  // spt pb --share-album (share the current album)
  // Basically copy-pasted the 'copy_album_url' function
  /// Build an open.spotify.com URL for the album of the current track, or the
  /// show of the current episode.
  ///
  /// Errors when nothing is playing (no playback context / no item).
  pub async fn share_album_or_show(&mut self) -> Result<String> {
    let app = self.net.app.lock().await;
    if let Some(CurrentlyPlaybackContext {
      item: Some(item), ..
    }) = &app.current_playback_context
    {
      match item {
        PlayingItem::Track(track) => Ok(format!(
          "https://open.spotify.com/album/{}",
          track.album.id.to_owned().unwrap_or_default()
        )),
        PlayingItem::Episode(episode) => Ok(format!(
          "https://open.spotify.com/show/{}",
          episode.show.id.to_owned()
        )),
      }
    } else {
      Err(anyhow!(
        "failed to generate a shareable url for the current song"
      ))
    }
  }
// spt ... -d ... (specify device to control)
pub async fn set_device(&mut self, name: String) -> Result<()> {
// Change the device if specified by user
let mut app = self.net.app.lock().await;
let mut device_index = 0;
if let Some(dp) = &app.devices {
for (i, d) in dp.devices.iter().enumerate() {
if d.name == name {
device_index = i;
// Save the id of the device
self
.net
.client_config
.set_device_id(d.id.clone())
.map_err(|_e| anyhow!("failed to use device with name '{}'", d.name))?;
}
}
} else {
// Error out if no device is available
return Err(anyhow!("no device available"));
}
app.selected_device_index = Some(device_index);
Ok(())
}
// spt query ... --limit LIMIT (set max search limit)
pub async fn update_query_limits(&mut self, max: String) -> Result<()> {
let num = max
.parse::<u32>()
.map_err(|_e| anyhow!("limit must be between 1 and 50"))?;
// 50 seems to be the maximum limit
if num > 50 || num == 0 {
return Err(anyhow!("limit must be between 1 and 50"));
};
self
.net
.handle_network_event(IoEvent::UpdateSearchLimits(num, num))
.await;
Ok(())
}
pub async fn volume(&mut self, vol: String) -> Result<()> {
let num = vol
.parse::<u32>()
.map_err(|_e| anyhow!("volume must be between 0 and 100"))?;
// Check if it's in range
if num > 100 {
return Err(anyhow!("volume must be between 0 and 100"));
};
self
.net
.handle_network_event(IoEvent::ChangeVolume(num as u8))
.await;
Ok(())
}
// spt playback --next / --previous
pub async fn jump(&mut self, d: &JumpDirection) {
match d {
JumpDirection::Next => self.net.handle_network_event(IoEvent::NextTrack).await,
JumpDirection::Previous => self.net.handle_network_event(IoEvent::PreviousTrack).await,
}
}
  // spt query -l ...
  /// Render a newline-separated listing of the requested `item` kind
  /// (devices, playlists, or liked songs), each entry formatted with `format`.
  /// `offset` is forwarded to the paging API calls where applicable.
  pub async fn list(&mut self, item: Type, format: &str, offset: Option<u32>) -> String {
    match item {
      Type::Device => {
        if let Some(devices) = &self.net.app.lock().await.devices {
          devices
            .devices
            .iter()
            .map(|d| {
              self.format_output(
                format.to_string(),
                vec![
                  Format::Device(d.name.clone()),
                  Format::Volume(d.volume_percent),
                ],
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          "No devices available".to_string()
        }
      }
      Type::Playlist => {
        self.net.handle_network_event(IoEvent::GetPlaylists(offset)).await;
        if let Some(playlists) = &self.net.app.lock().await.playlists {
          playlists
            .items
            .iter()
            .map(|p| {
              self.format_output(
                format.to_string(),
                Format::from_type(FormatType::Playlist(Box::new(p.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          "No playlists found".to_string()
        }
      }
      Type::Liked => {
        self
          .net
          .handle_network_event(IoEvent::GetCurrentSavedTracks(offset))
          .await;
        let liked_songs = self
          .net
          .app
          .lock()
          .await
          .track_table
          .tracks
          .iter()
          .map(|t| {
            self.format_output(
              format.to_string(),
              Format::from_type(FormatType::Track(Box::new(t.clone()))),
            )
          })
          .collect::<Vec<String>>();
        // Check if there are any liked songs
        if liked_songs.is_empty() {
          "No liked songs found".to_string()
        } else {
          liked_songs.join("\n")
        }
      }
      // Enforced by clap
      _ => unreachable!(),
    }
  }
// spt playback --transfer DEVICE
pub async fn transfer_playback(&mut self, device: &str) -> Result<()> {
// Get the device id by name
let mut id = String::new();
if let Some(devices) = &self.net.app.lock().await.devices {
for d in &devices.devices {
if d.name == device {
id.push_str(d.id.as_str());
break;
}
}
};
if id.is_empty() {
Err(anyhow!("no device with name '{}'", device))
} else {
self
.net
.handle_network_event(IoEvent::TransferPlaybackToDevice(id.to_string()))
.await;
Ok(())
}
}
  /// Seek within the current item. `seconds_str` is either relative
  /// ("+N" / "-N" seconds from the current position) or an absolute position
  /// in seconds. Seeking past the end skips to the next track instead.
  pub async fn seek(&mut self, seconds_str: String) -> Result<()> {
    let seconds = match seconds_str.parse::<i32>() {
      // NOTE(review): `abs()` overflows (panics in debug) for i32::MIN input;
      // consider `unsigned_abs()`.
      Ok(s) => s.abs() as u32,
      Err(_) => return Err(anyhow!("failed to convert seconds to i32")),
    };
    // Refresh playback state, then capture current position and item duration.
    let (current_pos, duration) = {
      self
        .net
        .handle_network_event(IoEvent::GetCurrentPlayback)
        .await;
      let app = self.net.app.lock().await;
      if let Some(CurrentlyPlaybackContext {
        progress_ms: Some(ms),
        item: Some(item),
        ..
      }) = &app.current_playback_context
      {
        let duration = match item {
          PlayingItem::Track(track) => track.duration_ms,
          PlayingItem::Episode(episode) => episode.duration_ms,
        };
        (*ms as u32, duration)
      } else {
        return Err(anyhow!("no context available"));
      }
    };
    // Convert secs to ms
    let ms = seconds * 1000;
    // Calculate new positon
    let position_to_seek = if seconds_str.starts_with('+') {
      current_pos + ms
    } else if seconds_str.starts_with('-') {
      // Jump to the beginning if the position_to_seek would be
      // negative, must be checked before the calculation to avoid
      // an 'underflow'
      if ms > current_pos {
        0u32
      } else {
        current_pos - ms
      }
    } else {
      // Absolute value of the track
      seconds * 1000
    };
    // Check if position_to_seek is greater than duration (next track)
    if position_to_seek > duration {
      self.jump(&JumpDirection::Next).await;
    } else {
      // This seeks to a position in the current song
      self
        .net
        .handle_network_event(IoEvent::Seek(position_to_seek))
        .await;
    }
    Ok(())
  }
  // spt playback --like / --dislike / --shuffle / --repeat
  /// Apply a playback flag: (un)like the current track, or toggle
  /// shuffle/repeat state. Errors when there is no playback context,
  /// no item playing, or the item cannot be saved.
  pub async fn mark(&mut self, flag: Flag) -> Result<()> {
    let c = {
      let app = self.net.app.lock().await;
      app
        .current_playback_context
        .clone()
        .ok_or_else(|| anyhow!("no context available"))?
    };
    match flag {
      Flag::Like(s) => {
        // Get the id of the current song
        let id = match c.item {
          Some(i) => match i {
            PlayingItem::Track(t) => t.id.ok_or_else(|| anyhow!("item has no id")),
            PlayingItem::Episode(_) => Err(anyhow!("saving episodes not yet implemented")),
          },
          None => Err(anyhow!("no item playing")),
        }?;
        // Want to like but is already liked -> do nothing
        // Want to like and is not liked yet -> like
        if s && !self.is_a_saved_track(&id).await {
          self
            .net
            .handle_network_event(IoEvent::ToggleSaveTrack(id))
            .await;
        // Want to dislike but is already disliked -> do nothing
        // Want to dislike and is liked currently -> remove like
        } else if !s && self.is_a_saved_track(&id).await {
          self
            .net
            .handle_network_event(IoEvent::ToggleSaveTrack(id))
            .await;
        }
      }
      Flag::Shuffle => {
        self
          .net
          .handle_network_event(IoEvent::Shuffle(c.shuffle_state))
          .await
      }
      Flag::Repeat => {
        self
          .net
          .handle_network_event(IoEvent::Repeat(c.repeat_state))
          .await;
      }
    }
    Ok(())
  }
  // spt playback -s
  /// Produce a one-line status string for the current playback, rendered with
  /// `format` placeholders (item fields, position, flags, device, volume,
  /// playing state). Errors when no context or no item is playing.
  pub async fn get_status(&mut self, format: String) -> Result<String> {
    // Update info on current playback
    self
      .net
      .handle_network_event(IoEvent::GetCurrentPlayback)
      .await;
    self
      .net
      .handle_network_event(IoEvent::GetCurrentSavedTracks(None))
      .await;
    let context = self
      .net
      .app
      .lock()
      .await
      .current_playback_context
      .clone()
      .ok_or_else(|| anyhow!("no context available"))?;
    let playing_item = context.item.ok_or_else(|| anyhow!("no track playing"))?;
    // Collect the placeholder values; the "liked" flag only applies to tracks.
    let mut hs = match playing_item {
      PlayingItem::Track(track) => {
        let id = track.id.clone().unwrap_or_default();
        let mut hs = Format::from_type(FormatType::Track(Box::new(track.clone())));
        if let Some(ms) = context.progress_ms {
          hs.push(Format::Position((ms, track.duration_ms)))
        }
        hs.push(Format::Flags((
          context.repeat_state,
          context.shuffle_state,
          self.is_a_saved_track(&id).await,
        )));
        hs
      }
      PlayingItem::Episode(episode) => {
        let mut hs = Format::from_type(FormatType::Episode(Box::new(episode.clone())));
        if let Some(ms) = context.progress_ms {
          hs.push(Format::Position((ms, episode.duration_ms)))
        }
        hs.push(Format::Flags((
          context.repeat_state,
          context.shuffle_state,
          false,
        )));
        hs
      }
    };
    hs.push(Format::Device(context.device.name));
    hs.push(Format::Volume(context.device.volume_percent));
    hs.push(Format::Playing(context.is_playing));
    Ok(self.format_output(format, hs))
  }
  // spt play -u URI
  /// Play (or, for tracks, optionally queue) the given Spotify URI.
  /// With `random` set and a playlist URI, playback starts at a random offset
  /// inside the playlist. Errors are surfaced through the app's error handler.
  pub async fn play_uri(&mut self, uri: String, queue: bool, random: bool) {
    let offset = if random {
      // Only works with playlists for now
      if uri.contains("spotify:playlist:") {
        // Playlist id is the last ':'-separated segment of the URI.
        let id = uri.split(':').last().unwrap();
        match self.net.spotify.playlist(id, None, None).await {
          Ok(p) => {
            let num = p.tracks.total;
            Some(thread_rng().gen_range(0..num) as usize)
          }
          Err(e) => {
            self
              .net
              .app
              .lock()
              .await
              .handle_error(anyhow!(e.to_string()));
            return;
          }
        }
      } else {
        None
      }
    } else {
      None
    };
    if uri.contains("spotify:track:") {
      if queue {
        self
          .net
          .handle_network_event(IoEvent::AddItemToQueue(uri))
          .await;
      } else {
        // Single track: play it as a one-element list from the start.
        self
          .net
          .handle_network_event(IoEvent::StartPlayback(
            None,
            Some(vec![uri.clone()]),
            Some(0),
          ))
          .await;
      }
    } else {
      // Context URI (album/artist/playlist/show): play with optional offset.
      self
        .net
        .handle_network_event(IoEvent::StartPlayback(Some(uri.clone()), None, offset))
        .await;
    }
  }
  // spt play -n NAME ...
  /// Search for `name` within the given `item` category and play (or queue)
  /// the first result's URI. Errors if the search yields no results of that
  /// kind, or if a found album carries no URI.
  pub async fn play(&mut self, name: String, item: Type, queue: bool, random: bool) -> Result<()> {
    self
      .net
      .handle_network_event(IoEvent::GetSearchResults(name.clone(), None))
      .await;
    // Get the uri of the first found
    // item + the offset or return an error message
    let uri = {
      let results = &self.net.app.lock().await.search_results;
      match item {
        Type::Track => {
          if let Some(r) = &results.tracks {
            r.items[0].uri.clone()
          } else {
            return Err(anyhow!("no tracks with name '{}'", name));
          }
        }
        Type::Album => {
          if let Some(r) = &results.albums {
            let album = &r.items[0];
            if let Some(uri) = &album.uri {
              uri.clone()
            } else {
              return Err(anyhow!("album {} has no uri", album.name));
            }
          } else {
            return Err(anyhow!("no albums with name '{}'", name));
          }
        }
        Type::Artist => {
          if let Some(r) = &results.artists {
            r.items[0].uri.clone()
          } else {
            return Err(anyhow!("no artists with name '{}'", name));
          }
        }
        Type::Show => {
          if let Some(r) = &results.shows {
            r.items[0].uri.clone()
          } else {
            return Err(anyhow!("no shows with name '{}'", name));
          }
        }
        Type::Playlist => {
          if let Some(r) = &results.playlists {
            let p = &r.items[0];
            // For a random song, create a random offset
            p.uri.clone()
          } else {
            return Err(anyhow!("no playlists with name '{}'", name));
          }
        }
        _ => unreachable!(),
      }
    };
    // Play or queue the uri
    self.play_uri(uri, queue, random).await;
    Ok(())
  }
  // spt query -s SEARCH ...
  /// Run a search for `search` and render all results of the requested `item`
  /// kind, one `format`-rendered line per result. Returns a "no … with name"
  /// message when the category had no results.
  pub async fn query(&mut self, search: String, format: String, item: Type) -> String {
    self
      .net
      .handle_network_event(IoEvent::GetSearchResults(search.clone(), None))
      .await;
    let app = self.net.app.lock().await;
    match item {
      Type::Playlist => {
        if let Some(results) = &app.search_results.playlists {
          results
            .items
            .iter()
            .map(|r| {
              self.format_output(
                format.clone(),
                Format::from_type(FormatType::Playlist(Box::new(r.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          format!("no playlists with name '{}'", search)
        }
      }
      Type::Track => {
        if let Some(results) = &app.search_results.tracks {
          results
            .items
            .iter()
            .map(|r| {
              self.format_output(
                format.clone(),
                Format::from_type(FormatType::Track(Box::new(r.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          format!("no tracks with name '{}'", search)
        }
      }
      Type::Artist => {
        if let Some(results) = &app.search_results.artists {
          results
            .items
            .iter()
            .map(|r| {
              self.format_output(
                format.clone(),
                Format::from_type(FormatType::Artist(Box::new(r.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          format!("no artists with name '{}'", search)
        }
      }
      Type::Show => {
        if let Some(results) = &app.search_results.shows {
          results
            .items
            .iter()
            .map(|r| {
              self.format_output(
                format.clone(),
                Format::from_type(FormatType::Show(Box::new(r.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          format!("no shows with name '{}'", search)
        }
      }
      Type::Album => {
        if let Some(results) = &app.search_results.albums {
          results
            .items
            .iter()
            .map(|r| {
              self.format_output(
                format.clone(),
                Format::from_type(FormatType::Album(Box::new(r.clone()))),
              )
            })
            .collect::<Vec<String>>()
            .join("\n")
        } else {
          format!("no albums with name '{}'", search)
        }
      }
      // Enforced by clap
      _ => unreachable!(),
    }
  }
}
| 28.603916 | 99 | 0.51956 |
1e0dcb3161b5b960c31656de505713d3f98aab25 | 24,605 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
chain_state::{ChainState, SystemExecutionContext, TransactionExecutionContext},
counters::*,
data_cache::{BlockDataCache, RemoteCache},
move_vm::MoveVM,
system_module_names::*,
system_txn::block_metadata_processor::process_block_metadata,
VMExecutor, VMVerifier,
};
use libra_config::config::{VMConfig, VMPublishingOption};
use libra_crypto::HashValue;
use libra_logger::prelude::*;
use libra_state_view::StateView;
use libra_types::{
block_metadata::BlockMetadata,
byte_array::ByteArray,
transaction::{
ChangeSet, SignatureCheckedTransaction, SignedTransaction, Transaction,
TransactionArgument, TransactionOutput, TransactionPayload, TransactionStatus,
MAX_TRANSACTION_SIZE_IN_BYTES,
},
vm_error::{sub_status, StatusCode, VMStatus},
write_set::WriteSet,
};
use rayon::prelude::*;
use std::sync::Arc;
use vm::errors::convert_prologue_runtime_error;
use vm::{
errors::VMResult,
gas_schedule::{self, AbstractMemorySize, CostTable, GasAlgebra, GasCarrier, GasUnits},
transaction_metadata::TransactionMetadata,
};
use vm_runtime_types::value::Value;
#[derive(Clone)]
/// A wrapper to make VMRuntime standalone and thread safe.
pub struct LibraVM {
    // Shared handle to the Move VM runtime.
    move_vm: Arc<MoveVM>,
    // Gas cost table loaded from on-chain state; `None` until loaded.
    gas_schedule: Option<CostTable>,
    // VM configuration (e.g. publishing options) this instance was built with.
    config: VMConfig,
}
impl LibraVM {
pub fn new(config: &VMConfig) -> Self {
let inner = MoveVM::new();
Self {
move_vm: Arc::new(inner),
gas_schedule: None,
config: config.clone(),
}
}
    /// (Re)load the gas schedule from on-chain state via the Move VM;
    /// leaves `gas_schedule` as `None` if loading fails.
    fn load_gas_schedule(&mut self, data_cache: &dyn RemoteCache) {
        let mut ctx = SystemExecutionContext::new(data_cache, GasUnits::new(0));
        self.gas_schedule = self.move_vm.load_gas_schedule(&mut ctx, data_cache).ok();
    }
fn get_gas_schedule(&self) -> VMResult<&CostTable> {
self.gas_schedule.as_ref().ok_or_else(|| {
VMStatus::new(StatusCode::VM_STARTUP_FAILURE)
.with_sub_status(sub_status::VSF_GAS_SCHEDULE_NOT_FOUND)
})
}
    /// Validate a transaction payload against the VM's publishing options:
    /// write sets are vetted by `check_change_set`, scripts must appear in the
    /// allow-list (unless open publishing), and modules require open publishing.
    /// The legacy `Program` variant is always rejected.
    fn check_payload(
        &self,
        payload: &TransactionPayload,
        state_view: &dyn StateView,
    ) -> VMResult<()> {
        match payload {
            // TODO: Remove WriteSet from TransactionPayload.
            TransactionPayload::WriteSet(change_set) => {
                self.check_change_set(change_set, state_view)
            }
            TransactionPayload::Script(script) => {
                if !is_allowed_script(&self.config.publishing_options, &script.code()) {
                    warn!("[VM] Custom scripts not allowed: {:?}", &script.code());
                    Err(VMStatus::new(StatusCode::UNKNOWN_SCRIPT))
                } else {
                    Ok(())
                }
            }
            TransactionPayload::Module(_module) => {
                if !&self.config.publishing_options.is_open() {
                    warn!("[VM] Custom modules not allowed");
                    Err(VMStatus::new(StatusCode::UNKNOWN_MODULE))
                } else {
                    Ok(())
                }
            }
            TransactionPayload::Program => Err(VMStatus::new(StatusCode::UNKNOWN_SCRIPT)),
        }
    }
fn check_change_set(&self, change_set: &ChangeSet, state_view: &dyn StateView) -> VMResult<()> {
// TODO: Replace this logic with actual checks.
if state_view.is_genesis() {
for (_access_path, write_op) in change_set.write_set() {
// Genesis transactions only add entries, never delete them.
if write_op.is_deletion() {
error!("[VM] Bad genesis block");
// TODO: return more detailed error somehow?
return Err(VMStatus::new(StatusCode::INVALID_WRITE_SET));
}
}
Ok(())
} else {
Err(VMStatus::new(StatusCode::REJECTED_WRITE_SET))
}
}
fn check_gas(&self, txn: &SignedTransaction) -> VMResult<()> {
// Do not check gas limit for writeset transaction.
if let TransactionPayload::WriteSet(_) = txn.payload() {
return Ok(());
}
let raw_bytes_len = AbstractMemorySize::new(txn.raw_txn_bytes_len() as GasCarrier);
// The transaction is too large.
if txn.raw_txn_bytes_len() > MAX_TRANSACTION_SIZE_IN_BYTES {
let error_str = format!(
"max size: {}, txn size: {}",
MAX_TRANSACTION_SIZE_IN_BYTES,
raw_bytes_len.get()
);
warn!(
"[VM] Transaction size too big {} (max {})",
raw_bytes_len.get(),
MAX_TRANSACTION_SIZE_IN_BYTES
);
return Err(
VMStatus::new(StatusCode::EXCEEDED_MAX_TRANSACTION_SIZE).with_message(error_str)
);
}
// Check is performed on `txn.raw_txn_bytes_len()` which is the same as
// `raw_bytes_len`
assume!(raw_bytes_len.get() <= MAX_TRANSACTION_SIZE_IN_BYTES as u64);
// The submitted max gas units that the transaction can consume is greater than the
// maximum number of gas units bound that we have set for any
// transaction.
if txn.max_gas_amount() > gas_schedule::MAXIMUM_NUMBER_OF_GAS_UNITS.get() {
let error_str = format!(
"max gas units: {}, gas units submitted: {}",
gas_schedule::MAXIMUM_NUMBER_OF_GAS_UNITS.get(),
txn.max_gas_amount()
);
warn!(
"[VM] Gas unit error; max {}, submitted {}",
gas_schedule::MAXIMUM_NUMBER_OF_GAS_UNITS.get(),
txn.max_gas_amount()
);
return Err(
VMStatus::new(StatusCode::MAX_GAS_UNITS_EXCEEDS_MAX_GAS_UNITS_BOUND)
.with_message(error_str),
);
}
// The submitted transactions max gas units needs to be at least enough to cover the
// intrinsic cost of the transaction as calculated against the size of the
// underlying `RawTransaction`
let min_txn_fee = gas_schedule::calculate_intrinsic_gas(raw_bytes_len);
if txn.max_gas_amount() < min_txn_fee.get() {
let error_str = format!(
"min gas required for txn: {}, gas submitted: {}",
min_txn_fee.get(),
txn.max_gas_amount()
);
warn!(
"[VM] Gas unit error; min {}, submitted {}",
min_txn_fee.get(),
txn.max_gas_amount()
);
return Err(
VMStatus::new(StatusCode::MAX_GAS_UNITS_BELOW_MIN_TRANSACTION_GAS_UNITS)
.with_message(error_str),
);
}
// The submitted gas price is less than the minimum gas unit price set by the VM.
// NB: MIN_PRICE_PER_GAS_UNIT may equal zero, but need not in the future. Hence why
// we turn off the clippy warning.
#[allow(clippy::absurd_extreme_comparisons)]
let below_min_bound = txn.gas_unit_price() < gas_schedule::MIN_PRICE_PER_GAS_UNIT.get();
if below_min_bound {
let error_str = format!(
"gas unit min price: {}, submitted price: {}",
gas_schedule::MIN_PRICE_PER_GAS_UNIT.get(),
txn.gas_unit_price()
);
warn!(
"[VM] Gas unit error; min {}, submitted {}",
gas_schedule::MIN_PRICE_PER_GAS_UNIT.get(),
txn.gas_unit_price()
);
return Err(
VMStatus::new(StatusCode::GAS_UNIT_PRICE_BELOW_MIN_BOUND).with_message(error_str)
);
}
// The submitted gas price is greater than the maximum gas unit price set by the VM.
if txn.gas_unit_price() > gas_schedule::MAX_PRICE_PER_GAS_UNIT.get() {
let error_str = format!(
"gas unit max price: {}, submitted price: {}",
gas_schedule::MAX_PRICE_PER_GAS_UNIT.get(),
txn.gas_unit_price()
);
warn!(
"[VM] Gas unit error; min {}, submitted {}",
gas_schedule::MAX_PRICE_PER_GAS_UNIT.get(),
txn.gas_unit_price()
);
return Err(
VMStatus::new(StatusCode::GAS_UNIT_PRICE_ABOVE_MAX_BOUND).with_message(error_str)
);
}
Ok(())
}
    /// Verify a signature-checked transaction: gas/size checks, payload checks,
    /// and the Move prologue. On success returns the payload in a form ready
    /// for execution (script code + args, or module bytes). WriteSet payloads
    /// must not reach this function (handled elsewhere), hence `UNREACHABLE`.
    fn verify_transaction_impl(
        &self,
        transaction: &SignatureCheckedTransaction,
        gas_schedule: VMResult<&CostTable>,
        state_view: &dyn StateView,
        remote_cache: &dyn RemoteCache,
    ) -> VMResult<VerifiedTranscationPayload> {
        // Prologue runs in a gas-free system context.
        let mut ctx = SystemExecutionContext::new(remote_cache, GasUnits::new(0));
        self.check_gas(transaction)?;
        self.check_payload(transaction.payload(), state_view)?;
        let txn_data = TransactionMetadata::new(transaction);
        match transaction.payload() {
            TransactionPayload::Program => Err(VMStatus::new(StatusCode::UNKNOWN_SCRIPT)),
            TransactionPayload::Script(script) => {
                self.run_prologue(gas_schedule, &mut ctx, &txn_data)?;
                Ok(VerifiedTranscationPayload::Script(
                    script.code().to_vec(),
                    script.args().to_vec(),
                ))
            }
            TransactionPayload::Module(module) => {
                self.run_prologue(gas_schedule, &mut ctx, &txn_data)?;
                Ok(VerifiedTranscationPayload::Module(module.code().to_vec()))
            }
            TransactionPayload::WriteSet(_) => Err(VMStatus::new(StatusCode::UNREACHABLE)),
        }
    }
    /// Execute an already-verified payload (publish module or run script) in a
    /// metered context, then run the epilogue in a gas-free context. On any
    /// failure, the epilogue is still run (to charge fees) against the gas
    /// remaining at the point of failure; if even that fails, the transaction
    /// output is discarded.
    fn execute_verified_payload(
        &mut self,
        remote_cache: &mut BlockDataCache<'_>,
        txn_data: &TransactionMetadata,
        payload: VerifiedTranscationPayload,
    ) -> TransactionOutput {
        let mut ctx = TransactionExecutionContext::new(txn_data.max_gas_amount(), remote_cache);
        // TODO: The logic for handling falied transaction fee is pretty ugly right now. Fix it later.
        // Gas remaining when execution failed; captured by the closures below.
        let mut failed_gas_left = GasUnits::new(0);
        match payload {
            VerifiedTranscationPayload::Module(m) => {
                self.move_vm.publish_module(m, &mut ctx, txn_data)
            }
            VerifiedTranscationPayload::Script(s, args) => {
                let gas_schedule = match self.get_gas_schedule() {
                    Ok(s) => s,
                    Err(e) => return discard_error_output(e),
                };
                self.move_vm.execute_script(
                    s,
                    gas_schedule,
                    &mut ctx,
                    txn_data,
                    convert_txn_args(args),
                )
            }
        }
        .map_err(|err| {
            failed_gas_left = ctx.gas_left();
            err
        })
        .and_then(|_| {
            failed_gas_left = ctx.gas_left();
            // Success path: run the epilogue gas-free over the execution context.
            let mut gas_free_ctx = SystemExecutionContext::from(ctx);
            self.run_epilogue(&mut gas_free_ctx, txn_data)
                .and_then(|_| gas_free_ctx.get_transaction_output(txn_data, Ok(())))
        })
        .unwrap_or_else(|err| {
            // Failure path: rebuild a gas-free context from the cache with the
            // remaining gas, and try to run the epilogue to charge the fee.
            let mut gas_free_ctx = SystemExecutionContext::new(remote_cache, failed_gas_left);
            self.run_epilogue(&mut gas_free_ctx, txn_data)
                .and_then(|_| gas_free_ctx.get_transaction_output(txn_data, Err(err)))
                .unwrap_or_else(discard_error_output)
        })
    }
    /// Verifies and then executes a single signature-checked user transaction,
    /// recording timing stats for the verification and execution phases.
    fn execute_user_transaction(
        &mut self,
        state_view: &dyn StateView,
        remote_cache: &mut BlockDataCache<'_>,
        txn: &SignatureCheckedTransaction,
    ) -> TransactionOutput {
        let txn_data = TransactionMetadata::new(txn);
        let verified_payload = record_stats! {time_hist | TXN_VERIFICATION_TIME_TAKEN | {
            self.verify_transaction_impl(txn, self.get_gas_schedule(), state_view, remote_cache)
        }};
        // Any verification or execution error collapses into a discarded output.
        let result = verified_payload
            .and_then(|verified_payload| {
                record_stats! {time_hist | TXN_EXECUTION_TIME_TAKEN | {
                    Ok(self.execute_verified_payload(
                        remote_cache,
                        &txn_data,
                        verified_payload,
                    ))
                }}
            })
            .unwrap_or_else(discard_error_output);
        // Only kept transactions feed their write set back into the block-local
        // cache, so subsequent transactions in the block observe their effects.
        if let TransactionStatus::Keep(_) = result.status() {
            remote_cache.push_write_set(result.write_set())
        };
        result
    }
fn process_change_set(
&mut self,
remote_cache: &mut BlockDataCache<'_>,
change_set: ChangeSet,
) -> TransactionOutput {
let (write_set, events) = change_set.into_inner();
remote_cache.push_write_set(&write_set);
self.load_gas_schedule(remote_cache);
TransactionOutput::new(
write_set,
events,
0,
VMStatus::new(StatusCode::EXECUTED).into(),
)
}
    /// Run the prologue of a transaction by calling into `PROLOGUE_NAME` function stored
    /// in the `ACCOUNT_MODULE` on chain.
    ///
    /// The prologue receives the sequence number, public key, gas price, max gas
    /// units and expiration time of the transaction. Any runtime error it raises is
    /// mapped to a validation status via `convert_prologue_runtime_error`.
    fn run_prologue<T: ChainState>(
        &self,
        gas_schedule: VMResult<&CostTable>,
        chain_state: &mut T,
        txn_data: &TransactionMetadata,
    ) -> VMResult<()> {
        let txn_sequence_number = txn_data.sequence_number();
        let txn_public_key = txn_data.public_key().to_bytes().to_vec();
        let txn_gas_price = txn_data.gas_unit_price().get();
        let txn_max_gas_units = txn_data.max_gas_amount().get();
        let txn_expiration_time = txn_data.expiration_time();
        record_stats! {time_hist | TXN_PROLOGUE_TIME_TAKEN | {
            self.move_vm
                .execute_function(
                    &ACCOUNT_MODULE,
                    &PROLOGUE_NAME,
                    // `?` propagates a missing gas schedule as the prologue error.
                    gas_schedule?,
                    chain_state,
                    &txn_data,
                    vec![
                        Value::u64(txn_sequence_number),
                        Value::byte_array(ByteArray::new(txn_public_key)),
                        Value::u64(txn_gas_price),
                        Value::u64(txn_max_gas_units),
                        Value::u64(txn_expiration_time),
                    ],
                )
                .map_err(|err| convert_prologue_runtime_error(&err, &txn_data.sender))
        }
        }
    }
    /// Run the epilogue of a transaction by calling into `EPILOGUE_NAME` function stored
    /// in the `ACCOUNT_MODULE` on chain.
    ///
    /// The epilogue receives the sequence number, gas price, max gas units and the
    /// gas remaining in `chain_state` — presumably used on-chain to settle the
    /// transaction fee (the Move-level contract lives in `ACCOUNT_MODULE`).
    fn run_epilogue<T: ChainState>(
        &self,
        chain_state: &mut T,
        txn_data: &TransactionMetadata,
    ) -> VMResult<()> {
        let txn_sequence_number = txn_data.sequence_number();
        let txn_gas_price = txn_data.gas_unit_price().get();
        let txn_max_gas_units = txn_data.max_gas_amount().get();
        let gas_remaining = chain_state.remaining_gas().get();
        record_stats! {time_hist | TXN_EPILOGUE_TIME_TAKEN | {
            self.move_vm.execute_function(
                &ACCOUNT_MODULE,
                &EPILOGUE_NAME,
                self.get_gas_schedule()?,
                chain_state,
                &txn_data,
                vec![
                    Value::u64(txn_sequence_number),
                    Value::u64(txn_gas_price),
                    Value::u64(txn_max_gas_units),
                    Value::u64(gas_remaining),
                ],
            )
        }
        }
    }
    /// Drives execution of a whole block: chunks the transactions by flow (see
    /// `chunk_block_transactions`) and executes each chunk with the logic that
    /// matches its kind, threading one shared `BlockDataCache` through so later
    /// chunks observe the effects of earlier ones.
    fn execute_block_impl(
        &mut self,
        transactions: Vec<Transaction>,
        state_view: &dyn StateView,
    ) -> VMResult<Vec<TransactionOutput>> {
        let count = transactions.len();
        let mut result = vec![];
        let blocks = chunk_block_transactions(transactions);
        let mut data_cache = BlockDataCache::new(state_view);
        self.load_gas_schedule(&data_cache);
        for block in blocks {
            match block {
                TransactionBlock::UserTransaction(txns) => {
                    let mut outs =
                        self.execute_user_transactions(txns, &mut data_cache, state_view)?;
                    result.append(&mut outs);
                }
                // Block metadata is handed straight to the runtime.
                TransactionBlock::BlockPrologue(block_metadata) => {
                    result.push(self.move_vm.execute_runtime(|runtime| {
                        process_block_metadata(block_metadata, runtime, &mut data_cache)
                    })?)
                }
                // An invalid change set becomes a discarded output rather than
                // failing the whole block.
                TransactionBlock::WriteSet(change_set) => result.push(
                    self.check_change_set(&change_set, state_view)
                        .map(|_| self.process_change_set(&mut data_cache, change_set))
                        .unwrap_or_else(discard_error_output),
                ),
            }
        }
        report_block_count(count);
        Ok(result)
    }
    /// Executes a run of consecutive user transactions.
    ///
    /// Signatures for the whole batch are checked in parallel (`into_par_iter`)
    /// up front; transactions failing the check become discarded outputs, the
    /// rest are verified and executed sequentially against `data_cache`.
    fn execute_user_transactions(
        &mut self,
        txn_block: Vec<SignedTransaction>,
        data_cache: &mut BlockDataCache<'_>,
        state_view: &dyn StateView,
    ) -> VMResult<Vec<TransactionOutput>> {
        let signature_verified_block: Vec<Result<SignatureCheckedTransaction, VMStatus>> =
            txn_block
                .into_par_iter()
                .map(|txn| {
                    txn.check_signature()
                        .map_err(|_| VMStatus::new(StatusCode::INVALID_SIGNATURE))
                })
                .collect();
        let mut result = vec![];
        for transaction in signature_verified_block {
            record_stats! {time_hist | TXN_TOTAL_TIME_TAKEN | {
                let output = match transaction {
                    Ok(txn) => self.execute_user_transaction(state_view, data_cache, &txn),
                    Err(e) => discard_error_output(e),
                };
                report_execution_status(output.status());
                // `result` is initially empty, a single element is pushed per loop iteration and
                // the number of iterations is bound to the max size of `signature_verified_block`
                assume!(result.len() < usize::max_value());
                result.push(output);
            }
            }
        }
        Ok(result)
    }
}
/// Builds the output for a transaction that is being discarded: no writes, no
/// events, zero gas — only the `Discard` status carrying `err`.
pub(crate) fn discard_error_output(err: VMStatus) -> TransactionOutput {
    // A discarded transaction must not leave any trace in the state.
    let empty_writes = WriteSet::default();
    let no_events = Vec::new();
    TransactionOutput::new(empty_writes, no_events, 0, TransactionStatus::Discard(err))
}
// Validators external API
impl VMVerifier for LibraVM {
    /// Determine if a transaction is valid. Will return `None` if the transaction is accepted,
    /// `Some(Err)` if the VM rejects it, with `Err` as an error code. Verification performs the
    /// following steps:
    /// 1. The signature on the `SignedTransaction` matches the public key included in the
    ///    transaction
    /// 2. The script to be executed is under given specific configuration.
    /// 3. Invokes `LibraAccount.prologue`, which checks properties such as the transaction has the
    ///    right sequence number and the sender has enough balance to pay for the gas.
    /// TBD:
    /// 1. Transaction arguments matches the main function's type signature.
    ///    We don't check this item for now and would execute the check at execution time.
    fn validate_transaction(
        &self,
        transaction: SignedTransaction,
        state_view: &dyn StateView,
    ) -> Option<VMStatus> {
        let data_cache = BlockDataCache::new(state_view);
        record_stats! {time_hist | TXN_VALIDATION_TIME_TAKEN | {
            // Validation never charges gas, so start from a zero-gas system context.
            let mut ctx = SystemExecutionContext::new(&data_cache, GasUnits::new(0));
            let gas_schedule = self.move_vm.load_gas_schedule(&mut ctx, &data_cache);
            let signature_verified_txn = match transaction.check_signature() {
                Ok(t) => t,
                Err(_) => return Some(VMStatus::new(StatusCode::INVALID_SIGNATURE)),
            };
            // Reuse the same checks as execution-time verification (gas, payload, prologue).
            let res = match self.verify_transaction_impl(&signature_verified_txn, gas_schedule.as_ref().map_err(|err| err.clone()), state_view, &data_cache) {
                Ok(_) => None,
                Err(err) => {
                    if err.major_status == StatusCode::SEQUENCE_NUMBER_TOO_NEW {
                        // A too-new sequence number is not a rejection at validation
                        // time — the transaction may become valid later.
                        None
                    } else {
                        Some(convert_prologue_runtime_error(&err, &signature_verified_txn.sender()))
                    }
                }
            };
            report_verification_status(&res);
            res
        }
        }
    }
}
// Executor external API
impl VMExecutor for LibraVM {
    /// Execute a block of `transactions` against `state_view`.
    ///
    /// The returned vector has exactly one `TransactionOutput` per input
    /// transaction; discarded transactions are reported with
    /// `TransactionStatus::Discard` and an empty `WriteSet`. The state view is
    /// never mutated directly — every effect is encoded in the write set part
    /// of a transaction output.
    fn execute_block(
        transactions: Vec<Transaction>,
        config: &VMConfig,
        state_view: &dyn StateView,
    ) -> VMResult<Vec<TransactionOutput>> {
        LibraVM::new(config).execute_block_impl(transactions, state_view)
    }
}
/// Transactions divided by transaction flow.
/// Transaction flows are different across different types of transactions.
pub enum TransactionBlock {
    /// A run of consecutive user-submitted transactions.
    UserTransaction(Vec<SignedTransaction>),
    /// A direct state change applied as a write set.
    WriteSet(ChangeSet),
    /// Block metadata, handled by `process_block_metadata`.
    BlockPrologue(BlockMetadata),
}
pub fn chunk_block_transactions(txns: Vec<Transaction>) -> Vec<TransactionBlock> {
let mut blocks = vec![];
let mut buf = vec![];
for txn in txns {
match txn {
Transaction::BlockMetadata(data) => {
if !buf.is_empty() {
blocks.push(TransactionBlock::UserTransaction(buf));
buf = vec![];
}
blocks.push(TransactionBlock::BlockPrologue(data));
}
Transaction::WriteSet(cs) => {
if !buf.is_empty() {
blocks.push(TransactionBlock::UserTransaction(buf));
buf = vec![];
}
blocks.push(TransactionBlock::WriteSet(cs));
}
Transaction::UserTransaction(txn) => {
if let TransactionPayload::WriteSet(cs) = txn.payload() {
if !buf.is_empty() {
blocks.push(TransactionBlock::UserTransaction(buf));
buf = vec![];
}
blocks.push(TransactionBlock::WriteSet(cs.clone()));
} else {
buf.push(txn);
}
}
}
}
if !buf.is_empty() {
blocks.push(TransactionBlock::UserTransaction(buf));
}
blocks
}
/// A transaction payload that has passed the verification checks.
// NOTE(review): the name carries a long-standing typo ("Transcation" instead of
// "Transaction"); it is referenced throughout this file, so it is kept as-is here.
enum VerifiedTranscationPayload {
    /// Script bytecode plus its arguments.
    Script(Vec<u8>, Vec<TransactionArgument>),
    /// Module bytecode to publish.
    Module(Vec<u8>),
}
/// Returns whether `program` may be executed under `publishing_option`.
///
/// Open and custom-script configurations allow every script; a locked
/// configuration only admits scripts whose SHA3-256 hash is whitelisted.
pub fn is_allowed_script(publishing_option: &VMPublishingOption, program: &[u8]) -> bool {
    match publishing_option {
        VMPublishingOption::Locked(whitelist) => {
            let digest = HashValue::from_sha3_256(program);
            whitelist.contains(digest.as_ref())
        }
        VMPublishingOption::Open | VMPublishingOption::CustomScripts => true,
    }
}
/// Convert the transaction arguments into move values.
fn convert_txn_args(args: Vec<TransactionArgument>) -> Vec<Value> {
    let mut values = Vec::with_capacity(args.len());
    for arg in args {
        let value = match arg {
            TransactionArgument::Bool(b) => Value::bool(b),
            TransactionArgument::U64(n) => Value::u64(n),
            TransactionArgument::Address(addr) => Value::address(addr),
            TransactionArgument::ByteArray(bytes) => Value::byte_array(bytes),
        };
        values.push(value);
    }
    values
}
#[test]
fn vm_thread_safe() {
    // Compile-time assertions that the VM types can be sent to and shared
    // across threads; the test body does no runtime work.
    fn must_be_send<T: Send>() {}
    fn must_be_sync<T: Sync>() {}
    must_be_send::<LibraVM>();
    must_be_sync::<LibraVM>();
    must_be_send::<MoveVM>();
    must_be_sync::<MoveVM>();
}
| 39.494382 | 162 | 0.567364 |
095accc55fd447dbcb5d9b03f9f53eec0f135597 | 2,496 | use hoi4save::{Encoding, Hoi4Extractor, PdsDate};
use std::error::Error;
mod utils;
// A plaintext save should report its encoding plus the expected player and date.
#[test]
fn test_hoi4_text() -> Result<(), Box<dyn Error>> {
    let bytes = utils::request("1.10-normal-text.zip");
    let (save, encoding) = Hoi4Extractor::builder().extract_save(&bytes)?;
    assert_eq!(encoding, Encoding::Plaintext);
    assert_eq!(save.player, "FRA");
    assert_eq!(save.date.game_fmt().to_string(), "1936.1.1.12");
    Ok(())
}
// A binary (non-ironman) save should decode the same fields as the plaintext one.
#[cfg(ironman)]
#[test]
fn test_hoi4_normal_bin() -> Result<(), Box<dyn Error>> {
    let bytes = utils::request("1.10-normal.zip");
    let (save, encoding) = Hoi4Extractor::builder().extract_save(&bytes)?;
    assert_eq!(encoding, Encoding::Binary);
    assert_eq!(save.player, "FRA");
    assert_eq!(save.date.game_fmt().to_string(), "1936.1.1.12");
    Ok(())
}
// An ironman save should decode to the same player and date as the normal saves.
#[cfg(ironman)]
#[test]
fn test_hoi4_ironman() -> Result<(), Box<dyn Error>> {
    let bytes = utils::request("1.10-ironman.zip");
    let (save, encoding) = Hoi4Extractor::builder().extract_save(&bytes)?;
    assert_eq!(encoding, Encoding::Binary);
    assert_eq!(save.player, "FRA");
    assert_eq!(save.date.game_fmt().to_string(), "1936.1.1.12");
    Ok(())
}
// Melting a binary save to plaintext and re-extracting should preserve its contents.
#[cfg(ironman)]
#[test]
fn test_normal_roundtrip() -> Result<(), Box<dyn Error>> {
    let bytes = utils::request("1.10-normal.zip");
    let (plaintext, _tokens) = hoi4save::Melter::new()
        .with_on_failed_resolve(hoi4save::FailedResolveStrategy::Error)
        .melt(&bytes[..])
        .unwrap();
    let (save, encoding) = Hoi4Extractor::builder().extract_save(&plaintext)?;
    assert_eq!(encoding, Encoding::Plaintext);
    assert_eq!(save.player, "FRA");
    assert_eq!(save.date.game_fmt().to_string(), "1936.1.1.12");
    Ok(())
}
// Melting an ironman save to plaintext and re-extracting should preserve its contents.
#[cfg(ironman)]
#[test]
fn test_ironman_roundtrip() -> Result<(), Box<dyn Error>> {
    let bytes = utils::request("1.10-ironman.zip");
    let (plaintext, _tokens) = hoi4save::Melter::new()
        .with_on_failed_resolve(hoi4save::FailedResolveStrategy::Error)
        .melt(&bytes[..])
        .unwrap();
    let (save, encoding) = Hoi4Extractor::builder().extract_save(&plaintext)?;
    assert_eq!(encoding, Encoding::Plaintext);
    assert_eq!(save.player, "FRA");
    assert_eq!(save.date.game_fmt().to_string(), "1936.1.1.12");
    Ok(())
}
| 29.714286 | 75 | 0.613782 |
ed87eb5ef6a999078ae45df1e9f53351bbf0fbc0 | 16,758 | //! # A simple, type-safe and opinionated graphics crate
//!
//! luminance is an effort to make graphics rendering simple and elegant. It is a _low-level_
//! and opinionated graphics API, highly typed (type-level computations, refined types, etc.)
//! which aims to be simple and performant. Instead of providing users with as many low-level
//! features as possible, luminance provides you with _some ways_ to do rendering. That has
//! both advantages and drawbacks:
//!
//! - On one side, because the API is opinionated, some dynamic branching and decisions are
//! completely removed / optimized. Some operations breaking state mutations or invariant
//! violation are not statically constructible, ensuring safety. Because strong typing is
//! used, lots of runtime checks are also not needed, helping with performance.
//! - On the other side, if you want to do something very specific and very low-level, you
//! will find luminance not to be friendly as it doesn’t like, most of the time, exposing
//! its internal design to the outer world — mostly for runtime safety reason.
//!
//! > A note on _safety_: here, _safety_ is not used as with the Rust definition, but more in
//! > terms of undefined behavior and unwanted behavior. If something can lead to a weird
//! > behavior, a crash, a panic or a black screen, it’s considered `unsafe`. That definition
//! > obviously includes the Rust definition of safety — memory safety.
//!
//! # Feature flags
//!
//! None so far.
//!
//! # What’s included?
//!
//! luminance is a rendering crate, not a 3D engine nor a video game framework. As so, it doesn’t
//! include specific concepts, such as lights, materials, asset management nor scene description. It
//! only provides a rendering library you can plug in whatever you want to.
//!
//! > There are several so-called 3D-engines out there on [crates.io](https://crates.io). Feel free
//! > to have a look around.
//!
//! However, luminance comes with several interesting features:
//!
//! - **Framebuffers**: framebuffers are used to hold renders. Each time you want to perform a
//! render, you need to perform it into a framebuffer. Framebuffers can then be combined with
//! each other to produce effects and design render layers — this is called compositing.
//! - **Shaders**: luminance supports five kinds of shader stages:
//! - Vertex shaders.
//! - Tessellation control shaders.
//! - Tessellation evaluation shaders.
//! - Geometry shaders.
//! - Fragment shaders.
//! - **Vertices, indices, primitives and tessellations**: those are used to define a shape you
//! can render into a framebuffer with a shader. They are mandatory when it comes to rendering.
//! Even if you don’t need vertex data, you still need tessellations to issue draw calls.
//! - **Textures**: textures represent information packed into arrays on the GPU, and can be used
//! to customize a visual aspect or pass information around in shaders. They come in several
//! flavours — e.g. 1D, 2D, cube maps, etc.
//! - **Control on the render state**: the render state is a set of capabilities you can tweak
//! to draw frames. It includes:
//! - The blending equation and factors. Blending is the process of taking two colors from two
//! framebuffers and mixing them.
//! - Whether we should have a depth test performed.
//! - Face culling.
//! - Etc.
//! - And a lot of other cool things like *GPU commands*, *pipelines*, *uniform interfaces* and so
//! on…
//!
//! # How to dig in?
//!
//! luminance is written to be fairly simple. There are several ways to learn how to use luminance:
//!
//! - The [online documentation](https://docs.rs/luminance) is a mandatory start for newcomers.
//! - The [“Learn luminance” book](https://rust-tutorials.github.io/learn-luminance). Ideal for
//! newcomers as well as people already used to luminance, as it’s always updated to the latest
//! version — you might learn new things!
//! - The [luminance-examples](https://github.com/phaazon/luminance-rs/tree/master/luminance-examples)
//! project. It contains lots of examples describing how to do specifics things. Not adapted for
//! newcomers, you will likely want to consult those examples if you’re already familiar with
//!   graphics programming and to look for how to do a specific thing.
//!
//! # Implementation and architecture
//!
//! **luminance** has been originally designed around the OpenGL 3.3 and OpenGL 4.5 APIs. However,
//! it has mutated to adapt to new technologies and modern graphics programming. Even though its API
//! is _not_ meant to converge towards something like Vulkan, it’s changing over time to meet
//! better design decisions and performance implications.
//!
//! The current state of luminance comprises several crates:
//!
//! - A “core” crate, [luminance], which is about all the
//! abstract, common and interface code.
//! - A set of _backend implementation_ crates, implementing the [luminance] crate.
//! - A set of _windowing_ crates, executing your code written with the core and backend crate.
//! - A special crate, [luminance-front], a special _backend_ crate that allows to combine
//! several “official” crates to provide a cross-platform experience without having to pick
//! several backend crates — the crate does it for you. This crate is mainly designed for end-user
//! crates.
//!
//! ## The core crate
//!
//! The luminance crate gathers all the logic and rendering abstractions necessary to write code
//! over various graphics technologies. It contains parametric types and functions that depend on
//! the actual _implementation type_ — as a convention, the type variable `B` (for backend) is
//! used.
//!
//! Backend types — i.e. `B` — are not provided by [luminance] directly. They are typically
//! provided by crates containing the name of the technology as suffix, such as luminance-gl,
//! luminance-webgl, luminance-vk, etc. The interface between those backend crates and
//! luminance is specified in [luminance::backend].
//!
//! On a general note, `Something<ConcreteType, u8>` is a monomorphic type that will be usable
//! **only** with code working over the `ConcreteType` backend. If you want to write a function
//! that accepts an 8-bit integer something without specifying a concrete type, you will have to
//! write something along the lines of:
//!
//! ```ignore
//! use luminance::backend::something::Something as SomethingBackend;
//! use luminance::something::Something;
//!
//! fn work<B>(b: &Something<B, u8>) where B: SomethingBackend<u8> {
//! todo!();
//! }
//! ```
//!
//! This kind of code is intended for people writing libraries with luminance. For the special case
//! of using the [luminance-front] crate, you will end up writing something like:
//!
//! ```ignore
//! use luminance_front::something::Something;
//!
//! fn work(b: &Something<u8>) {
//!   todo!();
//! }
//! ```
//!
//! > In [luminance-front], the backend type is selected at compile and link time. This is often
//! > what people want, but keep in mind that [luminance-front] doesn’t allow to have several
//! > backend types at the same time, which might be something you would like to use, too.
//!
//! ## Backend implementations
//!
//! Backends implement the [luminance::backend] traits and provide, mostly, a single type for each
//! implementation. It’s important to understand that a backend crate can provide several backends
//! (for instance, [luminance-gl] can provide one backend — so one type — for each supported OpenGL
//! version). That backend type will be used throughout the rest of the ecosystem to deduce subsequent
//! implementors and associated types.
//!
//! If you want to implement a backend, you don’t have to push any code to any `luminance` crate.
//! `luminance-*` crates are _official_ ones, but you can write your own backend as well. The
//! interface is highly `unsafe`, though, and based mostly on `unsafe impl` on `unsafe trait`. For
//! more information, feel free to read the documentation of the [luminance::backend] module.
//!
//! ## Windowing
//!
//! luminance doesn’t know anything about the context it executes in. That means that it doesn’t
//! know whether it’s used within SDL, GLFW, glutin, Qt, a web canvas or an embedded specific hardware such as
//! the Nintendo Switch. That is actually powerful, because it allows luminance to be
//! completely agnostic of the execution platform it’s running on: one problem less. However, there
//! is an important point to take into account: a single backend type can be used with several windowing
//! crates / implementations. That allows to re-use a backend with several windowing
//! implementations. The backend will typically explain what are the conditions to create it (like,
//! in OpenGL, the windowing crate must set some specific flags when creating the OpenGL context).
//!
//! luminance does not provide a way to create windows because it’s important that it not depend
//! on windowing libraries – so that end-users can use whatever they like. Furthermore, such
//! libraries typically implement windowing and events features, which have nothing to do with our
//! initial purpose.
//!
//! A windowing crate supporting luminance will typically provide native types by re-exporting
//! symbols (types, functions, etc.) from a windowing crate and the necessary code to make it
//! compatible with luminance. That means providing a way to access a backend type, which
//! implements the [luminance::backend] interface.
//!
//! ## luminance-derive
//!
//! If you are compiling against the `"derive"` feature, you get access to [`luminance-derive`] automatically, which
//! provides a set of _procedural macros_.
//!
//! ### `Vertex`
//!
//! The [`Vertex`] derive proc-macro.
//!
//! That proc-macro allows you to create custom vertex types easily without having to care about
//! implementing the required traits for your types to be usable with the rest of the crate.
//!
//! The [`Vertex`] trait must be implemented if you want to use a type as vertex (passed-in via
//! slices to [`Tess`]). Either you can decide to implement it on your own, or you could just let
//! this crate do the job for you.
//!
//! > Important: the [`Vertex`] trait is `unsafe`, which means that all of its implementors must be
//! > as well. This is due to the fact that vertex formats include information about raw-level
//! > GPU memory and a bad implementation can have undefined behaviors.
//!
//! You can derive the [`Vertex`] trait if your type follows these conditions:
//!
//! - It must be a `struct` with named fields. This is just a temporary limitation that will get
//! dropped as soon as the crate is stable enough.
//! - Its fields must have a type that implements [`VertexAttrib`]. This is mandatory so that the
//! backend knows enough about the types used in the structure to correctly align memory, pick
//! the right types, etc.
//! - Its fields must have a type that implements [`HasSemantics`] as well. This trait is just a
//! type family that associates a single constant (i.e. the semantics) that the vertex attribute
//! uses.
//! - Each field's type must be different.
//!
//! Once all those requirements are met, you can derive [`Vertex`] pretty easily.
//!
//! > Note: feel free to look at the [`Semantics`] proc-macro as well, which provides a way
//! > to generate semantics types: it both implements [`Semantics`] for an `enum` of your
//! > choice and generates the *field* types you can use when defining your vertex
//! > type.
//!
//! The syntax is the following:
//!
//! ```rust
//! # use luminance_derive::{Vertex, Semantics};
//!
//! // visit the Semantics proc-macro documentation for further details
//! #[derive(Clone, Copy, Debug, PartialEq, Semantics)]
//! pub enum Semantics {
//! #[sem(name = "position", repr = "[f32; 3]", wrapper = "VertexPosition")]
//! Position,
//! #[sem(name = "color", repr = "[f32; 4]", wrapper = "VertexColor")]
//! Color
//! }
//!
//! #[derive(Clone, Copy, Debug, PartialEq, Vertex)] // just add Vertex to the list of derived traits
//! #[vertex(sem = "Semantics")] // specify the semantics to use for this type
//! struct MyVertex {
//! position: VertexPosition,
//! color: VertexColor
//! }
//! ```
//!
//! > Note: the `Semantics` enum must be public because of the implementation of [`HasSemantics`]
//! > trait.
//!
//! Besides the `Semantics`-related code, this will:
//!
//! - Create a type called `MyVertex`, a struct that will hold a single vertex.
//! - Implement `Vertex for MyVertex`.
//!
//! The proc-macro also supports an optional `#[vertex(instanced = "<bool>")]` struct attribute.
//! This attribute allows you to specify whether the fields are to be instanced or not. For more
//! about that, have a look at [`VertexInstancing`].
//!
//! ### `Semantics`
//!
//! The [`Semantics`] derive proc-macro.
//!
//! ### `UniformInterface`
//!
//! The [`UniformInterface`] derive proc-macro.
//!
//! The procedural macro is very simple to use. You declare a struct as you would normally do:
//!
//! ```
//! # use luminance::shader::Uniform;
//! # use luminance_derive::UniformInterface;
//!
//! #[derive(Debug, UniformInterface)]
//! struct MyIface {
//! time: Uniform<f32>,
//! resolution: Uniform<[f32; 4]>
//! }
//! ```
//!
//! The effect of this declaration is declaring the `MyIface` struct along with an effective
//! implementation of `UniformInterface` that will try to get the `"time"` and `"resolution"`
//! uniforms in the corresponding shader program. If any of the two uniforms fails to map (inactive
//! uniform, for instance), the whole struct cannot be generated, and an error is raised (see
//! `UniformInterface::uniform_interface`’s documentation for further details).
//!
//! If you don’t use a parameter in your shader, you might not want the whole interface to fail
//! building if that parameter cannot be mapped. You can do that via the `#[unbound]` field
//! attribute:
//!
//! ```
//! # use luminance::shader::Uniform;
//! # use luminance_derive::UniformInterface;
//!
//! #[derive(Debug, UniformInterface)]
//! struct MyIface {
//! #[uniform(unbound)]
//! time: Uniform<f32>, // if this field cannot be mapped, it’ll be ignored
//! resolution: Uniform<[f32; 4]>
//! }
//! ```
//!
//! You can also change the default mapping with the `#[uniform(name = "string_mapping")]`
//! attribute. This changes the name that must be queried from the shader program for the mapping
//! to be complete:
//!
//! ```
//! # use luminance::shader::Uniform;
//! # use luminance_derive::UniformInterface;
//!
//! #[derive(Debug, UniformInterface)]
//! struct MyIface {
//! time: Uniform<f32>,
//! #[uniform(name = "res")]
//! resolution: Uniform<[f32; 4]> // maps "res" from the shader program
//! }
//! ```
//!
//! Finally, you can mix both attributes if you want to change the mapping and have an unbound
//! uniform if it cannot be mapped:
//!
//! ```
//! # use luminance::shader::Uniform;
//! # use luminance_derive::UniformInterface;
//!
//! #[derive(Debug, UniformInterface)]
//! struct MyIface {
//! time: Uniform<f32>,
//! #[uniform(name = "res", unbound)]
//! resolution: Uniform<[f32; 4]> // must map "res" from the shader program and ignored otherwise
//! }
//! ```
//!
//!
//! [luminance]: https://crates.io/crates/luminance
//! [luminance-gl]: https://crates.io/crates/luminance-gl
//! [luminance-front]: https://crates.io/crates/luminance-front
//! [luminance::backend]: crate::backend
//! [`Semantics`]: https://docs.rs/luminance/latest/luminance/vertex/trait.Semantics.html
//! [`HasSemantics`]: https://docs.rs/luminance/latest/luminance/vertex/trait.HasSemantics.html
//! [`Tess`]: https://docs.rs/luminance/latest/luminance/tess/struct.Tess.html
//! [`Vertex`]: https://docs.rs/luminance/latest/luminance/vertex/trait.Vertex.html
//! [`VertexAttrib`]: https://docs.rs/luminance/latest/luminance/vertex/trait.VertexAttrib.html
//! [`VertexInstancing`]: https://docs.rs/luminance/latest/luminance/vertex/enum.VertexInstancing.html
//! [`UniformInterface`]: https://docs.rs/luminance/latest/luminance/shader/program/trait.UniformInterface.html
#![doc(
  html_logo_url = "https://github.com/phaazon/luminance-rs/blob/master/docs/imgs/luminance_alt.svg"
)]
#![deny(missing_docs)]
// Re-export the derive proc-macros (`Vertex`, `Semantics`, `UniformInterface`)
// so enabling the `"derive"` feature is all users need to get them.
#[cfg(feature = "derive")]
pub use luminance_derive::*;
// Public API surface of the crate; each module carries its own documentation.
pub mod backend;
pub mod blending;
pub mod context;
pub mod depth_test;
pub mod face_culling;
pub mod framebuffer;
pub mod pipeline;
pub mod pixel;
pub mod query;
pub mod render_gate;
pub mod render_state;
pub mod scissor;
pub mod shader;
pub mod shading_gate;
pub mod tess;
pub mod tess_gate;
pub mod texture;
pub mod vertex;
c17a54f4f69bbb98717992d487c0d0c8a1e7afba | 25,372 | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Generalized type folding mechanism. The setup is a bit convoluted
//! but allows for convenient usage. Let T be an instance of some
//! "foldable type" (one which implements `TypeFoldable`) and F be an
//! instance of a "folder" (a type which implements `TypeFolder`). Then
//! the setup is intended to be:
//!
//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F)
//!
//! This way, when you define a new folder F, you can override
//! `fold_T()` to customize the behavior, and invoke `T.super_fold_with()`
//! to get the original behavior. Meanwhile, to actually fold
//! something, you can just write `T.fold_with(F)`, which is
//! convenient. (Note that `fold_with` will also transparently handle
//! things like a `Vec<T>` where T is foldable and so on.)
//!
//! In this ideal setup, the only function that actually *does*
//! anything is `T.super_fold_with()`, which traverses the type `T`.
//! Moreover, `T.super_fold_with()` should only ever call `T.fold_with()`.
//!
//! In some cases, we follow a degenerate pattern where we do not have
//! a `fold_T` method. Instead, `T.fold_with` traverses the structure directly.
//! This is suboptimal because the behavior cannot be overridden, but it's
//! much less work to implement. If you ever *do* need an override that
//! doesn't exist, it's not hard to convert the degenerate pattern into the
//! proper thing.
//!
//! A `TypeFoldable` T can also be visited by a `TypeVisitor` V using similar setup:
//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V).
//! These methods return true to indicate that the visitor has found what it is looking for
//! and does not need to visit anything else.
use ty::subst::Substs;
use ty::adjustment;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
use std::fmt;
use util::nodemap::{FxHashMap, FxHashSet};
/// The TypeFoldable trait is implemented for every type that can be folded.
/// Basically, every type that has a corresponding method in TypeFolder.
pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
    /// Structural traversal: folds each sub-item of `self` with `folder`.
    /// Per the module docs, this should only ever call `fold_with` on sub-items.
    fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self;
    /// Entry point for folding; the overridable hook. The default simply
    /// recurses via `super_fold_with`.
    fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
        self.super_fold_with(folder)
    }
    /// Visitor analogue of `super_fold_with`. Returning `true` signals that the
    /// visitor found what it was looking for and traversal can stop.
    fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool;
    /// Entry point for visiting; the overridable hook. The default simply
    /// recurses via `super_visit_with`.
    fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
        self.super_visit_with(visitor)
    }
    /// True if `self` mentions regions escaping the given binder `depth`
    /// (delegates to `HasEscapingRegionsVisitor`).
    fn has_regions_escaping_depth(&self, depth: u32) -> bool {
        self.visit_with(&mut HasEscapingRegionsVisitor { depth: depth })
    }
    /// True if `self` mentions regions escaping depth 0, i.e. regions not
    /// bound within `self` itself.
    fn has_escaping_regions(&self) -> bool {
        self.has_regions_escaping_depth(0)
    }
    /// True if any of the given `TypeFlags` are set within `self`
    /// (delegates to `HasTypeFlagsVisitor`).
    fn has_type_flags(&self, flags: TypeFlags) -> bool {
        self.visit_with(&mut HasTypeFlagsVisitor { flags: flags })
    }
    /// True if `self` contains a projection (`HAS_PROJECTION`).
    fn has_projection_types(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_PROJECTION)
    }
    /// True if `self` contains the error type (`HAS_TY_ERR`).
    fn references_error(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_TY_ERR)
    }
    /// True if `self` contains type parameters (`HAS_PARAMS`).
    fn has_param_types(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_PARAMS)
    }
    /// True if `self` mentions the `Self` type (`HAS_SELF`).
    fn has_self_ty(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_SELF)
    }
    /// True if `self` contains type inference variables (`HAS_TY_INFER`).
    fn has_infer_types(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_TY_INFER)
    }
    /// True if `self` contains type *or* region inference variables, i.e. it
    /// still needs to be resolved by inference.
    fn needs_infer(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_TY_INFER | TypeFlags::HAS_RE_INFER)
    }
    /// True if substitution could change `self` (`NEEDS_SUBST`).
    fn needs_subst(&self) -> bool {
        self.has_type_flags(TypeFlags::NEEDS_SUBST)
    }
    /// True if `self` contains skolemized regions (`HAS_RE_SKOL`).
    fn has_re_skol(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_RE_SKOL)
    }
    /// True if `self` contains closure types (`HAS_TY_CLOSURE`).
    fn has_closure_types(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_TY_CLOSURE)
    }
    /// True if `self` contains regions that could be erased: early-bound,
    /// inference, or free regions.
    fn has_erasable_regions(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_RE_EARLY_BOUND |
                            TypeFlags::HAS_RE_INFER |
                            TypeFlags::HAS_FREE_REGIONS)
    }
    /// True if `self` is fully normalized for translation: no erasable or
    /// inference regions, no type inference variables, no parameters, no
    /// normalizable projections, no errors and no `Self` type.
    fn is_normalized_for_trans(&self) -> bool {
        !self.has_type_flags(TypeFlags::HAS_RE_EARLY_BOUND |
                             TypeFlags::HAS_RE_INFER |
                             TypeFlags::HAS_FREE_REGIONS |
                             TypeFlags::HAS_TY_INFER |
                             TypeFlags::HAS_PARAMS |
                             TypeFlags::HAS_NORMALIZABLE_PROJECTION |
                             TypeFlags::HAS_TY_ERR |
                             TypeFlags::HAS_SELF)
    }
    /// Indicates whether this value references only 'global'
    /// types/lifetimes that are the same regardless of what fn we are
    /// in. This is used for caching. Errs on the side of returning
    /// false.
    fn is_global(&self) -> bool {
        !self.has_type_flags(TypeFlags::HAS_LOCAL_NAMES)
    }
}
/// The TypeFolder trait defines the actual *folding*. There is a
/// method defined for every foldable type. Each of these has a
/// default implementation that does an "identity" fold. Within each
/// identity fold, it should invoke `foo.fold_with(self)` to fold each
/// sub-item.
pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized {
    /// Gives access to the type context this folder operates in.
    fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>;
    /// Folds through a binder; override to track binding depth.
    fn fold_binder<T>(&mut self, t: &Binder<T>) -> Binder<T>
        where T : TypeFoldable<'tcx>
    {
        t.super_fold_with(self)
    }
    /// Folds a type; the most commonly overridden hook.
    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        t.super_fold_with(self)
    }
    /// Folds a type-and-mutability pair.
    fn fold_mt(&mut self, t: &ty::TypeAndMut<'tcx>) -> ty::TypeAndMut<'tcx> {
        t.super_fold_with(self)
    }
    /// Folds an impl header.
    fn fold_impl_header(&mut self, imp: &ty::ImplHeader<'tcx>) -> ty::ImplHeader<'tcx> {
        imp.super_fold_with(self)
    }
    /// Folds a substitution list.
    fn fold_substs(&mut self,
                   substs: &'tcx Substs<'tcx>)
                   -> &'tcx Substs<'tcx> {
        substs.super_fold_with(self)
    }
    /// Folds a function signature.
    fn fold_fn_sig(&mut self,
                   sig: &ty::FnSig<'tcx>)
                   -> ty::FnSig<'tcx> {
        sig.super_fold_with(self)
    }
    /// Folds a region; the main hook for region-rewriting folders.
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        r.super_fold_with(self)
    }
    /// Folds an auto-borrow adjustment.
    fn fold_autoref(&mut self, ar: &adjustment::AutoBorrow<'tcx>)
                    -> adjustment::AutoBorrow<'tcx> {
        ar.super_fold_with(self)
    }
}
/// Read-only analogue of `TypeFolder`: walks a value and reports (via a
/// `bool`) whether the search should stop early. Each method defaults to an
/// identity walk over sub-items.
pub trait TypeVisitor<'tcx> : Sized {
    /// Visits through a binder; override to track binding depth.
    fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
        t.super_visit_with(self)
    }
    /// Visits a type; returns true to stop the walk.
    fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
        t.super_visit_with(self)
    }
    /// Visits a trait reference; returns true to stop the walk.
    fn visit_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>) -> bool {
        trait_ref.super_visit_with(self)
    }
    /// Visits a region; returns true to stop the walk.
    fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
        r.super_visit_with(self)
    }
}
///////////////////////////////////////////////////////////////////////////
// Some sample folders
/// Folder that applies a caller-supplied closure to every type, after the
/// type's own components have already been folded (hence "bottom-up").
pub struct BottomUpFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a, F>
    where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
{
    pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
    pub fldop: F,
}
impl<'a, 'gcx, 'tcx, F> TypeFolder<'gcx, 'tcx> for BottomUpFolder<'a, 'gcx, 'tcx, F>
    where F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
{
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
        self.tcx
    }
    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        // Recurse into the children first, then hand the rebuilt type to the
        // user callback.
        let folded_children = ty.super_fold_with(self);
        (self.fldop)(folded_children)
    }
}
///////////////////////////////////////////////////////////////////////////
// Region folder
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Collects the free and escaping regions in `value` into `region_set`. Returns
    /// whether any late-bound regions were skipped
    pub fn collect_regions<T>(self,
                              value: &T,
                              region_set: &mut FxHashSet<ty::Region<'tcx>>)
                              -> bool
        where T : TypeFoldable<'tcx>
    {
        let mut have_bound_regions = false;
        // The callback receives each free region plus the binding depth at
        // which it was found; `from_depth` rebases it before interning.
        self.fold_regions(value, &mut have_bound_regions, |r, d| {
            region_set.insert(self.mk_region(r.from_depth(d)));
            r
        });
        have_bound_regions
    }
    /// Folds the escaping and free regions in `value` using `f`, and
    /// sets `skipped_regions` to true if any late-bound region was found
    /// and skipped.
    pub fn fold_regions<T,F>(self,
                             value: &T,
                             skipped_regions: &mut bool,
                             mut f: F)
                             -> T
        where F : FnMut(ty::Region<'tcx>, u32) -> ty::Region<'tcx>,
              T : TypeFoldable<'tcx>,
    {
        value.fold_with(&mut RegionFolder::new(self, skipped_regions, &mut f))
    }
}
/// Folds over the substructure of a type, visiting its component
/// types and all regions that occur *free* within it.
///
/// That is, `Ty` can contain function or method types that bind
/// regions at the call site (`ReLateBound`), and occurrences of
/// regions (aka "lifetimes") that are bound within a type are not
/// visited by this folder; only regions that occur free will be
/// visited by `fld_r`.
pub struct RegionFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'gcx, 'tcx>,
    // Set to true whenever a bound (non-free) region is encountered and skipped.
    skipped_regions: &'a mut bool,
    // Number of binders entered so far; used to classify regions as bound or free.
    current_depth: u32,
    // Callback invoked with each free region and the depth at which it was found.
    fld_r: &'a mut (FnMut(ty::Region<'tcx>, u32) -> ty::Region<'tcx> + 'a),
}
impl<'a, 'gcx, 'tcx> RegionFolder<'a, 'gcx, 'tcx> {
    /// Creates a folder that invokes `fld_r` on every free region it
    /// encounters, recording through `skipped_regions` whether any bound
    /// region had to be skipped.
    pub fn new<F>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                  skipped_regions: &'a mut bool,
                  fld_r: &'a mut F) -> RegionFolder<'a, 'gcx, 'tcx>
        where F : FnMut(ty::Region<'tcx>, u32) -> ty::Region<'tcx>
    {
        // Depth starts at 1: the folded value counts as already being inside
        // one level of binder.
        RegionFolder {
            tcx,
            skipped_regions,
            current_depth: 1,
            fld_r,
        }
    }
}
impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFolder<'a, 'gcx, 'tcx> {
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
    // Entering a binder raises the depth for the duration of the sub-fold.
    fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
        self.current_depth += 1;
        let t = t.super_fold_with(self);
        self.current_depth -= 1;
        t
    }
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        match *r {
            // A late-bound region whose De Bruijn index is below the current
            // depth is bound by a binder we already entered: not free, skip it.
            ty::ReLateBound(debruijn, _) if debruijn.depth < self.current_depth => {
                debug!("RegionFolder.fold_region({:?}) skipped bound region (current depth={})",
                       r, self.current_depth);
                *self.skipped_regions = true;
                r
            }
            // Everything else is free (or escapes) and goes to the callback.
            _ => {
                debug!("RegionFolder.fold_region({:?}) folding free region (current_depth={})",
                       r, self.current_depth);
                (self.fld_r)(r, self.current_depth)
            }
        }
    }
}
///////////////////////////////////////////////////////////////////////////
// Late-bound region replacer
// Replaces the escaping regions in a type.
struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'gcx, 'tcx>,
    // Number of binders entered; regions bound exactly at this depth are replaced.
    current_depth: u32,
    // Produces the replacement region for a given bound region.
    fld_r: &'a mut (FnMut(ty::BoundRegion) -> ty::Region<'tcx> + 'a),
    // Memoizes replacements so each distinct BoundRegion maps to one region.
    map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Replaces every region bound by `value`'s binder with the region
    /// returned by `f`, returning the contents together with the mapping from
    /// each replaced `BoundRegion` to its replacement.
    pub fn replace_late_bound_regions<T,F>(self,
                                           value: &Binder<T>,
                                           mut f: F)
                                           -> (T, FxHashMap<ty::BoundRegion, ty::Region<'tcx>>)
        where F : FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
              T : TypeFoldable<'tcx>,
    {
        let mut replacer = RegionReplacer::new(self, &mut f);
        let result = value.skip_binder().fold_with(&mut replacer);
        (result, replacer.map)
    }
    /// Flattens two binding levels into one. So `for<'a> for<'b> Foo`
    /// becomes `for<'a,'b> Foo`.
    pub fn flatten_late_bound_regions<T>(self, bound2_value: &Binder<Binder<T>>)
                                         -> Binder<T>
        where T: TypeFoldable<'tcx>
    {
        let bound0_value = bound2_value.skip_binder().skip_binder();
        let value = self.fold_regions(bound0_value, &mut false,
                                      |region, current_depth| {
            match *region {
                ty::ReLateBound(debruijn, br) if debruijn.depth >= current_depth => {
                    // should be true if no escaping regions from bound2_value
                    assert!(debruijn.depth - current_depth <= 1);
                    // Rebind at the single remaining binder level.
                    self.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(current_depth), br))
                }
                _ => {
                    region
                }
            }
        });
        Binder(value)
    }
    /// Returns the contents of `value` if it binds no regions, `None` otherwise.
    pub fn no_late_bound_regions<T>(self, value: &Binder<T>) -> Option<T>
        where T : TypeFoldable<'tcx>
    {
        if value.0.has_escaping_regions() {
            None
        } else {
            Some(value.0.clone())
        }
    }
    /// Returns a set of all late-bound regions that are constrained
    /// by `value`, meaning that if we instantiate those LBR with
    /// variables and equate `value` with something else, those
    /// variables will also be equated.
    pub fn collect_constrained_late_bound_regions<T>(&self, value: &Binder<T>)
                                                     -> FxHashSet<ty::BoundRegion>
        where T : TypeFoldable<'tcx>
    {
        self.collect_late_bound_regions(value, true)
    }
    /// Returns a set of all late-bound regions that appear in `value` anywhere.
    pub fn collect_referenced_late_bound_regions<T>(&self, value: &Binder<T>)
                                                    -> FxHashSet<ty::BoundRegion>
        where T : TypeFoldable<'tcx>
    {
        self.collect_late_bound_regions(value, false)
    }
    // Shared implementation of the two collectors above.
    fn collect_late_bound_regions<T>(&self, value: &Binder<T>, just_constraint: bool)
                                     -> FxHashSet<ty::BoundRegion>
        where T : TypeFoldable<'tcx>
    {
        let mut collector = LateBoundRegionsCollector::new(just_constraint);
        let result = value.skip_binder().visit_with(&mut collector);
        assert!(!result); // should never have stopped early
        collector.regions
    }
    /// Replace any late-bound regions bound in `value` with `'erased`. Useful in trans but also
    /// method lookup and a few other places where precise region relationships are not required.
    pub fn erase_late_bound_regions<T>(self, value: &Binder<T>) -> T
        where T : TypeFoldable<'tcx>
    {
        self.replace_late_bound_regions(value, |_| self.types.re_erased).0
    }
    /// Rewrite any late-bound regions so that they are anonymous. Region numbers are
    /// assigned starting at 1 and increasing monotonically in the order traversed
    /// by the fold operation.
    ///
    /// The chief purpose of this function is to canonicalize regions so that two
    /// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become
    /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and
    /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization.
    pub fn anonymize_late_bound_regions<T>(self, sig: &Binder<T>) -> Binder<T>
        where T : TypeFoldable<'tcx>,
    {
        let mut counter = 0;
        Binder(self.replace_late_bound_regions(sig, |_| {
            counter += 1;
            self.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1), ty::BrAnon(counter)))
        }).0)
    }
}
impl<'a, 'gcx, 'tcx> RegionReplacer<'a, 'gcx, 'tcx> {
    /// Creates a replacer that maps each late-bound region through `fld_r`,
    /// memoizing results so repeated occurrences of the same bound region
    /// receive the same replacement.
    fn new<F>(tcx: TyCtxt<'a, 'gcx, 'tcx>, fld_r: &'a mut F)
              -> RegionReplacer<'a, 'gcx, 'tcx>
        where F : FnMut(ty::BoundRegion) -> ty::Region<'tcx>
    {
        // Depth 1 means "replace regions bound by the binder we just skipped".
        RegionReplacer {
            tcx,
            fld_r,
            current_depth: 1,
            map: FxHashMap()
        }
    }
}
impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> {
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
    // Entering a binder raises the depth for the duration of the sub-fold.
    fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
        self.current_depth += 1;
        let t = t.super_fold_with(self);
        self.current_depth -= 1;
        t
    }
    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        // Fast path: if no region in `t` escapes past the binder being
        // replaced, nothing inside can need replacement.
        if !t.has_regions_escaping_depth(self.current_depth-1) {
            return t;
        }
        t.super_fold_with(self)
    }
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        match *r {
            // Only regions bound exactly at the binder we are replacing.
            ty::ReLateBound(debruijn, br) if debruijn.depth == self.current_depth => {
                let fld_r = &mut self.fld_r;
                // Memoize so each BoundRegion maps to a single replacement.
                let region = *self.map.entry(br).or_insert_with(|| fld_r(br));
                if let ty::ReLateBound(debruijn1, br) = *region {
                    // If the callback returns a late-bound region,
                    // that region should always use depth 1. Then we
                    // adjust it to the correct depth.
                    assert_eq!(debruijn1.depth, 1);
                    self.tcx.mk_region(ty::ReLateBound(debruijn, br))
                } else {
                    region
                }
            }
            _ => r
        }
    }
}
///////////////////////////////////////////////////////////////////////////
// Region eraser
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Returns an equivalent value with all free regions removed (note
    /// that late-bound regions remain, because they are important for
    /// subtyping, but they are anonymized and normalized as well)..
    pub fn erase_regions<T>(self, value: &T) -> T
        where T : TypeFoldable<'tcx>
    {
        let value1 = value.fold_with(&mut RegionEraser(self));
        debug!("erase_regions({:?}) = {:?}",
               value, value1);
        return value1;
        // Folder local to this function: erases free regions and anonymizes
        // late-bound ones so equivalent binders erase to equal values.
        struct RegionEraser<'a, 'gcx: 'a+'tcx, 'tcx: 'a>(TyCtxt<'a, 'gcx, 'tcx>);
        impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionEraser<'a, 'gcx, 'tcx> {
            fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.0 }
            fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                // Reuse a previously computed erasure when available.
                if let Some(u) = self.tcx().normalized_cache.borrow().get(&ty).cloned() {
                    return u;
                }
                // FIXME(eddyb) should local contexts have a cache too?
                if let Some(ty_lifted) = self.tcx().lift_to_global(&ty) {
                    let tcx = self.tcx().global_tcx();
                    let t_norm = ty_lifted.super_fold_with(&mut RegionEraser(tcx));
                    // Only types living in the global arena are cached.
                    tcx.normalized_cache.borrow_mut().insert(ty_lifted, t_norm);
                    t_norm
                } else {
                    ty.super_fold_with(self)
                }
            }
            fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
                where T : TypeFoldable<'tcx>
            {
                // Anonymize before recursing so region naming cannot affect
                // the erased result.
                let u = self.tcx().anonymize_late_bound_regions(t);
                u.super_fold_with(self)
            }
            fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
                // because late-bound regions affect subtyping, we can't
                // erase the bound/free distinction, but we can replace
                // all free regions with 'erased.
                //
                // Note that we *CAN* replace early-bound regions -- the
                // type system never "sees" those, they get substituted
                // away. In trans, they will always be erased to 'erased
                // whenever a substitution occurs.
                match *r {
                    ty::ReLateBound(..) => r,
                    _ => self.tcx().types.re_erased
                }
            }
        }
    }
}
///////////////////////////////////////////////////////////////////////////
// Region shifter
//
// Shifts the De Bruijn indices on all escaping bound regions by a
// fixed amount. Useful in substitution or when otherwise introducing
// a binding level that is not intended to capture the existing bound
// regions. See comment on `shift_regions_through_binders` method in
// `subst.rs` for more details.
/// Shifts the De Bruijn index of `region` up by `amount` if it is a
/// late-bound region; any other region kind is returned unchanged.
pub fn shift_region(region: ty::RegionKind, amount: u32) -> ty::RegionKind {
    if let ty::ReLateBound(debruijn, br) = region {
        ty::ReLateBound(debruijn.shifted(amount), br)
    } else {
        region
    }
}
/// Interned-region counterpart of `shift_region`: returns a region whose De
/// Bruijn index is shifted up by `amount` when it is late-bound, reusing the
/// input region otherwise (and always when `amount` is zero).
pub fn shift_region_ref<'a, 'gcx, 'tcx>(
    tcx: TyCtxt<'a, 'gcx, 'tcx>,
    region: ty::Region<'tcx>,
    amount: u32)
    -> ty::Region<'tcx>
{
    // A zero shift never changes anything; skip the re-interning.
    if amount == 0 {
        return region;
    }
    match region {
        &ty::ReLateBound(debruijn, br) => {
            tcx.mk_region(ty::ReLateBound(debruijn.shifted(amount), br))
        }
        _ => region,
    }
}
/// Shifts every free region in `value` up by `amount` binding levels.
/// Regions bound within `value` are untouched (the region folder skips them).
pub fn shift_regions<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                        amount: u32,
                                        value: &T) -> T
    where T: TypeFoldable<'tcx>
{
    debug!("shift_regions(value={:?}, amount={})",
           value, amount);
    value.fold_with(&mut RegionFolder::new(tcx, &mut false, &mut |region, _current_depth| {
        shift_region_ref(tcx, region, amount)
    }))
}
/// An "escaping region" is a bound region whose binder is not part of `t`.
///
/// So, for example, consider a type like the following, which has two binders:
///
///    for<'a> fn(x: for<'b> fn(&'a isize, &'b isize))
///    ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ outer scope
///                  ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ inner scope
///
/// This type has *bound regions* (`'a`, `'b`), but it does not have escaping regions, because the
/// binders of both `'a` and `'b` are part of the type itself. However, if we consider the *inner
/// fn type*, that type has an escaping region: `'a`.
///
/// Note that what I'm calling an "escaping region" is often just called a "free region". However,
/// we already use the term "free region". It refers to the regions that we use to represent bound
/// regions on a fn definition while we are typechecking its body.
///
/// To clarify, conceptually there is no particular difference between an "escaping" region and a
/// "free" region. However, there is a big difference in practice. Basically, when "entering" a
/// binding level, one is generally required to do some sort of processing to a bound region, such
/// as replacing it with a fresh/skolemized region, or making an entry in the environment to
/// represent the scope to which it is attached, etc. An escaping region represents a bound region
/// for which this processing has not yet been done.
struct HasEscapingRegionsVisitor {
    // Binders entered so far; a region escapes when its index exceeds this.
    depth: u32,
}
impl<'tcx> TypeVisitor<'tcx> for HasEscapingRegionsVisitor {
    // Entering a binder raises the escape threshold for the sub-visit.
    fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
        self.depth += 1;
        let result = t.super_visit_with(self);
        self.depth -= 1;
        result
    }
    fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
        // Uses the precomputed region depth on the type; no recursion needed.
        t.region_depth > self.depth
    }
    fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
        r.escapes_depth(self.depth)
    }
}
// Visitor that stops (returns true) as soon as any of `flags` is found set.
struct HasTypeFlagsVisitor {
    flags: ty::TypeFlags,
}
impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor {
    fn visit_ty(&mut self, t: Ty) -> bool {
        // Flags are precomputed per type, so one intersection test suffices.
        debug!("HasTypeFlagsVisitor: t={:?} t.flags={:?} self.flags={:?}", t, t.flags, self.flags);
        t.flags.intersects(self.flags)
    }
    fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
        // Regions reached outside of a type (e.g. in a predicate) compute
        // their flags on demand.
        let flags = r.type_flags();
        debug!("HasTypeFlagsVisitor: r={:?} r.flags={:?} self.flags={:?}", r, flags, self.flags);
        flags.intersects(self.flags)
    }
}
/// Collects all the late-bound regions it finds into a hash set.
struct LateBoundRegionsCollector {
    // Binders entered so far; only regions bound at exactly this depth are collected.
    current_depth: u32,
    // Accumulated bound regions.
    regions: FxHashSet<ty::BoundRegion>,
    // When true, skip projection/anon types (their inputs may vanish on normalization).
    just_constrained: bool,
}
impl LateBoundRegionsCollector {
    /// Creates an empty collector. With `just_constrained` set, regions that
    /// appear only inside projection/anon types will not be collected.
    fn new(just_constrained: bool) -> Self {
        LateBoundRegionsCollector {
            current_depth: 1,
            regions: FxHashSet(),
            just_constrained,
        }
    }
}
impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector {
    fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
        // Track nesting so we can recognize regions bound by the binder we
        // started at (depth 1).
        self.current_depth += 1;
        let stopped = t.super_visit_with(self);
        self.current_depth -= 1;
        stopped
    }
    fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
        // if we are only looking for "constrained" region, we have to
        // ignore the inputs to a projection, as they may not appear
        // in the normalized form
        if self.just_constrained {
            match t.sty {
                ty::TyProjection(..) | ty::TyAnon(..) => return false,
                _ => {}
            }
        }
        t.super_visit_with(self)
    }
    fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
        // Record only regions bound exactly at the binder we entered with.
        if let ty::ReLateBound(debruijn, br) = *r {
            if debruijn.depth == self.current_depth {
                self.regions.insert(br);
            }
        }
        // Never stop early: every matching region must be collected.
        false
    }
}
| 36.877907 | 99 | 0.572442 |
1832fee4347172ab2d0c30f2efaf44f6efaa435c | 482 | // force-host
#![feature(plugin_registrar)]
#![feature(rustc_private)]
extern crate rustc;
extern crate rustc_plugin;
extern crate rustc_driver;
use rustc_plugin::Registry;
#[plugin_registrar]
/// Entry point rustc invokes when this compiler plugin is loaded; registers
/// LLVM's built-in global value numbering ("gvn") pass by name.
pub fn plugin_registrar(reg: &mut Registry) {
    // This pass is built in to LLVM.
    //
    // Normally, we would name a pass that was registered through
    // C++ static object constructors in the same .so file as the
    // plugin registrar.
    reg.register_llvm_pass("gvn");
}
| 22.952381 | 65 | 0.711618 |
876516720273ed2ca9fc75e4d9d9988abedd417b | 3,802 | // This file is part of Substrate.
// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Autogenerated weights for pallet_balances
//!
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0
//! DATE: 2021-01-06, STEPS: [50, ], REPEAT: 20, LOW RANGE: [], HIGH RANGE: []
//! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128
// Executed Command:
// target/release/substrate
// benchmark
// --chain=dev
// --steps=50
// --repeat=20
// --pallet=pallet_balances
// --extrinsic=*
// --execution=wasm
// --wasm-execution=compiled
// --heap-pages=4096
// --output=./frame/balances/src/weights.rs
// --template=./.maintain/frame-weight-template.hbs
#![allow(unused_parens)]
#![allow(unused_imports)]
use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}};
use sp_std::marker::PhantomData;
/// Weight functions needed for pallet_balances.
/// Each method returns the benchmarked dispatch weight of the extrinsic of
/// the same name.
pub trait WeightInfo {
	fn transfer() -> Weight;
	fn transfer_keep_alive() -> Weight;
	fn set_balance_creating() -> Weight;
	fn set_balance_killing() -> Weight;
	fn force_transfer() -> Weight;
}
/// Weights for pallet_balances using the Substrate node and recommended hardware.
// NOTE: this file is auto-generated by the benchmark CLI (see header); the
// constants below are measured values and must not be hand-tuned.
pub struct SubstrateWeight<T>(PhantomData<T>);
impl<T: frame_system::Config> WeightInfo for SubstrateWeight<T> {
	fn transfer() -> Weight {
		// Base extrinsic weight plus one storage read and one write.
		(100_698_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn transfer_keep_alive() -> Weight {
		(69_407_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn set_balance_creating() -> Weight {
		(38_489_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn set_balance_killing() -> Weight {
		(48_458_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn force_transfer() -> Weight {
		// Touches both the source and destination accounts: two reads, two writes.
		(99_320_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(2 as Weight))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
	}
}
// For backwards compatibility and tests
// Mirrors `SubstrateWeight` but uses the RocksDb weight constants directly,
// so it can be used without a runtime `Config`.
impl WeightInfo for () {
	fn transfer() -> Weight {
		(100_698_000 as Weight)
			.saturating_add(RocksDbWeight::get().reads(1 as Weight))
			.saturating_add(RocksDbWeight::get().writes(1 as Weight))
	}
	fn transfer_keep_alive() -> Weight {
		(69_407_000 as Weight)
			.saturating_add(RocksDbWeight::get().reads(1 as Weight))
			.saturating_add(RocksDbWeight::get().writes(1 as Weight))
	}
	fn set_balance_creating() -> Weight {
		(38_489_000 as Weight)
			.saturating_add(RocksDbWeight::get().reads(1 as Weight))
			.saturating_add(RocksDbWeight::get().writes(1 as Weight))
	}
	fn set_balance_killing() -> Weight {
		(48_458_000 as Weight)
			.saturating_add(RocksDbWeight::get().reads(1 as Weight))
			.saturating_add(RocksDbWeight::get().writes(1 as Weight))
	}
	fn force_transfer() -> Weight {
		(99_320_000 as Weight)
			.saturating_add(RocksDbWeight::get().reads(2 as Weight))
			.saturating_add(RocksDbWeight::get().writes(2 as Weight))
	}
}
| 33.946429 | 86 | 0.712783 |
2157ae109e109eef348146ce859a881651e0a52f | 5,366 | use std::convert::TryFrom;
use cranelift_codegen::ir;
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// The primitive wasm value types a function parameter or return may have.
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
pub enum ValueType {
    I32,
    I64,
    F32,
    F64,
}
impl Display for ValueType {
    /// Renders the type using its uppercase wasm-style name (e.g. "I32").
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        let name = match self {
            ValueType::I32 => "I32",
            ValueType::I64 => "I64",
            ValueType::F32 => "F32",
            ValueType::F64 => "F64",
        };
        write!(f, "{}", name)
    }
}
/// Reasons a cranelift ABI parameter cannot be mapped to a wasm `ValueType`.
#[derive(Debug)]
pub enum ValueError {
    // The cranelift type has no wasm value-type equivalent.
    Unrepresentable,
    // A parameter marked as VMContext did not have the expected shape.
    InvalidVMContext
}
impl TryFrom<&ir::AbiParam> for ValueType {
    type Error = ValueError;
    /// Maps a plain (Normal-purpose, unextended, unassigned) cranelift ABI
    /// parameter to the corresponding wasm value type. 32/64-bit ints and
    /// floats are representable; anything else is `Unrepresentable`.
    fn try_from(value: &ir::AbiParam) -> Result<Self, Self::Error> {
        match value {
            // Only a completely "vanilla" parameter can correspond to a wasm value.
            ir::AbiParam {
                value_type: cranelift_ty,
                purpose: ir::ArgumentPurpose::Normal,
                extension: ir::ArgumentExtension::None,
                location: ir::ArgumentLoc::Unassigned
            } => {
                let size = cranelift_ty.bits();
                if cranelift_ty.is_int() {
                    match size {
                        32 => Ok(ValueType::I32),
                        64 => Ok(ValueType::I64),
                        _ => Err(ValueError::Unrepresentable),
                    }
                } else if cranelift_ty.is_float() {
                    match size {
                        32 => Ok(ValueType::F32),
                        64 => Ok(ValueType::F64),
                        _ => Err(ValueError::Unrepresentable),
                    }
                } else {
                    // Vectors, booleans, references, etc. have no wasm value type here.
                    Err(ValueError::Unrepresentable)
                }
            },
            _ => Err(ValueError::Unrepresentable)
        }
    }
}
/// A signature for a function in a wasm module.
///
/// Note that this does not explicitly name VMContext as a parameter! It is assumed that all wasm
/// functions take VMContext as their first parameter.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct Signature {
    pub params: Vec<ValueType>,
    // In the future, wasm may permit this to be a Vec of ValueType
    pub ret_ty: Option<ValueType>,
}
impl Display for Signature {
    /// Renders the signature as `(P1, P2, ...) -> R`, with `()` standing in
    /// for a missing return type.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        // Render the parameter list as a comma-separated tuple.
        let rendered: Vec<String> = self.params.iter().map(|p| p.to_string()).collect();
        write!(f, "({}) -> ", rendered.join(", "))?;
        match self.ret_ty {
            Some(ty) => write!(f, "{}", ty),
            None => write!(f, "()"),
        }
    }
}
/// Builds a [`Signature`] from a Rust-like arrow syntax, e.g.
/// `lucet_signature!((I32, I64) -> F32)` or `lucet_signature!((() -> ()))`.
#[macro_export]
macro_rules! lucet_signature {
    // No parameters, no return value.
    ((() -> ())) => {
        $crate::Signature {
            params: vec![],
            ret_ty: None
        }
    };
    // Parameters, no return value.
    (($($arg_ty:ident),*) -> ()) => {
        $crate::Signature {
            params: vec![$($crate::ValueType::$arg_ty),*],
            ret_ty: None,
        }
    };
    // Parameters and a single return value.
    (($($arg_ty:ident),*) -> $ret_ty:ident) => {
        $crate::Signature {
            params: vec![$($crate::ValueType::$arg_ty),*],
            ret_ty: Some($crate::ValueType::$ret_ty),
        }
    };
}
/// Reasons a cranelift signature cannot be converted into a wasm [`Signature`].
#[derive(Debug)]
pub enum SignatureError {
    // A specific parameter or return slot could not be mapped.
    BadElement(ir::AbiParam, ValueError),
    // Structurally invalid: missing VMContext or multiple return values.
    BadSignature
}
impl TryFrom<&ir::Signature> for Signature {
    type Error = SignatureError;
    /// Converts a cranelift signature to a wasm [`Signature`], requiring the
    /// first parameter to be a 64-bit VMContext pointer (which is dropped from
    /// the resulting parameter list) and at most one return value.
    fn try_from(value: &ir::Signature) -> Result<Self, Self::Error> {
        let mut params: Vec<ValueType> = Vec::new();
        let mut param_iter = value.params.iter();
        // Enforce that the first parameter is VMContext, as Signature assumes.
        // Even functions declared no-arg take VMContext in reality.
        if let Some(param) = param_iter.next() {
            match &param {
                ir::AbiParam {
                    value_type: value,
                    purpose: ir::ArgumentPurpose::VMContext,
                    extension: ir::ArgumentExtension::None,
                    location: ir::ArgumentLoc::Unassigned
                } => {
                    // NOTE(review): VMContext is assumed to be a 64-bit
                    // pointer-sized integer here — confirm on 32-bit targets.
                    if value.is_int() && value.bits() == 64 {
                        // this is VMContext, so we can move on.
                    } else {
                        return Err(SignatureError::BadElement(param.to_owned(), ValueError::InvalidVMContext));
                    }
                },
                _ => {
                    return Err(SignatureError::BadElement(param.to_owned(), ValueError::InvalidVMContext));
                }
            }
        } else {
            return Err(SignatureError::BadSignature);
        }
        // Remaining parameters must each map to a wasm value type.
        for param in param_iter {
            let value_ty = ValueType::try_from(param)
                .map_err(|e| SignatureError::BadElement(param.clone(), e))?;
            params.push(value_ty);
        }
        // wasm (at this version) permits at most one return value.
        let ret_ty: Option<ValueType> = match value.returns.as_slice() {
            &[] => None,
            &[ref ret_ty] => {
                let value_ty = ValueType::try_from(ret_ty)
                    .map_err(|e| SignatureError::BadElement(ret_ty.clone(), e))?;
                Some(value_ty)
            },
            _ => {
                return Err(SignatureError::BadSignature);
            }
        };
        Ok(Signature { params, ret_ty })
    }
}
| 30.316384 | 111 | 0.493291 |
76f09a7225f95f3b84795433eafc1225e15d6aca | 12,787 | // This file is auto-generated by rute_gen. DO NOT EDIT.
use std::cell::Cell;
use std::rc::Rc;
#[allow(unused_imports)]
use std::marker::PhantomData;
#[allow(unused_imports)]
use std::os::raw::c_void;
#[allow(unused_imports)]
use std::mem::transmute;
#[allow(unused_imports)]
use std::ffi::{CStr, CString};
use rute_ffi_base::*;
#[allow(unused_imports)]
use auto::*;
/// **Notice these docs are heavy WIP and not very relevent yet**
///
/// In Qt there is a often a need to represent the layout of the pixels in a
/// graphics buffer. Internally QPixelFormat stores everything in a 64 bit
/// datastructure. This gives performance but also some limitations.
///
/// QPixelFormat can describe 5 color channels and 1 alpha channel, each can use
/// 6 bits to describe the size of the color channel.
///
/// The position of the alpha channel is described with a separate enum. This is
/// to make it possible to describe QImage formats like ARGB32, and also
/// describe typical OpenGL formats like RBGA8888.
///
/// How pixels are suppose to be read is determined by the TypeInterpretation
/// enum. It describes if color values are suppose to be read byte per byte,
/// or if a pixel is suppose to be read as a complete int and then masked.
/// **See also:** TypeInterpretation
///
/// There is no support for describing YUV's macro pixels. Instead a list of YUV
/// formats has been made. When a QPixelFormat is describing a YUV format, the
/// bitsPerPixel value has been deduced by the YUV Layout enum. Also, the color
/// channels should all be set to zero except the fifth color channel that
/// should store the bitsPerPixel value.
/// # Licence
///
/// The documentation is an adoption of the original [Qt Documentation](http://doc.qt.io/) and provided herein is licensed under the terms of the [GNU Free Documentation License version 1.3](http://www.gnu.org/licenses/fdl.html) as published by the Free Software Foundation.
// NOTE(review): this type lives in an auto-generated file ("DO NOT EDIT");
// comments here document the generated pattern only.
#[derive(Clone)]
pub struct PixelFormat<'a> {
    // Shared handle to the C-side Qt object pointer; None once released.
    #[doc(hidden)]
    pub data: Rc<Cell<Option<*const RUBase>>>,
    // FFI vtable of functions operating on the wrapped object.
    #[doc(hidden)]
    pub all_funcs: *const RUPixelFormatAllFuncs,
    // True when this wrapper is responsible for destroying the C-side object.
    #[doc(hidden)]
    pub owned: bool,
    // Ties the wrapper's lifetime to the surrounding Rute context.
    #[doc(hidden)]
    pub _marker: PhantomData<::std::cell::Cell<&'a ()>>,
}
impl<'a> PixelFormat<'a> {
    /// Creates a new, owned PixelFormat by asking the C side to allocate one.
    pub fn new() -> PixelFormat<'a> {
        let data = Rc::new(Cell::new(None));
        let ffi_data = unsafe {
            // The Rc is leaked into the C side via into_raw; the delete
            // callback is expected to reclaim it when the object is destroyed.
            ((*rute_ffi_get()).create_pixel_format)(
                ::std::ptr::null(),
                transmute(rute_object_delete_callback as usize),
                Rc::into_raw(data.clone()) as *const c_void,
            )
        };
        data.set(Some(ffi_data.qt_data));
        PixelFormat {
            data,
            all_funcs: ffi_data.all_funcs,
            owned: true,
            _marker: PhantomData,
        }
    }
    /// Wraps FFI data whose host_data pointer originated from `Rc::into_raw`
    /// in `new`; the resulting wrapper does not own the C-side object.
    #[allow(dead_code)]
    pub(crate) fn new_from_rc(ffi_data: RUPixelFormat) -> PixelFormat<'a> {
        PixelFormat {
            // NOTE(review): reconstructs the Rc from the raw host_data pointer;
            // assumes the generator balances into_raw/from_raw calls — confirm.
            data: unsafe { Rc::from_raw(ffi_data.host_data as *const Cell<Option<*const RUBase>>) },
            all_funcs: ffi_data.all_funcs,
            owned: false,
            _marker: PhantomData,
        }
    }
    /// Wraps FFI data in an owning wrapper with a fresh host-side Rc.
    #[allow(dead_code)]
    pub(crate) fn new_from_owned(ffi_data: RUPixelFormat) -> PixelFormat<'a> {
        PixelFormat {
            data: Rc::new(Cell::new(Some(ffi_data.qt_data as *const RUBase))),
            all_funcs: ffi_data.all_funcs,
            owned: true,
            _marker: PhantomData,
        }
    }
    /// Wraps FFI data in a non-owning wrapper (e.g. for values returned by
    /// reference from Qt) with a fresh host-side Rc.
    #[allow(dead_code)]
    pub(crate) fn new_from_temporary(ffi_data: RUPixelFormat) -> PixelFormat<'a> {
        PixelFormat {
            data: Rc::new(Cell::new(Some(ffi_data.qt_data as *const RUBase))),
            all_funcs: ffi_data.all_funcs,
            owned: false,
            _marker: PhantomData,
        }
    }
///
/// Accessor function for getting the colorModel.
pub fn color_model(&self) -> ColorModel {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).color_model)(obj_data);
let ret_val = { transmute::<u32, ColorModel>(ret_val) };
ret_val
}
}
///
/// Accessor function for getting the channelCount. Channel Count is deduced
/// by color channels with a size > 0 and if the size of the alpha channel is > 0.
pub fn channel_count(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).channel_count)(obj_data);
ret_val
}
}
///
/// Accessor function for the size of the red color channel.
pub fn red_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).red_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the size of the green color channel.
pub fn green_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).green_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the size of the blue color channel.
pub fn blue_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).blue_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the cyan color channel.
pub fn cyan_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).cyan_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the megenta color channel.
pub fn magenta_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).magenta_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the yellow color channel.
pub fn yellow_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).yellow_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the black/key color channel.
pub fn black_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).black_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the hue channel size.
pub fn hue_size(&self) -> u8 {
let (obj_data, funcs) = self.get_pixel_format_obj_funcs();
unsafe {
let ret_val = ((*funcs).hue_size)(obj_data);
ret_val
}
}
///
/// Accessor function for the saturation channel size (HSL/HSV formats).
pub fn saturation_size(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).saturation_size)(data) }
}
///
/// Accessor function for the lightness channel size (HSL formats).
pub fn lightness_size(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).lightness_size)(data) }
}
///
/// Accessor function for the brightness channel size (HSV formats).
pub fn brightness_size(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).brightness_size)(data) }
}
///
/// Accessor function for the alpha channel size.
pub fn alpha_size(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).alpha_size)(data) }
}
///
/// Accessor function for the bits used per pixel. This is the sum of the
/// color channel sizes plus the size of the alpha channel.
pub fn bits_per_pixel(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).bits_per_pixel)(data) }
}
///
/// Accessor function for alphaUsage.
pub fn alpha_usage(&self) -> AlphaUsage {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // The native side returns a raw u32 discriminant; reinterpret it as the
    // #[repr(u32)] enum. Assumes only valid values are produced, as the
    // original binding did.
    let raw = unsafe { ((*vtable).alpha_usage)(data) };
    unsafe { transmute::<u32, AlphaUsage>(raw) }
}
///
/// Accessor function for alphaPosition.
pub fn alpha_position(&self) -> AlphaPosition {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // Reinterpret the raw u32 discriminant from the C side as the
    // #[repr(u32)] enum (assumes valid values, as the original did).
    let raw = unsafe { ((*vtable).alpha_position)(data) };
    unsafe { transmute::<u32, AlphaPosition>(raw) }
}
///
/// Accessor function for the AlphaPremultiplied enum. This indicates whether
/// the alpha channel is multiplied in to the color channels.
pub fn premultiplied(&self) -> AlphaPremultiplied {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // Reinterpret the raw u32 discriminant from the C side as the
    // #[repr(u32)] enum (assumes valid values, as the original did).
    let raw = unsafe { ((*vtable).premultiplied)(data) };
    unsafe { transmute::<u32, AlphaPremultiplied>(raw) }
}
///
/// Accessor function for the type representation of a color channel or a pixel.
///
/// **See also:** TypeInterpretation
pub fn type_interpretation(&self) -> TypeInterpretation {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // Reinterpret the raw u32 discriminant from the C side as the
    // #[repr(u32)] enum (assumes valid values, as the original did).
    let raw = unsafe { ((*vtable).type_interpretation)(data) };
    unsafe { transmute::<u32, TypeInterpretation>(raw) }
}
///
/// The byte order is almost always set to the byte order of the current
/// system. However, it can be useful to describe some YUV formats. This
/// function should never return QPixelFormat::CurrentSystemEndian, as that
/// value is translated to a concrete endian value in the constructor.
pub fn byte_order(&self) -> ByteOrder {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // Reinterpret the raw u32 discriminant from the C side as the
    // #[repr(u32)] enum (assumes valid values, as the original did).
    let raw = unsafe { ((*vtable).byte_order)(data) };
    unsafe { transmute::<u32, ByteOrder>(raw) }
}
///
/// Accessor function for the YUVLayout. It is difficult to describe the color
/// channels of a YUV pixel format since the YUV color model uses macro pixels,
/// so the layout of the pixels is stored as an enum instead.
pub fn yuv_layout(&self) -> YUVLayout {
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    // Reinterpret the raw u32 discriminant from the C side as the
    // #[repr(u32)] enum (assumes valid values, as the original did).
    let raw = unsafe { ((*vtable).yuv_layout)(data) };
    unsafe { transmute::<u32, YUVLayout>(raw) }
}
/// Raw sub-enum discriminator of this pixel format (meaning defined by the
/// native side; returned untyped as a `u8`).
pub fn sub_enum(&self) -> u8 {
    // Forward to the native accessor via the generated function table.
    let (data, vtable) = self.get_pixel_format_obj_funcs();
    unsafe { ((*vtable).sub_enum)(data) }
}
/// Builder-style finalizer: returns a clone of this pixel format.
pub fn build(&self) -> Self {
    self.clone()
}
}
/// Internal plumbing trait: exposes the raw FFI object pointer and the
/// generated per-type function table used by the accessor methods above.
/// Hidden from docs because it is an implementation detail of the bindings.
pub trait PixelFormatTrait<'a> {
    #[inline]
    #[doc(hidden)]
    fn get_pixel_format_obj_funcs(&self) -> (*const RUBase, *const RUPixelFormatFuncs);
}
impl<'a> PixelFormatTrait<'a> for PixelFormat<'a> {
    #[doc(hidden)]
    fn get_pixel_format_obj_funcs(&self) -> (*const RUBase, *const RUPixelFormatFuncs) {
        // Panics if the stored object handle is absent (`unwrap`) —
        // NOTE(review): presumably this means the native object was dropped;
        // confirm against the binding generator's contract.
        let obj = self.data.get().unwrap();
        // NOTE(review): assumes `all_funcs` is a valid, non-null pointer to
        // the generated function table for the object's lifetime — TODO confirm.
        unsafe { (obj, (*self.all_funcs).pixel_format_funcs) }
    }
}
/// Color model of a pixel format. `#[repr(u32)]` with explicit discriminants;
/// these values cross the FFI boundary (sibling enums in this file are
/// `transmute`d from raw `u32`s), so do not reorder or renumber variants.
#[repr(u32)]
pub enum ColorModel {
    Rgb = 0,
    Bgr = 1,
    Indexed = 2,
    Grayscale = 3,
    Cmyk = 4,
    Hsl = 5,
    Hsv = 6,
    Yuv = 7,
    Alpha = 8,
}
/// Whether the alpha channel is used. Transmuted from a raw FFI `u32` in
/// `alpha_usage()`; discriminants must stay in sync with the native side.
#[repr(u32)]
pub enum AlphaUsage {
    UsesAlpha = 0,
    IgnoresAlpha = 1,
}
/// Position of the alpha channel relative to the color channels. Transmuted
/// from a raw FFI `u32` in `alpha_position()`; keep discriminants stable.
#[repr(u32)]
pub enum AlphaPosition {
    AtBeginning = 0,
    AtEnd = 1,
}
/// Whether the alpha channel is premultiplied into the color channels.
/// Transmuted from a raw FFI `u32` in `premultiplied()`; keep values stable.
#[repr(u32)]
pub enum AlphaPremultiplied {
    NotPremultiplied = 0,
    Premultiplied = 1,
}
/// How a color channel or whole pixel is represented in memory. Transmuted
/// from a raw FFI `u32` in `type_interpretation()`; keep values stable.
#[repr(u32)]
pub enum TypeInterpretation {
    UnsignedInteger = 0,
    UnsignedShort = 1,
    UnsignedByte = 2,
    FloatingPoint = 3,
}
/// Macro-pixel layouts for YUV pixel formats (see `yuv_layout()`).
/// Transmuted from a raw FFI `u32`; keep discriminants stable.
#[repr(u32)]
pub enum YUVLayout {
    YuV444 = 0,
    YuV422 = 1,
    YuV411 = 2,
    YuV420P = 3,
    YuV420Sp = 4,
    YV12 = 5,
    Uyvy = 6,
    Yuyv = 7,
    NV12 = 8,
    NV21 = 9,
    ImC1 = 10,
    ImC2 = 11,
    ImC3 = 12,
    ImC4 = 13,
    Y8 = 14,
    Y16 = 15,
}
/// Byte order of the pixel data. Per the `byte_order()` docs above,
/// `CurrentSystemEndian` is a constructor-only input and is never returned.
/// Transmuted from a raw FFI `u32`; keep discriminants stable.
#[repr(u32)]
pub enum ByteOrder {
    LittleEndian = 0,
    BigEndian = 1,
    CurrentSystemEndian = 2,
}
| 31.41769 | 274 | 0.594275 |
9c57516a3ba4737b71fec963ab2cae6ae296718f | 1,682 | use codespan::{ByteIndex, ByteSpan};
use ast::*;
/// Shorthand alias used throughout these span helpers.
pub type Span = ByteSpan;
/// Generates an `IntoSpan` impl for a type whose span is stored directly in
/// a `span` field, avoiding repeated boilerplate.
macro_rules! impl_into_span {
    ($type:ident) => {
        impl IntoSpan for $type {
            fn into_span(&self) -> Span { self.span }
        }
    }
}
/// Types that can report the source span they originated from.
pub trait IntoSpan {
    fn into_span(&self) -> Span;
}
impl IntoSpan for (ByteIndex, ByteIndex) {
    /// Builds a `Span` from a raw (start, end) byte-index pair.
    fn into_span(&self) -> Span {
        Span::new(self.0, self.1)
    }
}
impl IntoSpan for Dec {
    /// Returns the span recorded in whichever declaration variant this is.
    fn into_span(&self) -> Span {
        match self {
            Dec::Var { span, .. } => *span,
            Dec::Fun(_, span) => *span,
            Dec::Type(_, span) => *span,
        }
    }
}
// These AST node types store their span in a `span` field, so their
// `IntoSpan` impls are generated by the macro defined earlier in this file.
impl_into_span!(FunDec);
impl_into_span!(FieldDec);
impl_into_span!(TypeDec);
impl_into_span!(Field);
impl IntoSpan for Type {
    /// Returns the span recorded in whichever type variant this is.
    fn into_span(&self) -> Span {
        match self {
            Type::Name(_, span) => *span,
            Type::Rec(_, span) => *span,
            Type::Arr(_, _, span) => *span,
        }
    }
}
impl IntoSpan for Var {
    /// Returns the span recorded in whichever variable-access variant this is.
    fn into_span(&self) -> Span {
        match self {
            Var::Simple(_, span) => *span,
            Var::Field(_, _, _, span) => *span,
            Var::Index(_, _, span) => *span,
        }
    }
}
impl IntoSpan for Exp {
    /// Returns the span recorded in whichever expression variant this is.
    /// Every variant carries its span as the last positional field or as a
    /// named `span` field.
    fn into_span(&self) -> Span {
        match self {
            Exp::Break(span) => *span,
            Exp::Nil(span) => *span,
            Exp::Var(_, span) => *span,
            Exp::Int(_, span) => *span,
            Exp::Str(_, span) => *span,
            Exp::Call { span, .. } => *span,
            Exp::Neg(_, span) => *span,
            Exp::Bin { span, .. } => *span,
            Exp::Rec { span, .. } => *span,
            Exp::Seq(_, span) => *span,
            Exp::Ass { span, .. } => *span,
            Exp::If { span, .. } => *span,
            Exp::While { span, .. } => *span,
            Exp::For { span, .. } => *span,
            Exp::Let { span, .. } => *span,
            Exp::Arr { span, .. } => *span,
        }
    }
}
| 20.512195 | 53 | 0.477408 |
092c276078e84599f0e32495a17a9c69aa8258f9 | 2,286 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use libra_metrics::{register_histogram, register_int_counter, Histogram, IntCounter};
use once_cell::sync::Lazy;
/// Histogram of chunk execute-and-commit latency in seconds.
/// Registered lazily on first access via `Lazy`.
pub static LIBRA_EXECUTOR_EXECUTE_AND_COMMIT_CHUNK_SECONDS: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_execute_and_commit_chunk_seconds",
        // metric description
        "The time spent in seconds of chunk execution and committing in Libra executor"
    )
    // Registration only fails on duplicate/invalid metric definitions,
    // which would be a programming error — hence unwrap.
    .unwrap()
});
/// Histogram of VM-level block execution latency in seconds.
pub static LIBRA_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_vm_execute_block_seconds",
        // metric description
        "The time spent in seconds of vm block execution in Libra executor"
    )
    .unwrap()
});
/// Monotonic counter of executor errors (cumulative since process start).
pub static LIBRA_EXECUTOR_ERRORS: Lazy<IntCounter> = Lazy::new(|| {
    register_int_counter!("libra_executor_error_total", "Cumulative number of errors").unwrap()
});
/// Histogram of end-to-end block execution latency in seconds.
pub static LIBRA_EXECUTOR_EXECUTE_BLOCK_SECONDS: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_execute_block_seconds",
        // metric description
        "The total time spent in seconds of block execution in Libra executor "
    )
    .unwrap()
});
/// Histogram of block-commit latency in seconds.
/// Registered lazily on first access via `Lazy`.
pub static LIBRA_EXECUTOR_COMMIT_BLOCKS_SECONDS: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_commit_blocks_seconds",
        // metric description — typo fixed: "commiting" -> "committing"
        // (help text only; the metric name above is unchanged, so no
        // dashboards or queries break)
        "The total time spent in seconds of committing blocks in Libra executor "
    )
    .unwrap()
});
/// Histogram of `save_transactions` storage-call latency in seconds.
pub static LIBRA_EXECUTOR_SAVE_TRANSACTIONS_SECONDS: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_save_transactions_seconds",
        // metric description
        "The time spent in seconds of calling save_transactions to storage in Libra executor"
    )
    .unwrap()
});
/// Histogram over the number of transactions saved per storage call
/// (a count distribution, not a latency, despite being a Histogram).
pub static LIBRA_EXECUTOR_TRANSACTIONS_SAVED: Lazy<Histogram> = Lazy::new(|| {
    register_histogram!(
        // metric name
        "libra_executor_transactions_saved",
        // metric description
        "The number of transactions saved to storage in Libra executor"
    )
    .unwrap()
});
| 32.657143 | 95 | 0.692038 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.