hexsha (stringlengths 40–40) | size (int64 4–1.05M) | content (stringlengths 4–1.05M) | avg_line_length (float64 1.33–100) | max_line_length (int64 1–1k) | alphanum_fraction (float64 0.25–1)
---|---|---|---|---|---
e42c89e67f4e5fe38dbb1cb447963c939cf98307 | 753 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
    let a: String = "this \
    is a test".to_string();
    let b: String =
        "this \
        is \
        another \
        test".to_string();
    assert_eq!(a, "this is a test".to_string());
    assert_eq!(b, "this is another test".to_string());
}
| 34.227273 | 68 | 0.649402 |
d9d1d5dd54c40d73648caa987a2f4a45053cdf96 | 17,299 | use super::CanisterId;
use hex::decode;
use ic_types::{
ic00,
ic00::Payload,
messages::{CanisterInstallMode, SignedIngress, UserQuery},
time::current_time_and_expiry_time,
PrincipalId, UserId,
};
use std::{
convert::TryFrom,
fmt,
fs::File,
io::{self, Read},
str::Chars,
string::FromUtf8Error,
};
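/// A single parsed drun input line: an ingress call, a read-only query, a
/// code installation, or a canister creation request.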
#[derive(Debug, PartialEq)]
pub(crate) enum Message {
Ingress(SignedIngress),
Query(UserQuery),
Install(SignedIngress),
Create(SignedIngress),
}
#[derive(Debug)]
pub enum LineIteratorError {
IoError(io::Error),
BufferLengthExceeded(Vec<u8>),
FromUtf8Error(FromUtf8Error),
}
impl fmt::Display for LineIteratorError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use LineIteratorError::*;
match self {
IoError(e) => write!(f, "IO error: {}", e),
BufferLengthExceeded(_) => write!(f, "Line length exceeds buffer length"),
FromUtf8Error(e) => write!(f, "UTF-8 conversion error: {}", e),
}
}
}
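/// Maximum number of bytes buffered while scanning for a newline (16 MiB);
/// longer lines are reported as `BufferLengthExceeded`.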
const LINE_ITERATOR_BUFFER_SIZE: usize = 16_777_216;
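/// Iterator over `\n`-separated lines of `inner`, holding at most
/// `LINE_ITERATOR_BUFFER_SIZE` bytes in memory at a time.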
struct LineIterator<R: Read> {
inner: R,
buffer: Vec<u8>,
}
impl<R: Read> LineIterator<R> {
fn new(inner: R) -> Self {
Self {
inner,
buffer: vec![],
}
}
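    /// Splits the internal buffer at the first newline: returns the text
    /// before it (if any) as a line and keeps the remainder buffered.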
fn split_line(&mut self) -> Option<Result<String, LineIteratorError>> {
let parts: Vec<_> = self
.buffer
.splitn(2, |c| *c == b'\n')
.map(|slice| slice.to_vec())
.collect();
if parts.len() == 1 {
return None;
}
assert!(parts.len() == 2);
let mut iter = parts.into_iter();
let line = iter.next().unwrap();
self.buffer = iter.next().unwrap();
Some(String::from_utf8(line).map_err(LineIteratorError::FromUtf8Error))
}
}
impl<R: Read> Iterator for LineIterator<R> {
type Item = Result<String, LineIteratorError>;
fn next(&mut self) -> Option<Self::Item> {
if !self.buffer.is_empty() {
let line = self.split_line();
if line.is_some() {
return line;
}
}
loop {
match self
.inner
.by_ref()
.take((LINE_ITERATOR_BUFFER_SIZE - self.buffer.len()) as _)
.read_to_end(&mut self.buffer)
{
Err(e) => return Some(Err(LineIteratorError::IoError(e))),
Ok(0) => {
if self.buffer.is_empty() {
return None;
}
let bytes = self.buffer.clone();
self.buffer.clear();
return Some(
String::from_utf8(bytes).map_err(LineIteratorError::FromUtf8Error),
);
}
Ok(_) => match self.split_line() {
Some(line) => return Some(line),
None if self.buffer.len() == LINE_ITERATOR_BUFFER_SIZE => {
let bytes = self.buffer.clone();
self.buffer.clear();
return Some(Err(LineIteratorError::BufferLengthExceeded(bytes)));
}
None => continue,
},
}
}
}
}
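/// Opens `filename` and returns an iterator that yields one parsed `Message`
/// per line, skipping empty lines and `#` comments; failures are reported
/// with the offending line's number.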
pub(crate) fn msg_stream_from_file(
filename: &str,
) -> Result<impl Iterator<Item = Result<Message, String>>, String> {
let f = File::open(filename).map_err(|e| e.to_string())?;
let line_iterator = LineIterator::new(f);
Ok(line_iterator
.enumerate()
// let's skip commented ('#') and empty lines
.filter(|(_idx, line)| match line {
Ok(s) => !s.is_empty() && !s.starts_with('#'),
_ => true,
})
.map(|(i, line)| match line {
Ok(line) => {
parse_message(&line, i as u64).map_err(|e| format!("Line {}: {}", i + 1, e))
}
Err(e) => Err(format!("Error while reading line {}: {}", i, e)),
}))
}
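/// Parses a single line into a `Message`. Recognized forms:
/// `ingress <canister-id> <method> <payload>`,
/// `query <canister-id> <method> <payload>`, `create`, and
/// `install`/`reinstall`/`upgrade <canister-id> <wasm-file> <payload>`.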
fn parse_message(s: &str, nonce: u64) -> Result<Message, String> {
let s = s.trim_end();
let tokens: Vec<&str> = s.splitn(4, char::is_whitespace).collect();
match &tokens[..] {
[] => Err("Too few arguments.".to_string()),
["ingress", canister_id, method_name, payload] => {
use ic_test_utilities::types::messages::SignedIngressBuilder;
let canister_id = parse_canister_id(canister_id)?;
let method_name = validate_method_name(method_name)?;
let method_payload = parse_octet_string(payload)?;
let signed_ingress = SignedIngressBuilder::new()
// `source` should become a self-authenticating id according
// to https://sdk.dfinity.org/docs/interface-spec/index.html#id-classes
.canister_id(canister_id)
.method_name(method_name)
.method_payload(method_payload)
.nonce(nonce)
.build();
Ok(Message::Ingress(signed_ingress))
}
["query", canister_id, method_name, payload] => Ok(Message::Query(UserQuery {
source: UserId::from(PrincipalId::new_anonymous()),
receiver: parse_canister_id(canister_id)?,
method_name: validate_method_name(method_name)?,
method_payload: parse_octet_string(payload)?,
ingress_expiry: current_time_and_expiry_time().1.as_nanos_since_unix_epoch(),
nonce: Some(nonce.to_le_bytes().to_vec()),
})),
["create"] => parse_create(nonce),
["install", canister_id, wasm_file, payload] => {
parse_install(nonce, canister_id, payload, wasm_file, "install")
}
["reinstall", canister_id, wasm_file, payload] => {
parse_install(nonce, canister_id, payload, wasm_file, "reinstall")
}
["upgrade", canister_id, wasm_file, payload] => {
parse_install(nonce, canister_id, payload, wasm_file, "upgrade")
}
_ => Err(format!(
"Failed to parse line {}, don't have a pattern to match this with",
s
)),
}
}
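/// Parses the textual representation of a principal (e.g.
/// `ryjl3-tyaaa-aaaaa-aaaba-cai`) into a `CanisterId`.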
fn parse_canister_id(canister_id: &str) -> Result<CanisterId, String> {
use std::str::FromStr;
match PrincipalId::from_str(canister_id) {
Ok(id) => match CanisterId::new(id) {
Ok(id) => Ok(id),
Err(err) => Err(format!(
"Failed to convert {} to canister id with {}",
canister_id, err
)),
},
Err(err) => Err(format!(
"Failed to convert {} to principal id with {}",
canister_id, err
)),
}
}
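/// Builds the ingress message that asks the management canister (`IC_00`) to
/// provisionally create a new canister with cycles.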
fn parse_create(nonce: u64) -> Result<Message, String> {
use ic_test_utilities::types::messages::SignedIngressBuilder;
let signed_ingress = SignedIngressBuilder::new()
.method_name(ic00::Method::ProvisionalCreateCanisterWithCycles)
.canister_id(ic00::IC_00)
.method_payload(ic00::ProvisionalCreateCanisterWithCyclesArgs::new(None).encode())
.nonce(nonce)
.build();
Ok(Message::Create(signed_ingress))
}
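/// Builds an `InstallCode` ingress message for `canister_id`, reading the
/// Wasm module from `wasm_file`; `mode` must be `install`, `reinstall`, or
/// `upgrade`.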
fn parse_install(
nonce: u64,
canister_id: &str,
payload: &str,
wasm_file: &str,
mode: &str,
) -> Result<Message, String> {
use ic_test_utilities::types::messages::SignedIngressBuilder;
let mut wasm_data = Vec::new();
let mut wasm_file = File::open(wasm_file)
.map_err(|e| format!("Could not open wasm file: {} - Error: {}", wasm_file, e))?;
wasm_file
.read_to_end(&mut wasm_data)
.map_err(|e| e.to_string())?;
let canister_id = parse_canister_id(canister_id)?;
let payload = parse_octet_string(payload)?;
let signed_ingress = SignedIngressBuilder::new()
// `source` should become a self-authenticating id according
// to https://sdk.dfinity.org/docs/interface-spec/index.html#id-classes
.canister_id(ic00::IC_00)
.method_name(ic00::Method::InstallCode)
.method_payload(
ic00::InstallCodeArgs::new(
CanisterInstallMode::try_from(mode.to_string()).unwrap(),
canister_id,
wasm_data,
payload,
None,
                Some(8 * 1024 * 1024 * 1024), // drun users don't care about memory limits
None,
)
.encode(),
)
.nonce(nonce)
.build();
Ok(Message::Install(signed_ingress))
}
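/// Checks that `method_name` is a non-empty ASCII identifier: a letter or
/// `_` followed by letters, digits, or underscores.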
fn validate_method_name(method_name: &str) -> Result<String, String> {
fn is_ident_start(c: char) -> bool {
c.is_ascii() && (c.is_alphabetic() || c == '_')
}
fn is_ident_tail(c: char) -> bool {
c.is_ascii() && (c.is_alphanumeric() || c == '_')
}
let mut chars = method_name.chars();
let is_legal_start = chars.next().map(is_ident_start).unwrap_or(false);
let is_legal_tail = chars.all(is_ident_tail);
if !(is_legal_start && is_legal_tail) {
Err(format!("Illegal method name: {}.", method_name))
} else {
Ok(String::from(method_name))
}
}
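/// Parses a payload literal: a double-quoted ASCII string if it starts with
/// `"`, otherwise a `0x`-prefixed hex string.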
fn parse_octet_string(input_str: &str) -> Result<Vec<u8>, String> {
if input_str.starts_with('"') {
parse_quoted(input_str)
} else {
parse_hex(input_str)
}
}
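/// Decodes a double-quoted ASCII string into raw bytes, honoring the escape
/// sequences `\"`, `\\`, `\xHH` (two hex digits), and `\bNNNNNNNN` (eight
/// binary digits).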
fn parse_quoted(quoted_str: &str) -> Result<Vec<u8>, String> {
if !quoted_str.is_ascii() {
return Err(String::from("Only ASCII strings are allowed."));
}
let mut chars = quoted_str.chars();
let mut res: Vec<u8> = Vec::new();
let mut escaped = false;
if Some('"') != chars.next() {
return Err(String::from(
"Double-quoted string must be enclosed in double quotes.",
));
}
let mut c = chars.next();
while let Some(cur) = c {
if escaped {
let b = match cur {
'x' => parse_escape(&mut chars, Radix::Hex)?,
'b' => parse_escape(&mut chars, Radix::Bin)?,
'"' => b'"',
'\\' => b'\\',
_ => return Err(format!("Illegal escape sequence {}", cur)),
};
res.push(b);
escaped = false;
} else {
match cur {
'\\' => escaped = true,
'"' => {
chars.next(); // consume '"'
break;
}
_ => res.push(cur as u8),
}
}
c = chars.next();
}
if chars.next().is_some() {
return Err(String::from("Trailing characters after string terminator."));
}
Ok(res)
}
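/// Reads the fixed-width body of an escape sequence from `chars`: two hex
/// digits for `\x` or eight binary digits for `\b`, producing one byte.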
fn parse_escape(chars: &mut Chars<'_>, radix: Radix) -> Result<u8, String> {
let len = match radix {
Radix::Bin => 8,
Radix::Hex => 2,
};
let s = chars.take(len).collect::<String>();
if s.len() >= len {
u8::from_str_radix(&s, radix as u32).map_err(|e| e.to_string())
} else {
Err(format!(
"Escape sequence for radix {:?} too short: {}",
radix, s
))
}
}
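/// Decodes a `0x`-prefixed hexadecimal string into bytes.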
fn parse_hex(s: &str) -> Result<Vec<u8>, String> {
if let Some(s) = s.strip_prefix("0x") {
decode(s).map_err(|e| e.to_string())
} else {
Err(format!("Illegal hex character sequence {}.", s))
}
}
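/// Radices accepted in escape sequences; each discriminant is the base
/// passed to `u8::from_str_radix`.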
#[derive(Debug)]
enum Radix {
Bin = 2,
Hex = 16,
}
#[cfg(test)]
mod tests {
use super::*;
use ic_test_utilities::types::{ids::canister_test_id, messages::SignedIngressBuilder};
use std::io::Cursor;
const APP_CANISTER_URL: &str = "ryjl3-tyaaa-aaaaa-aaaba-cai";
const APP_CANISTER_ID: u64 = 2;
#[test]
fn test_parse_message_quoted_payload_succeeds() {
let s = &format!(
"ingress {} write \"payload \\x0a\\b00010001\"",
APP_CANISTER_URL
);
let parsed_message = parse_message(s, 0).unwrap();
let expiry_time = match &parsed_message {
Message::Ingress(signed_ingress) => signed_ingress.expiry_time(),
_ => panic!(
"parse_message() returned an unexpected message type: {:?}",
parsed_message
),
};
let expected = Message::Ingress(
SignedIngressBuilder::new()
.canister_id(canister_test_id(APP_CANISTER_ID))
.method_name("write".to_string())
.method_payload(vec![112, 97, 121, 108, 111, 97, 100, 32, 10, 17])
.nonce(0)
.expiry_time(expiry_time)
.build(),
);
assert_eq!(expected, parsed_message);
}
#[test]
fn test_parse_message_hex_payload_succeeds() {
let s = &format!("ingress {} write 0x010203", APP_CANISTER_URL);
let parsed_message = parse_message(s, 0).unwrap();
let expiry_time = match &parsed_message {
Message::Ingress(signed_ingress) => signed_ingress.expiry_time(),
_ => panic!(
"parse_message() returned an unexpected message type: {:?}",
parsed_message
),
};
let expected = Message::Ingress(
SignedIngressBuilder::new()
.canister_id(canister_test_id(APP_CANISTER_ID))
.method_name("write".to_string())
.method_payload(vec![1, 2, 3])
.nonce(0)
.expiry_time(expiry_time)
.build(),
);
assert_eq!(expected, parsed_message);
let s = &format!("query {} read 0x010203", APP_CANISTER_URL);
let nonce: u64 = 0;
let parsed_message = parse_message(s, 0).unwrap();
let ingress_expiry = match &parsed_message {
Message::Query(query) => query.ingress_expiry,
_ => panic!(
"parse_message() returned an unexpected message type: {:?}",
parsed_message
),
};
let expected = Message::Query(UserQuery {
source: UserId::from(PrincipalId::new_anonymous()),
receiver: canister_test_id(APP_CANISTER_ID),
method_name: String::from("read"),
method_payload: vec![1, 2, 3],
ingress_expiry,
nonce: Some(nonce.to_le_bytes().to_vec()),
});
assert_eq!(expected, parsed_message);
}
#[test]
fn test_parse_message_invalid_escapes_fails() {
let s = &format!("query {} read \"\\xzz\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
let s = &format!("query {} read \"\\b01\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
let s = &format!("query {} read \"\\x1\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
let s = &format!("query {} read \"\\b2\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
}
#[test]
fn test_illegal_method_name_must_fail() {
let s = &format!("query {} 0read \"\\xzz\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
let s = &format!("query {} üread \"\\xzz\"", APP_CANISTER_URL);
assert!(parse_message(s, 0).is_err());
}
#[test]
fn test_line_iterator() {
let text = Cursor::new(
r#"O for a voice like thunder, and a tongue
To drown the throat of war! When the senses
Are shaken, and the soul is driven to madness,
Who can stand?
"#,
);
let mut lines = LineIterator::new(text);
assert_eq!(
lines.next().unwrap().unwrap(),
"O for a voice like thunder, and a tongue"
);
assert_eq!(
lines.next().unwrap().unwrap(),
"To drown the throat of war! When the senses"
);
assert_eq!(
lines.next().unwrap().unwrap(),
"Are shaken, and the soul is driven to madness,"
);
assert_eq!(lines.next().unwrap().unwrap(), "Who can stand?");
lines.next();
assert!(lines.next().is_none());
}
#[test]
fn test_line_iterator_no_newline() {
let text = "O for a voice like thunder, and a tongue";
let mut lines = LineIterator::new(Cursor::new(text));
assert_eq!(lines.next().unwrap().unwrap(), text);
lines.next();
assert!(lines.next().is_none());
}
#[test]
fn test_line_iterator_line_same_length_as_input_buffer() {
let mut text = " ".repeat(LINE_ITERATOR_BUFFER_SIZE - 1);
text.push('\n');
assert!(text.len() == LINE_ITERATOR_BUFFER_SIZE);
let mut lines = LineIterator::new(Cursor::new(text));
let line = lines.next();
assert!(line.unwrap().is_ok());
lines.next();
assert!(lines.next().is_none());
}
#[test]
fn test_line_iterator_line_longer_than_input_buffer() {
let text = " ".repeat(LINE_ITERATOR_BUFFER_SIZE) + "continuation past the buffer";
assert!(text.len() > LINE_ITERATOR_BUFFER_SIZE);
let mut lines = LineIterator::new(Cursor::new(text));
assert!(lines.next().unwrap().is_err());
assert_eq!(
lines.next().unwrap().unwrap(),
"continuation past the buffer"
);
lines.next();
assert!(lines.next().is_none());
}
}
| 32.274254 | 92 | 0.544771 |
e2b81d57ba3f40e86d9eb64971081c3b43bcfcd2 | 1,949 | use std::collections::VecDeque;
use crate::{
listing,
notifier::{self, DiagType, Diagnostic, Highlight},
token::{
tokens::{
expected, too_few_operands,
traits::{Assemble, Requirements},
},
Token,
},
types::{Listings, SymbolTable},
};
token!(Ldi);
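// LDI assembles to 0xA000 | DR << 9 | (PCoffset9 & 0x1FF), i.e. LC-3 opcode 1010.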
impl Assemble for Ldi {
fn assembled(self, program_counter: &mut i16, symbols: &SymbolTable, symbol: &str) -> Listings {
*program_counter += 1;
let destination_register = if let Token::Register(register) = self.operands.first().unwrap()
{
register.register
} else {
unreachable!()
};
let offset = match self.operands.last().unwrap() {
Token::Immediate(imm) => imm.value,
Token::Label(label) => {
if let Some(symbol) = symbols.get(label.token()) {
symbol.address() as i16 - *program_counter
} else {
undefined!(label);
0
}
}
_ => unreachable!(),
} as u16;
let instruction = 0xA000 | destination_register << 9 | offset & 0x1FF;
vec![listing!(
instruction,
*program_counter - 1,
self.line,
symbol,
"LDI",
format!("R{}", destination_register),
match self.operands.last().unwrap() {
Token::Immediate(imm) => format!("#{}", imm.value),
Token::Label(label) => label.token().to_string(),
_ => unreachable!(),
}
)]
}
}
impl Requirements for Ldi {
fn min_operands(&self) -> u64 {
2
}
fn consume(&mut self, mut tokens: VecDeque<Token>) -> VecDeque<Token> {
expect!(self, tokens, Register);
expect!(self, tokens, Label, Immediate);
operands_check!(self);
tokens
}
}
| 25.986667 | 100 | 0.500257 |
38b56e795fce7f5a1e751579d2ff0d2b282fb658 | 100,615 | #![doc = "generated by AutoRust"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::de::{value, Deserializer, IntoDeserializer};
use serde::{Deserialize, Serialize, Serializer};
use std::str::FromStr;
#[doc = "The properties that define a BGP session."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BgpSession {
#[doc = "The IPv4 prefix that contains both ends' IPv4 addresses."]
#[serde(rename = "sessionPrefixV4", default, skip_serializing_if = "Option::is_none")]
pub session_prefix_v4: Option<String>,
#[doc = "The IPv6 prefix that contains both ends' IPv6 addresses."]
#[serde(rename = "sessionPrefixV6", default, skip_serializing_if = "Option::is_none")]
pub session_prefix_v6: Option<String>,
#[doc = "The IPv4 session address on Microsoft's end."]
#[serde(rename = "microsoftSessionIPv4Address", default, skip_serializing_if = "Option::is_none")]
pub microsoft_session_i_pv4_address: Option<String>,
#[doc = "The IPv6 session address on Microsoft's end."]
#[serde(rename = "microsoftSessionIPv6Address", default, skip_serializing_if = "Option::is_none")]
pub microsoft_session_i_pv6_address: Option<String>,
#[doc = "The IPv4 session address on peer's end."]
#[serde(rename = "peerSessionIPv4Address", default, skip_serializing_if = "Option::is_none")]
pub peer_session_i_pv4_address: Option<String>,
#[doc = "The IPv6 session address on peer's end."]
#[serde(rename = "peerSessionIPv6Address", default, skip_serializing_if = "Option::is_none")]
pub peer_session_i_pv6_address: Option<String>,
#[doc = "The state of the IPv4 session."]
#[serde(rename = "sessionStateV4", default, skip_serializing_if = "Option::is_none")]
pub session_state_v4: Option<bgp_session::SessionStateV4>,
#[doc = "The state of the IPv6 session."]
#[serde(rename = "sessionStateV6", default, skip_serializing_if = "Option::is_none")]
pub session_state_v6: Option<bgp_session::SessionStateV6>,
#[doc = "The maximum number of prefixes advertised over the IPv4 session."]
#[serde(rename = "maxPrefixesAdvertisedV4", default, skip_serializing_if = "Option::is_none")]
pub max_prefixes_advertised_v4: Option<i32>,
#[doc = "The maximum number of prefixes advertised over the IPv6 session."]
#[serde(rename = "maxPrefixesAdvertisedV6", default, skip_serializing_if = "Option::is_none")]
pub max_prefixes_advertised_v6: Option<i32>,
#[doc = "The MD5 authentication key of the session."]
#[serde(rename = "md5AuthenticationKey", default, skip_serializing_if = "Option::is_none")]
pub md5_authentication_key: Option<String>,
}
impl BgpSession {
pub fn new() -> Self {
Self::default()
}
}
pub mod bgp_session {
use super::*;
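    // The enums below use `#[serde(remote = "...")]` with hand-written
    // `Deserialize`/`Serialize` impls so that values unknown to this client
    // are preserved as `UnknownValue(String)` instead of failing to parse.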
#[doc = "The state of the IPv4 session."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "SessionStateV4")]
pub enum SessionStateV4 {
None,
Idle,
Connect,
Active,
OpenSent,
OpenConfirm,
OpenReceived,
Established,
PendingAdd,
PendingUpdate,
PendingRemove,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for SessionStateV4 {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for SessionStateV4 {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for SessionStateV4 {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("SessionStateV4", 0u32, "None"),
Self::Idle => serializer.serialize_unit_variant("SessionStateV4", 1u32, "Idle"),
Self::Connect => serializer.serialize_unit_variant("SessionStateV4", 2u32, "Connect"),
Self::Active => serializer.serialize_unit_variant("SessionStateV4", 3u32, "Active"),
Self::OpenSent => serializer.serialize_unit_variant("SessionStateV4", 4u32, "OpenSent"),
Self::OpenConfirm => serializer.serialize_unit_variant("SessionStateV4", 5u32, "OpenConfirm"),
Self::OpenReceived => serializer.serialize_unit_variant("SessionStateV4", 6u32, "OpenReceived"),
Self::Established => serializer.serialize_unit_variant("SessionStateV4", 7u32, "Established"),
Self::PendingAdd => serializer.serialize_unit_variant("SessionStateV4", 8u32, "PendingAdd"),
Self::PendingUpdate => serializer.serialize_unit_variant("SessionStateV4", 9u32, "PendingUpdate"),
Self::PendingRemove => serializer.serialize_unit_variant("SessionStateV4", 10u32, "PendingRemove"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The state of the IPv6 session."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "SessionStateV6")]
pub enum SessionStateV6 {
None,
Idle,
Connect,
Active,
OpenSent,
OpenConfirm,
OpenReceived,
Established,
PendingAdd,
PendingUpdate,
PendingRemove,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for SessionStateV6 {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for SessionStateV6 {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for SessionStateV6 {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("SessionStateV6", 0u32, "None"),
Self::Idle => serializer.serialize_unit_variant("SessionStateV6", 1u32, "Idle"),
Self::Connect => serializer.serialize_unit_variant("SessionStateV6", 2u32, "Connect"),
Self::Active => serializer.serialize_unit_variant("SessionStateV6", 3u32, "Active"),
Self::OpenSent => serializer.serialize_unit_variant("SessionStateV6", 4u32, "OpenSent"),
Self::OpenConfirm => serializer.serialize_unit_variant("SessionStateV6", 5u32, "OpenConfirm"),
Self::OpenReceived => serializer.serialize_unit_variant("SessionStateV6", 6u32, "OpenReceived"),
Self::Established => serializer.serialize_unit_variant("SessionStateV6", 7u32, "Established"),
Self::PendingAdd => serializer.serialize_unit_variant("SessionStateV6", 8u32, "PendingAdd"),
Self::PendingUpdate => serializer.serialize_unit_variant("SessionStateV6", 9u32, "PendingUpdate"),
Self::PendingRemove => serializer.serialize_unit_variant("SessionStateV6", 10u32, "PendingRemove"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The CDN peering prefix"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CdnPeeringPrefix {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define a CDN peering prefix"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<CdnPeeringPrefixProperties>,
}
impl CdnPeeringPrefix {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of CDN peering prefixes."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CdnPeeringPrefixListResult {
#[doc = "The list of CDN peering prefixes."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<CdnPeeringPrefix>,
#[doc = "The link to fetch the next page of CDN peering prefixes."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for CdnPeeringPrefixListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl CdnPeeringPrefixListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a CDN peering prefix"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CdnPeeringPrefixProperties {
#[doc = "The prefix."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub prefix: Option<String>,
#[doc = "The Azure region."]
#[serde(rename = "azureRegion", default, skip_serializing_if = "Option::is_none")]
pub azure_region: Option<String>,
#[doc = "The Azure service."]
#[serde(rename = "azureService", default, skip_serializing_if = "Option::is_none")]
pub azure_service: Option<String>,
#[doc = "The flag that indicates whether or not this is the primary region."]
#[serde(rename = "isPrimaryRegion", default, skip_serializing_if = "Option::is_none")]
pub is_primary_region: Option<bool>,
#[doc = "The BGP Community"]
#[serde(rename = "bgpCommunity", default, skip_serializing_if = "Option::is_none")]
pub bgp_community: Option<String>,
}
impl CdnPeeringPrefixProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Class for CheckServiceProviderAvailabilityInput"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CheckServiceProviderAvailabilityInput {
#[doc = "Gets or sets the peering service location."]
#[serde(rename = "peeringServiceLocation", default, skip_serializing_if = "Option::is_none")]
pub peering_service_location: Option<String>,
#[doc = "Gets or sets the peering service provider."]
#[serde(rename = "peeringServiceProvider", default, skip_serializing_if = "Option::is_none")]
pub peering_service_provider: Option<String>,
}
impl CheckServiceProviderAvailabilityInput {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The Connection Monitor Test class."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ConnectionMonitorTest {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define a Connection Monitor Test."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ConnectionMonitorTestProperties>,
}
impl ConnectionMonitorTest {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of [T]."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ConnectionMonitorTestListResult {
#[doc = "The list of [T]."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ConnectionMonitorTest>,
#[doc = "The link to fetch the next page of [T]."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for ConnectionMonitorTestListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl ConnectionMonitorTestListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a Connection Monitor Test."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ConnectionMonitorTestProperties {
#[doc = "The Connection Monitor test source agent"]
#[serde(rename = "sourceAgent", default, skip_serializing_if = "Option::is_none")]
pub source_agent: Option<String>,
#[doc = "The Connection Monitor test destination"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub destination: Option<String>,
#[doc = "The Connection Monitor test destination port"]
#[serde(rename = "destinationPort", default, skip_serializing_if = "Option::is_none")]
pub destination_port: Option<i32>,
#[doc = "The Connection Monitor test frequency in seconds"]
#[serde(rename = "testFrequencyInSec", default, skip_serializing_if = "Option::is_none")]
pub test_frequency_in_sec: Option<i32>,
#[doc = "The flag that indicates if the Connection Monitor test is successful or not."]
#[serde(rename = "isTestSuccessful", default, skip_serializing_if = "Option::is_none")]
pub is_test_successful: Option<bool>,
#[doc = "The path representing the Connection Monitor test."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub path: Vec<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<connection_monitor_test_properties::ProvisioningState>,
}
impl ConnectionMonitorTestProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod connection_monitor_test_properties {
use super::*;
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The contact detail class."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ContactDetail {
#[doc = "The role of the contact."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub role: Option<contact_detail::Role>,
#[doc = "The e-mail address of the contact."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub email: Option<String>,
#[doc = "The phone number of the contact."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub phone: Option<String>,
}
impl ContactDetail {
pub fn new() -> Self {
Self::default()
}
}
pub mod contact_detail {
use super::*;
#[doc = "The role of the contact."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Role")]
pub enum Role {
Noc,
Policy,
Technical,
Service,
Escalation,
Other,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Role {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Role {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Role {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Noc => serializer.serialize_unit_variant("Role", 0u32, "Noc"),
Self::Policy => serializer.serialize_unit_variant("Role", 1u32, "Policy"),
Self::Technical => serializer.serialize_unit_variant("Role", 2u32, "Technical"),
Self::Service => serializer.serialize_unit_variant("Role", 3u32, "Service"),
Self::Escalation => serializer.serialize_unit_variant("Role", 4u32, "Escalation"),
Self::Other => serializer.serialize_unit_variant("Role", 5u32, "Other"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define a direct connection."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct DirectConnection {
#[doc = "The bandwidth of the connection."]
#[serde(rename = "bandwidthInMbps", default, skip_serializing_if = "Option::is_none")]
pub bandwidth_in_mbps: Option<i32>,
#[doc = "The bandwidth that is actually provisioned."]
#[serde(rename = "provisionedBandwidthInMbps", default, skip_serializing_if = "Option::is_none")]
pub provisioned_bandwidth_in_mbps: Option<i32>,
#[doc = "The field indicating if Microsoft provides session ip addresses."]
#[serde(rename = "sessionAddressProvider", default, skip_serializing_if = "Option::is_none")]
pub session_address_provider: Option<direct_connection::SessionAddressProvider>,
#[doc = "The flag that indicates whether or not the connection is used for peering service."]
#[serde(rename = "useForPeeringService", default, skip_serializing_if = "Option::is_none")]
pub use_for_peering_service: Option<bool>,
#[doc = "The ID used within Microsoft's peering provisioning system to track the connection"]
#[serde(rename = "microsoftTrackingId", default, skip_serializing_if = "Option::is_none")]
pub microsoft_tracking_id: Option<String>,
#[doc = "The PeeringDB.com ID of the facility at which the connection has to be set up."]
#[serde(rename = "peeringDBFacilityId", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_id: Option<i32>,
#[doc = "The state of the connection."]
#[serde(rename = "connectionState", default, skip_serializing_if = "Option::is_none")]
pub connection_state: Option<direct_connection::ConnectionState>,
#[doc = "The properties that define a BGP session."]
#[serde(rename = "bgpSession", default, skip_serializing_if = "Option::is_none")]
pub bgp_session: Option<BgpSession>,
#[doc = "The unique identifier (GUID) for the connection."]
#[serde(rename = "connectionIdentifier", default, skip_serializing_if = "Option::is_none")]
pub connection_identifier: Option<String>,
#[doc = "The error message related to the connection state, if any."]
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
}
impl DirectConnection {
pub fn new() -> Self {
Self::default()
}
}
pub mod direct_connection {
use super::*;
#[doc = "The field indicating if Microsoft provides session ip addresses."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "SessionAddressProvider")]
pub enum SessionAddressProvider {
Microsoft,
Peer,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for SessionAddressProvider {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for SessionAddressProvider {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for SessionAddressProvider {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Microsoft => serializer.serialize_unit_variant("SessionAddressProvider", 0u32, "Microsoft"),
Self::Peer => serializer.serialize_unit_variant("SessionAddressProvider", 1u32, "Peer"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The state of the connection."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ConnectionState")]
pub enum ConnectionState {
None,
PendingApproval,
Approved,
ProvisioningStarted,
ProvisioningFailed,
ProvisioningCompleted,
Validating,
Active,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ConnectionState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ConnectionState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ConnectionState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("ConnectionState", 0u32, "None"),
Self::PendingApproval => serializer.serialize_unit_variant("ConnectionState", 1u32, "PendingApproval"),
Self::Approved => serializer.serialize_unit_variant("ConnectionState", 2u32, "Approved"),
Self::ProvisioningStarted => serializer.serialize_unit_variant("ConnectionState", 3u32, "ProvisioningStarted"),
Self::ProvisioningFailed => serializer.serialize_unit_variant("ConnectionState", 4u32, "ProvisioningFailed"),
Self::ProvisioningCompleted => serializer.serialize_unit_variant("ConnectionState", 5u32, "ProvisioningCompleted"),
Self::Validating => serializer.serialize_unit_variant("ConnectionState", 6u32, "Validating"),
Self::Active => serializer.serialize_unit_variant("ConnectionState", 7u32, "Active"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define a direct peering facility."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct DirectPeeringFacility {
#[doc = "The address of the direct peering facility."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub address: Option<String>,
#[doc = "The type of the direct peering."]
#[serde(rename = "directPeeringType", default, skip_serializing_if = "Option::is_none")]
pub direct_peering_type: Option<direct_peering_facility::DirectPeeringType>,
#[doc = "The PeeringDB.com ID of the facility."]
#[serde(rename = "peeringDBFacilityId", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_id: Option<i32>,
#[doc = "The PeeringDB.com URL of the facility."]
#[serde(rename = "peeringDBFacilityLink", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_link: Option<String>,
}
impl DirectPeeringFacility {
pub fn new() -> Self {
Self::default()
}
}
pub mod direct_peering_facility {
use super::*;
#[doc = "The type of the direct peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "DirectPeeringType")]
pub enum DirectPeeringType {
Edge,
Transit,
Cdn,
Internal,
Ix,
IxRs,
Voice,
EdgeZoneForOperators,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for DirectPeeringType {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for DirectPeeringType {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for DirectPeeringType {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Edge => serializer.serialize_unit_variant("DirectPeeringType", 0u32, "Edge"),
Self::Transit => serializer.serialize_unit_variant("DirectPeeringType", 1u32, "Transit"),
Self::Cdn => serializer.serialize_unit_variant("DirectPeeringType", 2u32, "Cdn"),
Self::Internal => serializer.serialize_unit_variant("DirectPeeringType", 3u32, "Internal"),
Self::Ix => serializer.serialize_unit_variant("DirectPeeringType", 4u32, "Ix"),
Self::IxRs => serializer.serialize_unit_variant("DirectPeeringType", 5u32, "IxRs"),
Self::Voice => serializer.serialize_unit_variant("DirectPeeringType", 6u32, "Voice"),
Self::EdgeZoneForOperators => serializer.serialize_unit_variant("DirectPeeringType", 7u32, "EdgeZoneForOperators"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The error detail that describes why an operation has failed."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorDetail {
#[doc = "The error code."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[doc = "The error message."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
}
impl ErrorDetail {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The error response that indicates why an operation has failed."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponse {
#[doc = "The error detail that describes why an operation has failed."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
impl azure_core::Continuable for ErrorResponse {
fn continuation(&self) -> Option<String> {
None
}
}
impl ErrorResponse {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define an exchange connection."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ExchangeConnection {
#[doc = "The PeeringDB.com ID of the facility at which the connection has to be set up."]
#[serde(rename = "peeringDBFacilityId", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_id: Option<i32>,
#[doc = "The state of the connection."]
#[serde(rename = "connectionState", default, skip_serializing_if = "Option::is_none")]
pub connection_state: Option<exchange_connection::ConnectionState>,
#[doc = "The properties that define a BGP session."]
#[serde(rename = "bgpSession", default, skip_serializing_if = "Option::is_none")]
pub bgp_session: Option<BgpSession>,
#[doc = "The unique identifier (GUID) for the connection."]
#[serde(rename = "connectionIdentifier", default, skip_serializing_if = "Option::is_none")]
pub connection_identifier: Option<String>,
#[doc = "The error message related to the connection state, if any."]
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
}
impl ExchangeConnection {
pub fn new() -> Self {
Self::default()
}
}
pub mod exchange_connection {
use super::*;
#[doc = "The state of the connection."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ConnectionState")]
pub enum ConnectionState {
None,
PendingApproval,
Approved,
ProvisioningStarted,
ProvisioningFailed,
ProvisioningCompleted,
Validating,
Active,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ConnectionState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ConnectionState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ConnectionState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("ConnectionState", 0u32, "None"),
Self::PendingApproval => serializer.serialize_unit_variant("ConnectionState", 1u32, "PendingApproval"),
Self::Approved => serializer.serialize_unit_variant("ConnectionState", 2u32, "Approved"),
Self::ProvisioningStarted => serializer.serialize_unit_variant("ConnectionState", 3u32, "ProvisioningStarted"),
Self::ProvisioningFailed => serializer.serialize_unit_variant("ConnectionState", 4u32, "ProvisioningFailed"),
Self::ProvisioningCompleted => serializer.serialize_unit_variant("ConnectionState", 5u32, "ProvisioningCompleted"),
Self::Validating => serializer.serialize_unit_variant("ConnectionState", 6u32, "Validating"),
Self::Active => serializer.serialize_unit_variant("ConnectionState", 7u32, "Active"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define an exchange peering facility."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ExchangePeeringFacility {
#[doc = "The name of the exchange peering facility."]
#[serde(rename = "exchangeName", default, skip_serializing_if = "Option::is_none")]
pub exchange_name: Option<String>,
#[doc = "The bandwidth of the connection between Microsoft and the exchange peering facility."]
#[serde(rename = "bandwidthInMbps", default, skip_serializing_if = "Option::is_none")]
pub bandwidth_in_mbps: Option<i32>,
#[doc = "The IPv4 address of Microsoft at the exchange peering facility."]
#[serde(rename = "microsoftIPv4Address", default, skip_serializing_if = "Option::is_none")]
pub microsoft_i_pv4_address: Option<String>,
#[doc = "The IPv6 address of Microsoft at the exchange peering facility."]
#[serde(rename = "microsoftIPv6Address", default, skip_serializing_if = "Option::is_none")]
pub microsoft_i_pv6_address: Option<String>,
#[doc = "The IPv4 prefixes associated with the exchange peering facility."]
#[serde(rename = "facilityIPv4Prefix", default, skip_serializing_if = "Option::is_none")]
pub facility_i_pv4_prefix: Option<String>,
#[doc = "The IPv6 prefixes associated with the exchange peering facility."]
#[serde(rename = "facilityIPv6Prefix", default, skip_serializing_if = "Option::is_none")]
pub facility_i_pv6_prefix: Option<String>,
#[doc = "The PeeringDB.com ID of the facility."]
#[serde(rename = "peeringDBFacilityId", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_id: Option<i32>,
#[doc = "The PeeringDB.com URL of the facility."]
#[serde(rename = "peeringDBFacilityLink", default, skip_serializing_if = "Option::is_none")]
pub peering_db_facility_link: Option<String>,
}
impl ExchangePeeringFacility {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a Log Analytics Workspace."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct LogAnalyticsWorkspaceProperties {
#[doc = "The Workspace ID."]
#[serde(rename = "workspaceID", default, skip_serializing_if = "Option::is_none")]
pub workspace_id: Option<String>,
#[doc = "The Workspace Key."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub key: Option<String>,
#[doc = "The list of connected agents."]
#[serde(rename = "connectedAgents", default, skip_serializing_if = "Vec::is_empty")]
pub connected_agents: Vec<String>,
}
impl LogAnalyticsWorkspaceProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Looking glass output model"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct LookingGlassOutput {
#[doc = "Invoked command"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub command: Option<looking_glass_output::Command>,
#[doc = "Output of the command"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub output: Option<String>,
}
impl LookingGlassOutput {
pub fn new() -> Self {
Self::default()
}
}
pub mod looking_glass_output {
use super::*;
#[doc = "Invoked command"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Command")]
pub enum Command {
Traceroute,
Ping,
BgpRoute,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Command {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Command {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Command {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Traceroute => serializer.serialize_unit_variant("Command", 0u32, "Traceroute"),
Self::Ping => serializer.serialize_unit_variant("Command", 1u32, "Ping"),
Self::BgpRoute => serializer.serialize_unit_variant("Command", 2u32, "BgpRoute"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "Dimensions of the metric."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct MetricDimension {
#[doc = "Name of the dimension."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "Localized friendly display name of the dimension."]
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
}
impl MetricDimension {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Specifications of the Metrics for Azure Monitoring."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct MetricSpecification {
#[doc = "Name of the metric."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "Localized friendly display name of the metric."]
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
#[doc = "Localized friendly description of the metric."]
#[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
pub display_description: Option<String>,
#[doc = "Unit that makes sense for the metric."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub unit: Option<String>,
#[doc = "Aggregation type will be set to one of the values: Average, Minimum, Maximum, Total, Count."]
#[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
pub aggregation_type: Option<String>,
#[doc = "Supported time grain types for the metric."]
#[serde(rename = "supportedTimeGrainTypes", default, skip_serializing_if = "Vec::is_empty")]
pub supported_time_grain_types: Vec<String>,
#[doc = "Dimensions of the metric."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub dimensions: Vec<MetricDimension>,
}
impl MetricSpecification {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The peering API operation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Operation {
#[doc = "The name of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The information related to the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub display: Option<OperationDisplayInfo>,
#[doc = "The flag that indicates whether the operation applies to data plane."]
#[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
pub is_data_action: Option<bool>,
#[doc = "The properties of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<OperationProperties>,
}
impl Operation {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The information related to the operation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationDisplayInfo {
#[doc = "The name of the resource provider."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[doc = "The type of the resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub resource: Option<String>,
#[doc = "The name of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub operation: Option<String>,
#[doc = "The description of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
}
impl OperationDisplayInfo {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering API operations."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationListResult {
#[doc = "The list of peering API operations."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Operation>,
#[doc = "The link to fetch the next page of peering API operations."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for OperationListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl OperationListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties of the operation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationProperties {
#[doc = "Service specification payload."]
#[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
pub service_specification: Option<ServiceSpecification>,
}
impl OperationProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The essential information related to the peer's ASN."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeerAsn {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define a peer's ASN."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeerAsnProperties>,
}
impl PeerAsn {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peer ASNs."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeerAsnListResult {
#[doc = "The list of peer ASNs."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeerAsn>,
#[doc = "The link to fetch the next page of peer ASNs."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeerAsnListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeerAsnListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a peer's ASN."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeerAsnProperties {
#[doc = "The Autonomous System Number (ASN) of the peer."]
#[serde(rename = "peerAsn", default, skip_serializing_if = "Option::is_none")]
pub peer_asn: Option<i32>,
#[doc = "The contact details of the peer."]
#[serde(rename = "peerContactDetail", default, skip_serializing_if = "Vec::is_empty")]
pub peer_contact_detail: Vec<ContactDetail>,
#[doc = "The name of the peer."]
#[serde(rename = "peerName", default, skip_serializing_if = "Option::is_none")]
pub peer_name: Option<String>,
#[doc = "The validation state of the ASN associated with the peer."]
#[serde(rename = "validationState", default, skip_serializing_if = "Option::is_none")]
pub validation_state: Option<peer_asn_properties::ValidationState>,
#[doc = "The error message for the validation state"]
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
}
impl PeerAsnProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peer_asn_properties {
use super::*;
#[doc = "The validation state of the ASN associated with the peer."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ValidationState")]
pub enum ValidationState {
None,
Pending,
Approved,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ValidationState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ValidationState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ValidationState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("ValidationState", 0u32, "None"),
Self::Pending => serializer.serialize_unit_variant("ValidationState", 1u32, "Pending"),
Self::Approved => serializer.serialize_unit_variant("ValidationState", 2u32, "Approved"),
Self::Failed => serializer.serialize_unit_variant("ValidationState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "Peering is a logical representation of a set of connections to the Microsoft Cloud Edge at a location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Peering {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The SKU that defines the tier and kind of the peering."]
pub sku: PeeringSku,
#[doc = "The kind of the peering."]
pub kind: peering::Kind,
#[doc = "The properties that define connectivity to the Microsoft Cloud Edge."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringProperties>,
#[doc = "The location of the resource."]
pub location: String,
#[doc = "The resource tags."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
impl Peering {
pub fn new(sku: PeeringSku, kind: peering::Kind, location: String) -> Self {
Self {
resource: Resource::default(),
sku,
kind,
properties: None,
location,
tags: None,
}
}
}
pub mod peering {
use super::*;
#[doc = "The kind of the peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Kind")]
pub enum Kind {
Direct,
Exchange,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Kind {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Kind {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Kind {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Direct => serializer.serialize_unit_variant("Kind", 0u32, "Direct"),
Self::Exchange => serializer.serialize_unit_variant("Kind", 1u32, "Exchange"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define a peering bandwidth offer."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringBandwidthOffer {
#[doc = "The name of the bandwidth offer."]
#[serde(rename = "offerName", default, skip_serializing_if = "Option::is_none")]
pub offer_name: Option<String>,
#[doc = "The value of the bandwidth offer in Mbps."]
#[serde(rename = "valueInMbps", default, skip_serializing_if = "Option::is_none")]
pub value_in_mbps: Option<i32>,
}
impl PeeringBandwidthOffer {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peerings."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringListResult {
#[doc = "The list of peerings."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Peering>,
#[doc = "The link to fetch the next page of peerings."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringListResult {
pub fn new() -> Self {
Self::default()
}
}
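// Hedged sketch (the `get_next_page` helper is hypothetical, not part of this
// crate): pagination drives `Continuable` by following `continuation()` until
// it returns `None`, e.g.:
//
//     let mut page = /* first PeeringListResult */;
//     let mut all = page.value;
//     while let Some(link) = page.continuation() {
//         page = get_next_page(&link).await?;
//         all.extend(page.value);
//     }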
#[doc = "Peering location is where connectivity could be established to the Microsoft Cloud Edge."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringLocation {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The kind of peering that the peering location supports."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub kind: Option<peering_location::Kind>,
#[doc = "The properties that define a peering location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringLocationProperties>,
}
impl PeeringLocation {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_location {
use super::*;
#[doc = "The kind of peering that the peering location supports."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Kind")]
pub enum Kind {
Direct,
Exchange,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Kind {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Kind {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Kind {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Direct => serializer.serialize_unit_variant("Kind", 0u32, "Direct"),
Self::Exchange => serializer.serialize_unit_variant("Kind", 1u32, "Exchange"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The paginated list of peering locations."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringLocationListResult {
#[doc = "The list of peering locations."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringLocation>,
#[doc = "The link to fetch the next page of peering locations."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringLocationListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringLocationListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a peering location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringLocationProperties {
#[doc = "The properties that define a direct peering location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<PeeringLocationPropertiesDirect>,
#[doc = "The properties that define an exchange peering location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub exchange: Option<PeeringLocationPropertiesExchange>,
#[doc = "The name of the peering location."]
#[serde(rename = "peeringLocation", default, skip_serializing_if = "Option::is_none")]
pub peering_location: Option<String>,
#[doc = "The country in which the peering location exists."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub country: Option<String>,
#[doc = "The Azure region associated with the peering location."]
#[serde(rename = "azureRegion", default, skip_serializing_if = "Option::is_none")]
pub azure_region: Option<String>,
}
impl PeeringLocationProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a direct peering location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringLocationPropertiesDirect {
#[doc = "The list of direct peering facilities at the peering location."]
#[serde(rename = "peeringFacilities", default, skip_serializing_if = "Vec::is_empty")]
pub peering_facilities: Vec<DirectPeeringFacility>,
#[doc = "The list of bandwidth offers available at the peering location."]
#[serde(rename = "bandwidthOffers", default, skip_serializing_if = "Vec::is_empty")]
pub bandwidth_offers: Vec<PeeringBandwidthOffer>,
}
impl PeeringLocationPropertiesDirect {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define an exchange peering location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringLocationPropertiesExchange {
#[doc = "The list of exchange peering facilities at the peering location."]
#[serde(rename = "peeringFacilities", default, skip_serializing_if = "Vec::is_empty")]
pub peering_facilities: Vec<ExchangePeeringFacility>,
}
impl PeeringLocationPropertiesExchange {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define connectivity to the Microsoft Cloud Edge."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringProperties {
#[doc = "The properties that define a direct peering."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<PeeringPropertiesDirect>,
#[doc = "The properties that define an exchange peering."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub exchange: Option<PeeringPropertiesExchange>,
#[doc = "The location of the peering."]
#[serde(rename = "peeringLocation", default, skip_serializing_if = "Option::is_none")]
pub peering_location: Option<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<peering_properties::ProvisioningState>,
}
impl PeeringProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_properties {
use super::*;
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define a direct peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringPropertiesDirect {
#[doc = "The set of connections that constitute a direct peering."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub connections: Vec<DirectConnection>,
#[doc = "The flag that indicates whether or not the peering is used for peering service."]
#[serde(rename = "useForPeeringService", default, skip_serializing_if = "Option::is_none")]
pub use_for_peering_service: Option<bool>,
#[doc = "The sub resource."]
#[serde(rename = "peerAsn", default, skip_serializing_if = "Option::is_none")]
pub peer_asn: Option<SubResource>,
#[doc = "The type of direct peering."]
#[serde(rename = "directPeeringType", default, skip_serializing_if = "Option::is_none")]
pub direct_peering_type: Option<peering_properties_direct::DirectPeeringType>,
}
impl PeeringPropertiesDirect {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_properties_direct {
use super::*;
#[doc = "The type of direct peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "DirectPeeringType")]
pub enum DirectPeeringType {
Edge,
Transit,
Cdn,
Internal,
Ix,
IxRs,
Voice,
EdgeZoneForOperators,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for DirectPeeringType {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for DirectPeeringType {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for DirectPeeringType {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Edge => serializer.serialize_unit_variant("DirectPeeringType", 0u32, "Edge"),
Self::Transit => serializer.serialize_unit_variant("DirectPeeringType", 1u32, "Transit"),
Self::Cdn => serializer.serialize_unit_variant("DirectPeeringType", 2u32, "Cdn"),
Self::Internal => serializer.serialize_unit_variant("DirectPeeringType", 3u32, "Internal"),
Self::Ix => serializer.serialize_unit_variant("DirectPeeringType", 4u32, "Ix"),
Self::IxRs => serializer.serialize_unit_variant("DirectPeeringType", 5u32, "IxRs"),
Self::Voice => serializer.serialize_unit_variant("DirectPeeringType", 6u32, "Voice"),
Self::EdgeZoneForOperators => serializer.serialize_unit_variant("DirectPeeringType", 7u32, "EdgeZoneForOperators"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define an exchange peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringPropertiesExchange {
#[doc = "The set of connections that constitute an exchange peering."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub connections: Vec<ExchangeConnection>,
#[doc = "The sub resource."]
#[serde(rename = "peerAsn", default, skip_serializing_if = "Option::is_none")]
pub peer_asn: Option<SubResource>,
}
impl PeeringPropertiesExchange {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a received route."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringReceivedRoute {
#[doc = "The prefix."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub prefix: Option<String>,
#[doc = "The next hop for the prefix."]
#[serde(rename = "nextHop", default, skip_serializing_if = "Option::is_none")]
pub next_hop: Option<String>,
#[doc = "The AS path for the prefix."]
#[serde(rename = "asPath", default, skip_serializing_if = "Option::is_none")]
pub as_path: Option<String>,
#[doc = "The origin AS change information for the prefix."]
#[serde(rename = "originAsValidationState", default, skip_serializing_if = "Option::is_none")]
pub origin_as_validation_state: Option<String>,
#[doc = "The RPKI validation state for the prefix and origin AS that's listed in the AS path."]
#[serde(rename = "rpkiValidationState", default, skip_serializing_if = "Option::is_none")]
pub rpki_validation_state: Option<String>,
#[doc = "The authority which holds the Route Origin Authorization record for the prefix, if any."]
#[serde(rename = "trustAnchor", default, skip_serializing_if = "Option::is_none")]
pub trust_anchor: Option<String>,
#[doc = "The received timestamp associated with the prefix."]
#[serde(rename = "receivedTimestamp", default, skip_serializing_if = "Option::is_none")]
pub received_timestamp: Option<String>,
}
impl PeeringReceivedRoute {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of received routes for the peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringReceivedRouteListResult {
#[doc = "The list of received routes for the peering."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringReceivedRoute>,
#[doc = "The link to fetch the next page of received routes for the peering."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringReceivedRouteListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringReceivedRouteListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The customer's ASN that is registered by the peering service provider."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredAsn {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define a registered ASN."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringRegisteredAsnProperties>,
}
impl PeeringRegisteredAsn {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering registered ASNs."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredAsnListResult {
#[doc = "The list of peering registered ASNs."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringRegisteredAsn>,
#[doc = "The link to fetch the next page of peering registered ASNs."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringRegisteredAsnListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringRegisteredAsnListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a registered ASN."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredAsnProperties {
#[doc = "The customer's ASN from which traffic originates."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub asn: Option<i32>,
#[doc = "The peering service prefix key that is to be shared with the customer."]
#[serde(rename = "peeringServicePrefixKey", default, skip_serializing_if = "Option::is_none")]
pub peering_service_prefix_key: Option<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<peering_registered_asn_properties::ProvisioningState>,
}
impl PeeringRegisteredAsnProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_registered_asn_properties {
use super::*;
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The customer's prefix that is registered by the peering service provider."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredPrefix {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define a registered prefix."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringRegisteredPrefixProperties>,
}
impl PeeringRegisteredPrefix {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering registered prefixes."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredPrefixListResult {
#[doc = "The list of peering registered prefixes."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringRegisteredPrefix>,
#[doc = "The link to fetch the next page of peering registered prefixes."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringRegisteredPrefixListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringRegisteredPrefixListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define a registered prefix."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringRegisteredPrefixProperties {
#[doc = "The customer's prefix from which traffic originates."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub prefix: Option<String>,
#[doc = "The prefix validation state."]
#[serde(rename = "prefixValidationState", default, skip_serializing_if = "Option::is_none")]
pub prefix_validation_state: Option<peering_registered_prefix_properties::PrefixValidationState>,
#[doc = "The peering service prefix key that is to be shared with the customer."]
#[serde(rename = "peeringServicePrefixKey", default, skip_serializing_if = "Option::is_none")]
pub peering_service_prefix_key: Option<String>,
#[doc = "The error message associated with the validation state, if any."]
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<peering_registered_prefix_properties::ProvisioningState>,
}
impl PeeringRegisteredPrefixProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_registered_prefix_properties {
use super::*;
#[doc = "The prefix validation state."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "PrefixValidationState")]
pub enum PrefixValidationState {
None,
Invalid,
Verified,
Failed,
Pending,
Warning,
Unknown,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for PrefixValidationState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for PrefixValidationState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for PrefixValidationState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("PrefixValidationState", 0u32, "None"),
Self::Invalid => serializer.serialize_unit_variant("PrefixValidationState", 1u32, "Invalid"),
Self::Verified => serializer.serialize_unit_variant("PrefixValidationState", 2u32, "Verified"),
Self::Failed => serializer.serialize_unit_variant("PrefixValidationState", 3u32, "Failed"),
Self::Pending => serializer.serialize_unit_variant("PrefixValidationState", 4u32, "Pending"),
Self::Warning => serializer.serialize_unit_variant("PrefixValidationState", 5u32, "Warning"),
Self::Unknown => serializer.serialize_unit_variant("PrefixValidationState", 6u32, "Unknown"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "Peering Service"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PeeringService {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The SKU that defines the type of the peering service."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<PeeringServiceSku>,
#[doc = "The properties that define connectivity to the Peering Service."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringServiceProperties>,
#[doc = "The location of the resource."]
pub location: String,
#[doc = "The resource tags."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
impl PeeringService {
pub fn new(location: String) -> Self {
Self {
resource: Resource::default(),
sku: None,
properties: None,
location,
tags: None,
}
}
}
#[doc = "The peering service country."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceCountry {
#[serde(flatten)]
pub resource: Resource,
}
impl PeeringServiceCountry {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering service countries."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceCountryListResult {
#[doc = "The list of peering service countries."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringServiceCountry>,
#[doc = "The link to fetch the next page of peering service countries."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringServiceCountryListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringServiceCountryListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering services."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceListResult {
#[doc = "The list of peering services."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringService>,
#[doc = "The link to fetch the next page of peering services."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringServiceListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringServiceListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The peering service location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceLocation {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define connectivity to the Peering Service Location."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringServiceLocationProperties>,
}
impl PeeringServiceLocation {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering service locations."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceLocationListResult {
#[doc = "The list of peering service locations."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringServiceLocation>,
#[doc = "The link to fetch the next page of peering service locations."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringServiceLocationListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringServiceLocationListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define connectivity to the Peering Service Location."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceLocationProperties {
#[doc = "Country of the customer"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub country: Option<String>,
#[doc = "State of the customer"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub state: Option<String>,
#[doc = "Azure region for the location"]
#[serde(rename = "azureRegion", default, skip_serializing_if = "Option::is_none")]
pub azure_region: Option<String>,
}
impl PeeringServiceLocationProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The peering service prefix class."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServicePrefix {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The peering service prefix properties class."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringServicePrefixProperties>,
}
impl PeeringServicePrefix {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The details of the event associated with a prefix."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServicePrefixEvent {
#[doc = "The timestamp of the event associated with a prefix."]
#[serde(rename = "eventTimestamp", default, skip_serializing_if = "Option::is_none")]
pub event_timestamp: Option<String>,
#[doc = "The type of the event associated with a prefix."]
#[serde(rename = "eventType", default, skip_serializing_if = "Option::is_none")]
pub event_type: Option<String>,
#[doc = "The summary of the event associated with a prefix."]
#[serde(rename = "eventSummary", default, skip_serializing_if = "Option::is_none")]
pub event_summary: Option<String>,
#[doc = "The level of the event associated with a prefix."]
#[serde(rename = "eventLevel", default, skip_serializing_if = "Option::is_none")]
pub event_level: Option<String>,
#[doc = "The description of the event associated with a prefix."]
#[serde(rename = "eventDescription", default, skip_serializing_if = "Option::is_none")]
pub event_description: Option<String>,
}
impl PeeringServicePrefixEvent {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering service prefixes."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServicePrefixListResult {
#[doc = "The list of peering service prefixes."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringServicePrefix>,
#[doc = "The link to fetch the next page of peering service prefixes."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringServicePrefixListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringServicePrefixListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The peering service prefix properties class."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServicePrefixProperties {
#[doc = "The prefix from which your traffic originates."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub prefix: Option<String>,
#[doc = "The prefix validation state"]
#[serde(rename = "prefixValidationState", default, skip_serializing_if = "Option::is_none")]
pub prefix_validation_state: Option<peering_service_prefix_properties::PrefixValidationState>,
#[doc = "The prefix learned type"]
#[serde(rename = "learnedType", default, skip_serializing_if = "Option::is_none")]
pub learned_type: Option<peering_service_prefix_properties::LearnedType>,
#[doc = "The error message for validation state"]
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
#[doc = "The list of events for peering service prefix"]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub events: Vec<PeeringServicePrefixEvent>,
#[doc = "The peering service prefix key"]
#[serde(rename = "peeringServicePrefixKey", default, skip_serializing_if = "Option::is_none")]
pub peering_service_prefix_key: Option<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<peering_service_prefix_properties::ProvisioningState>,
}
impl PeeringServicePrefixProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_service_prefix_properties {
use super::*;
#[doc = "The prefix validation state"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "PrefixValidationState")]
pub enum PrefixValidationState {
None,
Invalid,
Verified,
Failed,
Pending,
Warning,
Unknown,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for PrefixValidationState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for PrefixValidationState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for PrefixValidationState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("PrefixValidationState", 0u32, "None"),
Self::Invalid => serializer.serialize_unit_variant("PrefixValidationState", 1u32, "Invalid"),
Self::Verified => serializer.serialize_unit_variant("PrefixValidationState", 2u32, "Verified"),
Self::Failed => serializer.serialize_unit_variant("PrefixValidationState", 3u32, "Failed"),
Self::Pending => serializer.serialize_unit_variant("PrefixValidationState", 4u32, "Pending"),
Self::Warning => serializer.serialize_unit_variant("PrefixValidationState", 5u32, "Warning"),
Self::Unknown => serializer.serialize_unit_variant("PrefixValidationState", 6u32, "Unknown"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The prefix learned type"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "LearnedType")]
pub enum LearnedType {
None,
ViaServiceProvider,
ViaSession,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for LearnedType {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for LearnedType {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for LearnedType {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_unit_variant("LearnedType", 0u32, "None"),
Self::ViaServiceProvider => serializer.serialize_unit_variant("LearnedType", 1u32, "ViaServiceProvider"),
Self::ViaSession => serializer.serialize_unit_variant("LearnedType", 2u32, "ViaSession"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The properties that define connectivity to the Peering Service."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceProperties {
#[doc = "The location (state/province) of the customer."]
#[serde(rename = "peeringServiceLocation", default, skip_serializing_if = "Option::is_none")]
pub peering_service_location: Option<String>,
#[doc = "The name of the service provider."]
#[serde(rename = "peeringServiceProvider", default, skip_serializing_if = "Option::is_none")]
pub peering_service_provider: Option<String>,
#[doc = "The provisioning state of the resource."]
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<peering_service_properties::ProvisioningState>,
#[doc = "The primary peering (Microsoft/service provider) location to be used for customer traffic."]
#[serde(rename = "providerPrimaryPeeringLocation", default, skip_serializing_if = "Option::is_none")]
pub provider_primary_peering_location: Option<String>,
#[doc = "The backup peering (Microsoft/service provider) location to be used for customer traffic."]
#[serde(rename = "providerBackupPeeringLocation", default, skip_serializing_if = "Option::is_none")]
pub provider_backup_peering_location: Option<String>,
#[doc = "The properties that define a Log Analytics Workspace."]
#[serde(rename = "logAnalyticsWorkspaceProperties", default, skip_serializing_if = "Option::is_none")]
pub log_analytics_workspace_properties: Option<LogAnalyticsWorkspaceProperties>,
}
impl PeeringServiceProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_service_properties {
use super::*;
#[doc = "The provisioning state of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
Succeeded,
Updating,
Deleting,
Failed,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ProvisioningState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ProvisioningState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ProvisioningState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Succeeded"),
Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Updating"),
Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Deleting"),
Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Failed"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "PeeringService provider"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceProvider {
#[serde(flatten)]
pub resource: Resource,
#[doc = "The properties that define connectivity to the Peering Service Provider."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PeeringServiceProviderProperties>,
}
impl PeeringServiceProvider {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The paginated list of peering service providers."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceProviderListResult {
#[doc = "The list of peering service providers."]
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PeeringServiceProvider>,
#[doc = "The link to fetch the next page of peering service providers."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for PeeringServiceProviderListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl PeeringServiceProviderListResult {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The properties that define connectivity to the Peering Service Provider."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceProviderProperties {
#[doc = "The name of the service provider."]
#[serde(rename = "serviceProviderName", default, skip_serializing_if = "Option::is_none")]
pub service_provider_name: Option<String>,
#[doc = "The list of locations at which the service provider peers with Microsoft."]
#[serde(rename = "peeringLocations", default, skip_serializing_if = "Vec::is_empty")]
pub peering_locations: Vec<String>,
}
impl PeeringServiceProviderProperties {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The SKU that defines the type of the peering service."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringServiceSku {
#[doc = "The name of the peering service SKU."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
}
impl PeeringServiceSku {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The SKU that defines the tier and kind of the peering."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PeeringSku {
#[doc = "The name of the peering SKU."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The tier of the peering SKU."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tier: Option<peering_sku::Tier>,
#[doc = "The family of the peering SKU."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub family: Option<peering_sku::Family>,
#[doc = "The size of the peering SKU."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub size: Option<peering_sku::Size>,
}
impl PeeringSku {
pub fn new() -> Self {
Self::default()
}
}
pub mod peering_sku {
use super::*;
#[doc = "The tier of the peering SKU."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Tier")]
pub enum Tier {
Basic,
Premium,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Tier {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Tier {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Tier {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Basic => serializer.serialize_unit_variant("Tier", 0u32, "Basic"),
Self::Premium => serializer.serialize_unit_variant("Tier", 1u32, "Premium"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The family of the peering SKU."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Family")]
pub enum Family {
Direct,
Exchange,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Family {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Family {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Family {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Direct => serializer.serialize_unit_variant("Family", 0u32, "Direct"),
Self::Exchange => serializer.serialize_unit_variant("Family", 1u32, "Exchange"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "The size of the peering SKU."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Size")]
pub enum Size {
Free,
Metered,
Unlimited,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for Size {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for Size {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for Size {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Free => serializer.serialize_unit_variant("Size", 0u32, "Free"),
Self::Metered => serializer.serialize_unit_variant("Size", 1u32, "Metered"),
Self::Unlimited => serializer.serialize_unit_variant("Size", 2u32, "Unlimited"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
#[doc = "The ARM resource class."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Resource {
#[doc = "The name of the resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The ID of the resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[doc = "The type of the resource."]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
impl Resource {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The resource tags."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ResourceTags {
#[doc = "Gets or sets the tags, a dictionary of descriptors arm object"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
impl ResourceTags {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Service specification payload."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ServiceSpecification {
#[doc = "Specifications of the Metrics for Azure Monitoring."]
#[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
pub metric_specifications: Vec<MetricSpecification>,
}
impl ServiceSpecification {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "The sub resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubResource {
#[doc = "The identifier of the referenced resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
}
impl SubResource {
pub fn new() -> Self {
Self::default()
}
}
| 41.922917 | 131 | 0.648035 |
8a815445129f4a9c9a788df6f84590e24106a769 | 1,446 | use aoc::*;
fn main() -> Result<()> {
let file = std::fs::read_to_string("input/03.txt")?;
let input: Vec<BitVec> = file
.lines()
.map(|line| line.chars().map(|c| c == '1').collect())
.collect();
solve(&input).ok_or(AocError::Logic)?;
Ok(())
}
fn solve(input: &[BitVec]) -> Option<()> {
    // Gamma uses the most common bit per column, epsilon the least common.
    let gamma = counts(input, false)?;
    let epsilon = counts(input, true)?;
    println!("1: {}", gamma * epsilon);
    let oxygen = reduces(input, false)?;
    let co2 = reduces(input, true)?;
println!("2: {}", oxygen * co2);
Some(())
}
fn counts(input: &[BitVec], invert: bool) -> Option<i32> {
let mut most_common = BitVec::new();
for i in 0..input.first()?.len() {
        let tally = input.iter().map(|bits| bits[i]).counts();
        // A missing key means that bit value never occurred; count it as zero.
        let ones = tally.get(&true).copied().unwrap_or(0);
        let zeros = tally.get(&false).copied().unwrap_or(0);
        most_common.push((ones >= zeros) ^ invert);
}
Some(to_binary(&most_common))
}
fn reduces(input: &[BitVec], invert: bool) -> Option<i32> {
let mut input = Vec::from(input);
for i in 0..input.first()?.len() {
if input.len() == 1 {
break;
}
        let tally = input.iter().map(|line| line[i]).counts();
        // After filtering, a column can become uniform, leaving one key absent
        // from the tally; indexing would panic, so treat a missing key as zero.
        let ones = tally.get(&true).copied().unwrap_or(0);
        let zeros = tally.get(&false).copied().unwrap_or(0);
        let keep = (ones >= zeros) ^ invert;
        input.retain(|line| line[i] == keep);
}
Some(to_binary(input.first()?))
}
fn to_binary(digits: &BitSlice) -> i32 {
digits
.iter()
.fold(0, |total, bit| 2 * total + if *bit { 1 } else { 0 })
}
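// A minimal sketch, assuming `BitVec`/`BitSlice` from the `aoc` prelude are
// `bitvec`-style types that collect from iterators of `bool`: check the
// binary fold against a hand-computed value.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn to_binary_matches_hand_computation() {
        // 0b10110 == 22
        let bits: BitVec = "10110".chars().map(|c| c == '1').collect();
        assert_eq!(to_binary(&bits), 22);
    }
}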
| 28.352941 | 67 | 0.533195 |
ccf0fb7273837b57577ee97fe91f677e4557aab2 | 12,904 | use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::Span;
use super::explicit::ExplicitPredicatesMap;
use super::utils::*;
/// Infer predicates for the items in the crate.
///
/// `global_inferred_outlives`: this is initially the empty map that
/// was generated by walking the items in the crate. This will
/// now be filled with inferred predicates.
pub fn infer_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
explicit_map: &mut ExplicitPredicatesMap<'tcx>,
) -> FxHashMap<DefId, RequiredPredicates<'tcx>> {
debug!("infer_predicates");
let mut predicates_added = true;
let mut global_inferred_outlives = FxHashMap::default();
// If new predicates were added then we need to re-calculate
// all crates since there could be new implied predicates.
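    // (Illustrative note) The fixed point terminates: a pass can only grow an
    // item's predicate set, and that set is bounded by the finite number of
    // (generic parameter, region) pairs in the crate.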
while predicates_added {
predicates_added = false;
let mut visitor = InferVisitor {
tcx,
global_inferred_outlives: &mut global_inferred_outlives,
predicates_added: &mut predicates_added,
explicit_map,
};
// Visit all the crates and infer predicates
tcx.hir().krate().visit_all_item_likes(&mut visitor);
}
global_inferred_outlives
}
pub struct InferVisitor<'cx, 'tcx> {
tcx: TyCtxt<'tcx>,
global_inferred_outlives: &'cx mut FxHashMap<DefId, RequiredPredicates<'tcx>>,
predicates_added: &'cx mut bool,
explicit_map: &'cx mut ExplicitPredicatesMap<'tcx>,
}
impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> {
fn visit_item(&mut self, item: &hir::Item<'_>) {
let item_did = item.def_id;
debug!("InferVisitor::visit_item(item={:?})", item_did);
let mut item_required_predicates = RequiredPredicates::default();
match item.kind {
hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) | hir::ItemKind::Struct(..) => {
let adt_def = self.tcx.adt_def(item_did.to_def_id());
// Iterate over all fields in item_did
for field_def in adt_def.all_fields() {
// Calculating the predicate requirements necessary
// for item_did.
//
// For field of type &'a T (reference) or Adt
// (struct/enum/union) there will be outlive
// requirements for adt_def.
let field_ty = self.tcx.type_of(field_def.did);
let field_span = self.tcx.def_span(field_def.did);
insert_required_predicates_to_be_wf(
self.tcx,
field_ty,
field_span,
self.global_inferred_outlives,
&mut item_required_predicates,
&mut self.explicit_map,
);
}
}
_ => {}
};
// If new predicates were added (`local_predicate_map` has more
// predicates than the `global_inferred_outlives`), the new predicates
// might result in implied predicates for their parent types.
// Therefore mark `predicates_added` as true and which will ensure
// we walk the crates again and re-calculate predicates for all
// items.
let item_predicates_len: usize =
self.global_inferred_outlives.get(&item_did.to_def_id()).map_or(0, |p| p.len());
if item_required_predicates.len() > item_predicates_len {
*self.predicates_added = true;
self.global_inferred_outlives.insert(item_did.to_def_id(), item_required_predicates);
}
}
fn visit_trait_item(&mut self, _trait_item: &'tcx hir::TraitItem<'tcx>) {}
fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem<'tcx>) {}
fn visit_foreign_item(&mut self, _foreign_item: &'tcx hir::ForeignItem<'tcx>) {}
}
fn insert_required_predicates_to_be_wf<'tcx>(
tcx: TyCtxt<'tcx>,
field_ty: Ty<'tcx>,
field_span: Span,
global_inferred_outlives: &FxHashMap<DefId, RequiredPredicates<'tcx>>,
required_predicates: &mut RequiredPredicates<'tcx>,
explicit_map: &mut ExplicitPredicatesMap<'tcx>,
) {
for arg in field_ty.walk() {
let ty = match arg.unpack() {
GenericArgKind::Type(ty) => ty,
// No predicates from lifetimes or constants, except potentially
// constants' types, but `walk` will get to them as well.
GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
};
match *ty.kind() {
// The field is of type &'a T which means that we will have
// a predicate requirement of T: 'a (T outlives 'a).
//
// We also want to calculate potential predicates for the T
ty::Ref(region, rty, _) => {
debug!("Ref");
insert_outlives_predicate(tcx, rty.into(), region, field_span, required_predicates);
}
// For each Adt (struct/enum/union) type `Foo<'a, T>`, we
// can load the current set of inferred and explicit
// predicates from `global_inferred_outlives` and filter the
// ones that are TypeOutlives.
ty::Adt(def, substs) => {
// First check the inferred predicates
//
// Example 1:
//
// struct Foo<'a, T> {
// field1: Bar<'a, T>
// }
//
// struct Bar<'b, U> {
// field2: &'b U
// }
//
// Here, when processing the type of `field1`, we would
// request the set of implicit predicates computed for `Bar`
// thus far. This will initially come back empty, but in next
// round we will get `U: 'b`. We then apply the substitution
// `['b => 'a, U => T]` and thus get the requirement that `T:
// 'a` holds for `Foo`.
debug!("Adt");
if let Some(unsubstituted_predicates) = global_inferred_outlives.get(&def.did) {
for (unsubstituted_predicate, &span) in unsubstituted_predicates {
// `unsubstituted_predicate` is `U: 'b` in the
// example above. So apply the substitution to
// get `T: 'a` (or `predicate`):
let predicate = unsubstituted_predicate.subst(tcx, substs);
insert_outlives_predicate(
tcx,
predicate.0,
predicate.1,
span,
required_predicates,
);
}
}
// Check if the type has any explicit predicates that need
// to be added to `required_predicates`
check_explicit_predicates(
tcx,
def.did,
substs,
required_predicates,
explicit_map,
None,
);
}
ty::Dynamic(obj, ..) => {
// This corresponds to `dyn Trait<..>`. In this case, we should
// use the explicit predicates as well.
debug!("Dynamic");
debug!("field_ty = {}", &field_ty);
debug!("ty in field = {}", &ty);
if let Some(ex_trait_ref) = obj.principal() {
// Here, we are passing the type `usize` as a
// placeholder value with the function
// `with_self_ty`, since there is no concrete type
// `Self` for a `dyn Trait` at this
// stage. Therefore when checking explicit
// predicates in `check_explicit_predicates` we
// need to ignore checking the explicit_map for
// Self type.
let substs =
ex_trait_ref.with_self_ty(tcx, tcx.types.usize).skip_binder().substs;
check_explicit_predicates(
tcx,
ex_trait_ref.skip_binder().def_id,
substs,
required_predicates,
explicit_map,
Some(tcx.types.self_param),
);
}
}
ty::Projection(obj) => {
// This corresponds to `<T as Foo<'a>>::Bar`. In this case, we should use the
// explicit predicates as well.
debug!("Projection");
check_explicit_predicates(
tcx,
tcx.associated_item(obj.item_def_id).container.id(),
obj.substs,
required_predicates,
explicit_map,
None,
);
}
_ => {}
}
}
}
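// Illustrative, user-facing sketch (not compiler code) of what this inference
// provides: `Foo` needs `T: 'a` for its field to be well-formed, but the user
// never writes that bound; rustc infers it from the field types:
//
//     struct Bar<'b, U>(&'b U);        // pass 1 infers `U: 'b`
//     struct Foo<'a, T>(Bar<'a, T>);   // pass 2 substitutes to get `T: 'a`
//
//     fn demo<'a, T>(x: &'a T) -> Foo<'a, T> {
//         Foo(Bar(x))                  // compiles without `where T: 'a`
//     }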
/// We also have to check the explicit predicates
/// declared on the type.
///
/// struct Foo<'a, T> {
/// field1: Bar<T>
/// }
///
/// struct Bar<U> where U: 'static, U: Foo {
/// ...
/// }
///
/// Here, we should fetch the explicit predicates, which
/// will give us `U: 'static` and `U: Foo`. The latter we
/// can ignore, but we will want to process `U: 'static`,
/// applying the substitution as above.
pub fn check_explicit_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: DefId,
substs: &[GenericArg<'tcx>],
required_predicates: &mut RequiredPredicates<'tcx>,
explicit_map: &mut ExplicitPredicatesMap<'tcx>,
ignored_self_ty: Option<Ty<'tcx>>,
) {
debug!(
"check_explicit_predicates(def_id={:?}, \
substs={:?}, \
explicit_map={:?}, \
required_predicates={:?}, \
ignored_self_ty={:?})",
def_id, substs, explicit_map, required_predicates, ignored_self_ty,
);
let explicit_predicates = explicit_map.explicit_predicates_of(tcx, def_id);
for (outlives_predicate, &span) in explicit_predicates {
debug!("outlives_predicate = {:?}", &outlives_predicate);
// Careful: If we are inferring the effects of a `dyn Trait<..>`
// type, then when we look up the predicates for `Trait`,
// we may find some that reference `Self`. e.g., perhaps the
// definition of `Trait` was:
//
// ```
// trait Trait<'a, T> where Self: 'a { .. }
// ```
//
// we want to ignore such predicates here, because
// there is no type parameter for them to affect. Consider
// a struct containing `dyn Trait`:
//
// ```
// struct MyStruct<'x, X> { field: Box<dyn Trait<'x, X>> }
// ```
//
// The `where Self: 'a` predicate refers to the *existential, hidden type*
// that is represented by the `dyn Trait`, not to the `X` type parameter
// (or any other generic parameter) declared on `MyStruct`.
//
// Note that we do this check for self **before** applying `substs`. In the
// case that `substs` come from a `dyn Trait` type, our caller will have
// included `Self = usize` as the value for `Self`. If we were
// to apply the substs, and not filter this predicate, we might then falsely
// conclude that e.g., `X: 'x` was a reasonable inferred requirement.
//
// Another similar case is where we have an inferred
// requirement like `<Self as Trait>::Foo: 'b`. We presently
// ignore such requirements as well (cc #54467)-- though
// conceivably it might be better if we could extract the `Foo
// = X` binding from the object type (there must be such a
// binding) and thus infer an outlives requirement that `X:
// 'b`.
if let Some(self_ty) = ignored_self_ty {
if let GenericArgKind::Type(ty) = outlives_predicate.0.unpack() {
if ty.walk().any(|arg| arg == self_ty.into()) {
debug!("skipping self ty = {:?}", &ty);
continue;
}
}
}
let predicate = outlives_predicate.subst(tcx, substs);
debug!("predicate = {:?}", &predicate);
insert_outlives_predicate(tcx, predicate.0, predicate.1, span, required_predicates);
}
}
| 40.199377 | 100 | 0.538438 |
907f06473cd20bb25322ef932e308f11600a93eb | 4,008 | //! Game Boy sound 4 generates noise from a Linear Feedback Shift
//! Register.
use spu::envelope::Envelope;
use spu::{Mode, Sample};
pub struct LfsrWave {
/// True if the wave is generating samples
running: bool,
/// Linear Feedback Shift Register
lfsr: Lfsr,
/// Enveloppe that will be used at the next start()
start_envelope: Envelope,
/// Active envelope
envelope: Envelope,
/// Play mode (continuous or counter)
mode: Mode,
/// Counter for counter mode
remaining: u32,
}
impl LfsrWave {
pub fn new() -> LfsrWave {
LfsrWave {
lfsr: Lfsr::from_reg(0),
start_envelope: Envelope::from_reg(0),
envelope: Envelope::from_reg(0),
remaining: 64 * 0x4000,
mode: Mode::Continuous,
running: false,
}
}
pub fn step(&mut self) {
if self.mode == Mode::Counter {
if self.remaining == 0 {
self.running = false;
// Reload counter default value
self.remaining = 64 * 0x4000;
return;
}
self.remaining -= 1;
}
if !self.running {
return;
}
self.envelope.step();
self.lfsr.step();
}
pub fn sample(&self) -> Sample {
if !self.running {
return 0;
}
if self.lfsr.high() {
self.envelope.into_sample()
} else {
0
}
}
pub fn running(&self) -> bool {
self.running
}
pub fn start(&mut self) {
self.envelope = self.start_envelope;
self.running = self.envelope.dac_enabled();
}
pub fn envelope(&self) -> Envelope {
self.start_envelope
}
pub fn set_envelope(&mut self, envelope: Envelope) {
self.start_envelope = envelope;
if !envelope.dac_enabled() {
self.running = false;
}
}
pub fn set_length(&mut self, len: u8) {
if len >= 64 {
panic!("sound length out of range: {}", len);
}
let len = len as u32;
self.remaining = (64 - len) * 0x4000;
}
pub fn mode(&self) -> Mode {
self.mode
}
pub fn set_mode(&mut self, mode: Mode) {
self.mode = mode;
}
pub fn lfsr(&self) -> Lfsr {
self.lfsr
}
pub fn set_lfsr(&mut self, lfsr: Lfsr) {
self.lfsr = lfsr;
}
}
#[derive(Clone, Copy)]
pub struct Lfsr {
register: u16,
width: LfsrWidth,
step_duration: u32,
counter: u32,
reg: u8,
}
#[derive(Clone, Copy)]
enum LfsrWidth {
Lfsr15bit = 0,
Lfsr7bit = 1,
}
impl Lfsr {
pub fn from_reg(val: u8) -> Lfsr {
let (reg, width) = match (val & 8) != 0 {
true => ((1 << 7) - 1, LfsrWidth::Lfsr7bit),
false => ((1 << 15) - 1, LfsrWidth::Lfsr15bit),
};
// There are two divisors in series to generate the LFSR
// clock.
let mut l = match val & 7 {
// 0 is 8 * 0.5 so we need to special case it since
            // we're using integer arithmetic.
0 => 8 / 2,
n => 8 * n as u32,
};
l *= 1 << ((val >> 4) + 1) as usize;
Lfsr {
register: reg,
width: width,
step_duration: l,
counter: 0,
reg: val,
}
}
pub fn into_reg(&self) -> u8 {
self.reg
}
fn step(&mut self) {
self.counter += 1;
self.counter %= self.step_duration;
if self.counter == 0 {
self.shift();
}
}
fn high(&self) -> bool {
self.register & 1 != 0
}
fn shift(&mut self) {
let shifted = self.register >> 1;
let carry = (self.register ^ shifted) & 1;
self.register = match self.width {
LfsrWidth::Lfsr7bit => shifted | (carry << 6),
LfsrWidth::Lfsr15bit => shifted | (carry << 14),
};
}
}
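// Minimal illustrative check of the feedback step above (a sketch, not part
// of the original file): register value 8 selects 7-bit mode, and with an
// all-ones register bit0 ^ bit1 == 0, so a single shift only moves the bits
// right. A child module can exercise the private `register` field and
// `shift` method directly.
#[cfg(test)]
mod lfsr_shift_example {
    use super::Lfsr;

    #[test]
    fn seven_bit_shift_feeds_back_bit0_xor_bit1() {
        let mut lfsr = Lfsr::from_reg(8);
        assert_eq!(lfsr.register, 0x7f);
        lfsr.shift();
        // carry = (0x7f ^ 0x3f) & 1 == 0, inserted at bit 6.
        assert_eq!(lfsr.register, 0x3f);
    }
}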
| 21.901639 | 65 | 0.493762 |
d5cdfba4059b5fea1bff31c725606567e5e0ef6a | 598 | fn main()
{
    let src: Result<String, String> = Ok(String::from("apple"));
    println!("src={:?}", src);
    let value = src
        .map(|x| format!("1_{}", x))
        .map(|x| format!("2_{}", x))
        .and_then(|x| {
            println!("and_then {:?}", x);
            if x == "apple" {
                Ok(format!("+_{}", x))
            } else {
                Err(format!("-_{}", x))
            }
        })
        .map_err(|x| format!("a_{}", x))
        .map_err(|x| format!("a_{}", x));
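    // With the Ok("apple") start above, and_then receives "2_1_apple" (not
    // "apple"), so the chain ends in Err and both map_err calls run:
    // value = Err("a_a_-_2_1_apple").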
println!("value={:?}", value);
}
| 18.6875 | 67 | 0.362876 |
8afcd94ba034a2b458613d13eddffd0d7c019adf | 14,453 | //! Structured querying for the Tendermint RPC event subscription system.
//!
//! See [`Query`] for details as to how to construct queries.
//!
//! [`Query`]: struct.Query.html
use std::fmt;
use chrono::{Date, DateTime, FixedOffset, Utc};
/// A structured query for use in interacting with the Tendermint RPC event
/// subscription system.
///
/// Allows for compile-time validation of queries.
///
/// See the [subscribe endpoint documentation] for more details.
///
/// ## Examples
///
/// ```rust
/// use tendermint_rpc::query::{Query, EventType};
///
/// let query = Query::from(EventType::NewBlock);
/// assert_eq!("tm.event = 'NewBlock'", query.to_string());
///
/// let query = Query::from(EventType::Tx).and_eq("tx.hash", "XYZ");
/// assert_eq!("tm.event = 'Tx' AND tx.hash = 'XYZ'", query.to_string());
///
/// let query = Query::from(EventType::Tx).and_gte("tx.height", 100_i64);
/// assert_eq!("tm.event = 'Tx' AND tx.height >= 100", query.to_string());
/// ```
///
/// [subscribe endpoint documentation]: https://docs.tendermint.com/master/rpc/#/Websocket/subscribe
#[derive(Debug, Clone, PartialEq)]
pub struct Query {
    // At present, a query can contain at most one event type.
event_type: Option<EventType>,
// We can have zero or more additional conditions associated with a query.
// Conditions are currently exclusively joined by logical ANDs.
conditions: Vec<Condition>,
}
impl Query {
/// Query constructor testing whether `<key> = <value>`
pub fn eq(key: impl ToString, value: impl Into<Operand>) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Eq(key.to_string(), value.into())],
}
}
/// Query constructor testing whether `<key> < <value>`
pub fn lt(key: impl ToString, value: impl Into<Operand>) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Lt(key.to_string(), value.into())],
}
}
/// Query constructor testing whether `<key> <= <value>`
pub fn lte(key: impl ToString, value: impl Into<Operand>) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Lte(key.to_string(), value.into())],
}
}
/// Query constructor testing whether `<key> > <value>`
pub fn gt(key: impl ToString, value: impl Into<Operand>) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Gt(key.to_string(), value.into())],
}
}
/// Query constructor testing whether `<key> >= <value>`
pub fn gte(key: impl ToString, value: impl Into<Operand>) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Gte(key.to_string(), value.into())],
}
}
/// Query constructor testing whether `<key> CONTAINS <value>` (assuming
/// `key` contains a string, this tests whether `value` is a sub-string
/// within it).
pub fn contains(key: impl ToString, value: impl ToString) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Contains(key.to_string(), value.to_string())],
}
}
/// Query constructor testing whether `<key> EXISTS`.
pub fn exists(key: impl ToString) -> Self {
Self {
event_type: None,
conditions: vec![Condition::Exists(key.to_string())],
}
}
/// Add the condition `<key> = <value>` to the query.
pub fn and_eq(mut self, key: impl ToString, value: impl Into<Operand>) -> Self {
self.conditions
.push(Condition::Eq(key.to_string(), value.into()));
self
}
/// Add the condition `<key> < <value>` to the query.
pub fn and_lt(mut self, key: impl ToString, value: impl Into<Operand>) -> Self {
self.conditions
.push(Condition::Lt(key.to_string(), value.into()));
self
}
/// Add the condition `<key> <= <value>` to the query.
pub fn and_lte(mut self, key: impl ToString, value: impl Into<Operand>) -> Self {
self.conditions
.push(Condition::Lte(key.to_string(), value.into()));
self
}
/// Add the condition `<key> > <value>` to the query.
pub fn and_gt(mut self, key: impl ToString, value: impl Into<Operand>) -> Self {
self.conditions
.push(Condition::Gt(key.to_string(), value.into()));
self
}
/// Add the condition `<key> >= <value>` to the query.
pub fn and_gte(mut self, key: impl ToString, value: impl Into<Operand>) -> Self {
self.conditions
.push(Condition::Gte(key.to_string(), value.into()));
self
}
/// Add the condition `<key> CONTAINS <value>` to the query.
pub fn and_contains(mut self, key: impl ToString, value: impl ToString) -> Self {
self.conditions
.push(Condition::Contains(key.to_string(), value.to_string()));
self
}
/// Add the condition `<key> EXISTS` to the query.
pub fn and_exists(mut self, key: impl ToString) -> Self {
self.conditions.push(Condition::Exists(key.to_string()));
self
}
}
impl Default for Query {
/// An empty query matches any set of events. See [these docs].
///
/// [these docs]: https://godoc.org/github.com/tendermint/tendermint/libs/pubsub/query#Empty
fn default() -> Self {
Self {
event_type: None,
conditions: Vec::new(),
}
}
}
impl From<EventType> for Query {
fn from(t: EventType) -> Self {
Self {
event_type: Some(t),
conditions: Vec::new(),
}
}
}
impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(t) = &self.event_type {
write!(f, "tm.event = '{}'", t)?;
if !self.conditions.is_empty() {
write!(f, " AND ")?;
}
}
join(f, " AND ", &self.conditions)?;
Ok(())
}
}
fn join<S, I>(f: &mut fmt::Formatter<'_>, separator: S, iterable: I) -> fmt::Result
where
S: fmt::Display,
I: IntoIterator,
I::Item: fmt::Display,
{
let mut iter = iterable.into_iter();
if let Some(first) = iter.next() {
write!(f, "{}", first)?;
}
for item in iter {
write!(f, "{}{}", separator, item)?;
}
Ok(())
}
/// The types of Tendermint events for which we can query at present.
#[derive(Debug, Clone, PartialEq)]
pub enum EventType {
NewBlock,
Tx,
}
impl fmt::Display for EventType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
EventType::NewBlock => write!(f, "NewBlock"),
EventType::Tx => write!(f, "Tx"),
}
}
}
/// The different types of conditions supported by a [`Query`].
///
/// [`Query`]: struct.Query.html
#[derive(Debug, Clone, PartialEq)]
pub enum Condition {
/// Equals
Eq(String, Operand),
/// Less than
Lt(String, Operand),
/// Less than or equal to
Lte(String, Operand),
/// Greater than
Gt(String, Operand),
/// Greater than or equal to
Gte(String, Operand),
/// Contains (to check if a key contains a certain sub-string)
Contains(String, String),
/// Exists (to check if a key exists)
Exists(String),
}
impl fmt::Display for Condition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Condition::Eq(key, op) => write!(f, "{} = {}", key, op),
Condition::Lt(key, op) => write!(f, "{} < {}", key, op),
Condition::Lte(key, op) => write!(f, "{} <= {}", key, op),
Condition::Gt(key, op) => write!(f, "{} > {}", key, op),
Condition::Gte(key, op) => write!(f, "{} >= {}", key, op),
Condition::Contains(key, op) => write!(f, "{} CONTAINS {}", key, escape(op)),
Condition::Exists(key) => write!(f, "{} EXISTS", key),
}
}
}
/// A typed operand for use in a [`Condition`].
///
/// According to the [Tendermint RPC subscribe docs][tm-subscribe],
/// an operand can be a string, number, date or time. We differentiate here
/// between integer and floating point numbers.
///
/// [`Condition`]: enum.Condition.html
/// [tm-subscribe]: https://docs.tendermint.com/master/rpc/#/Websocket/subscribe
#[derive(Debug, Clone, PartialEq)]
pub enum Operand {
String(String),
Signed(i64),
Unsigned(u64),
Float(f64),
Date(Date<Utc>),
DateTime(DateTime<Utc>),
}
impl fmt::Display for Operand {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Operand::String(s) => write!(f, "{}", escape(s)),
Operand::Signed(i) => write!(f, "{}", i),
Operand::Unsigned(u) => write!(f, "{}", u),
Operand::Float(h) => write!(f, "{}", h),
Operand::Date(d) => write!(f, "{}", escape(&d.format("%Y-%m-%d").to_string())),
Operand::DateTime(dt) => write!(f, "{}", escape(&dt.to_rfc3339())),
}
}
}
impl From<String> for Operand {
fn from(source: String) -> Self {
Operand::String(source)
}
}
impl From<char> for Operand {
fn from(source: char) -> Self {
Operand::String(source.to_string())
}
}
impl From<&str> for Operand {
fn from(source: &str) -> Self {
Operand::String(source.to_string())
}
}
impl From<i64> for Operand {
fn from(source: i64) -> Self {
Operand::Signed(source)
}
}
impl From<i32> for Operand {
fn from(source: i32) -> Self {
Operand::Signed(source as i64)
}
}
impl From<i16> for Operand {
fn from(source: i16) -> Self {
Operand::Signed(source as i64)
}
}
impl From<i8> for Operand {
fn from(source: i8) -> Self {
Operand::Signed(source as i64)
}
}
impl From<u64> for Operand {
fn from(source: u64) -> Self {
Operand::Unsigned(source)
}
}
impl From<u32> for Operand {
fn from(source: u32) -> Self {
Operand::Unsigned(source as u64)
}
}
impl From<u16> for Operand {
fn from(source: u16) -> Self {
Operand::Unsigned(source as u64)
}
}
impl From<u8> for Operand {
fn from(source: u8) -> Self {
Operand::Unsigned(source as u64)
}
}
impl From<usize> for Operand {
fn from(source: usize) -> Self {
Operand::Unsigned(source as u64)
}
}
impl From<f64> for Operand {
fn from(source: f64) -> Self {
Operand::Float(source)
}
}
impl From<f32> for Operand {
fn from(source: f32) -> Self {
Operand::Float(source as f64)
}
}
impl From<Date<Utc>> for Operand {
fn from(source: Date<Utc>) -> Self {
Operand::Date(source)
}
}
impl From<DateTime<Utc>> for Operand {
fn from(source: DateTime<Utc>) -> Self {
Operand::DateTime(source)
}
}
impl From<DateTime<FixedOffset>> for Operand {
fn from(source: DateTime<FixedOffset>) -> Self {
Operand::DateTime(source.into())
}
}
/// Escape backslashes and single quotes within the given string with a backslash.
fn escape(s: &str) -> String {
let mut result = String::new();
for ch in s.chars() {
if ch == '\\' || ch == '\'' {
result.push('\\');
}
result.push(ch);
}
format!("'{}'", result)
}
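// For illustration: escape(r"it's") returns 'it\'s', i.e. the value is
// wrapped in single quotes and any inner quote or backslash gains a
// backslash prefix, matching the quoting used by the Display impls above.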
#[cfg(test)]
mod test {
use super::*;
use chrono::NaiveDate;
#[test]
fn empty_query() {
let query = Query::default();
assert_eq!("", query.to_string());
}
#[test]
fn simple_event_type() {
let query = Query::from(EventType::NewBlock);
assert_eq!("tm.event = 'NewBlock'", query.to_string());
let query = Query::from(EventType::Tx);
assert_eq!("tm.event = 'Tx'", query.to_string());
}
#[test]
fn simple_condition() {
let query = Query::eq("key", "value");
assert_eq!("key = 'value'", query.to_string());
let query = Query::eq("key", 'v');
assert_eq!("key = 'v'", query.to_string());
let query = Query::eq("key", "'value'");
assert_eq!("key = '\\'value\\''", query.to_string());
let query = Query::eq("key", "\\'value'");
assert_eq!("key = '\\\\\\'value\\''", query.to_string());
let query = Query::lt("key", 42_i64);
assert_eq!("key < 42", query.to_string());
let query = Query::lt("key", 42_u64);
assert_eq!("key < 42", query.to_string());
let query = Query::lte("key", 42_i64);
assert_eq!("key <= 42", query.to_string());
let query = Query::gt("key", 42_i64);
assert_eq!("key > 42", query.to_string());
let query = Query::gte("key", 42_i64);
assert_eq!("key >= 42", query.to_string());
let query = Query::eq("key", 42_u8);
assert_eq!("key = 42", query.to_string());
let query = Query::contains("key", "some-substring");
assert_eq!("key CONTAINS 'some-substring'", query.to_string());
let query = Query::exists("key");
assert_eq!("key EXISTS", query.to_string());
}
#[test]
fn date_condition() {
let query = Query::eq(
"some_date",
Date::from_utc(NaiveDate::from_ymd(2020, 9, 24), Utc),
);
assert_eq!("some_date = '2020-09-24'", query.to_string());
}
#[test]
fn date_time_condition() {
let query = Query::eq(
"some_date_time",
DateTime::parse_from_rfc3339("2020-09-24T10:17:23-04:00").unwrap(),
);
assert_eq!(
"some_date_time = '2020-09-24T14:17:23+00:00'",
query.to_string()
);
}
#[test]
fn complex_query() {
let query = Query::from(EventType::Tx).and_eq("tx.height", 3_i64);
assert_eq!("tm.event = 'Tx' AND tx.height = 3", query.to_string());
let query = Query::from(EventType::Tx)
.and_lte("tx.height", 100_i64)
.and_eq("transfer.sender", "AddrA");
assert_eq!(
"tm.event = 'Tx' AND tx.height <= 100 AND transfer.sender = 'AddrA'",
query.to_string()
);
let query = Query::from(EventType::Tx)
.and_lte("tx.height", 100_i64)
.and_contains("meta.attr", "some-substring");
assert_eq!(
"tm.event = 'Tx' AND tx.height <= 100 AND meta.attr CONTAINS 'some-substring'",
query.to_string()
);
}
}
| 28.676587 | 100 | 0.560161 |
11575139adcf6656b069e3e14ffa45e6d771afc4 | 6,477 | use anyhow::Error;
use flate2::read::GzDecoder;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use tar::Archive;
const DEFAULT_TARGET: &str = "x86_64-unknown-linux-gnu";
#[derive(Debug, Hash, Eq, PartialEq, Clone)]
pub(crate) enum PkgType {
Rust,
RustSrc,
Rustc,
Cargo,
Rls,
RustAnalyzer,
Clippy,
Rustfmt,
LlvmTools,
Miri,
Other(String),
}
impl PkgType {
pub(crate) fn from_component(component: &str) -> Self {
match component {
"rust" => PkgType::Rust,
"rust-src" => PkgType::RustSrc,
"rustc" => PkgType::Rustc,
"cargo" => PkgType::Cargo,
"rls" | "rls-preview" => PkgType::Rls,
"rust-analyzer" | "rust-analyzer-preview" => PkgType::RustAnalyzer,
"clippy" | "clippy-preview" => PkgType::Clippy,
"rustfmt" | "rustfmt-preview" => PkgType::Rustfmt,
"llvm-tools" | "llvm-tools-preview" => PkgType::LlvmTools,
"miri" | "miri-preview" => PkgType::Miri,
other => PkgType::Other(other.into()),
}
}
/// First part of the tarball name.
fn tarball_component_name(&self) -> &str {
match self {
PkgType::Rust => "rust",
PkgType::RustSrc => "rust-src",
PkgType::Rustc => "rustc",
PkgType::Cargo => "cargo",
PkgType::Rls => "rls",
PkgType::RustAnalyzer => "rust-analyzer",
PkgType::Clippy => "clippy",
PkgType::Rustfmt => "rustfmt",
PkgType::LlvmTools => "llvm-tools",
PkgType::Miri => "miri",
PkgType::Other(component) => component,
}
}
/// Whether this package has the same version as Rust itself, or has its own `version` and
/// `git-commit-hash` files inside the tarball.
fn should_use_rust_version(&self) -> bool {
match self {
PkgType::Cargo => false,
PkgType::Rls => false,
PkgType::RustAnalyzer => false,
PkgType::Clippy => false,
PkgType::Rustfmt => false,
PkgType::LlvmTools => false,
PkgType::Miri => false,
PkgType::Rust => true,
PkgType::RustSrc => true,
PkgType::Rustc => true,
PkgType::Other(_) => true,
}
}
/// Whether this package is target-independent or not.
fn target_independent(&self) -> bool {
*self == PkgType::RustSrc
}
}
#[derive(Debug, Default, Clone)]
pub(crate) struct VersionInfo {
pub(crate) version: Option<String>,
pub(crate) git_commit: Option<String>,
pub(crate) present: bool,
}
pub(crate) struct Versions {
channel: String,
dist_path: PathBuf,
versions: HashMap<PkgType, VersionInfo>,
}
impl Versions {
pub(crate) fn new(channel: &str, dist_path: &Path) -> Result<Self, Error> {
Ok(Self { channel: channel.into(), dist_path: dist_path.into(), versions: HashMap::new() })
}
pub(crate) fn channel(&self) -> &str {
&self.channel
}
pub(crate) fn version(&mut self, mut package: &PkgType) -> Result<VersionInfo, Error> {
if package.should_use_rust_version() {
package = &PkgType::Rust;
}
match self.versions.get(package) {
Some(version) => Ok(version.clone()),
None => {
let version_info = self.load_version_from_tarball(package)?;
self.versions.insert(package.clone(), version_info.clone());
Ok(version_info)
}
}
}
fn load_version_from_tarball(&mut self, package: &PkgType) -> Result<VersionInfo, Error> {
let tarball_name = self.tarball_name(package, DEFAULT_TARGET)?;
let tarball = self.dist_path.join(tarball_name);
let file = match File::open(&tarball) {
Ok(file) => file,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
// Missing tarballs do not return an error, but return empty data.
return Ok(VersionInfo::default());
}
Err(err) => return Err(err.into()),
};
let mut tar = Archive::new(GzDecoder::new(file));
let mut version = None;
let mut git_commit = None;
for entry in tar.entries()? {
let mut entry = entry?;
let dest;
match entry.path()?.components().nth(1).and_then(|c| c.as_os_str().to_str()) {
Some("version") => dest = &mut version,
Some("git-commit-hash") => dest = &mut git_commit,
_ => continue,
}
let mut buf = String::new();
entry.read_to_string(&mut buf)?;
*dest = Some(buf);
// Short circuit to avoid reading the whole tar file if not necessary.
if version.is_some() && git_commit.is_some() {
break;
}
}
Ok(VersionInfo { version, git_commit, present: true })
}
pub(crate) fn disable_version(&mut self, package: &PkgType) {
match self.versions.get_mut(package) {
Some(version) => {
*version = VersionInfo::default();
}
None => {
self.versions.insert(package.clone(), VersionInfo::default());
}
}
}
pub(crate) fn archive_name(
&mut self,
package: &PkgType,
target: &str,
extension: &str,
) -> Result<String, Error> {
let component_name = package.tarball_component_name();
let version = match self.channel.as_str() {
"stable" => self.rustc_version().into(),
"beta" => "beta".into(),
"nightly" => "nightly".into(),
_ => format!("{}-dev", self.rustc_version()),
};
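        // For illustration: on the "nightly" channel this produces names like
        // "cargo-nightly-x86_64-unknown-linux-gnu.tar.gz", while a
        // target-independent package such as rust-src yields
        // "rust-src-nightly.tar.gz".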
if package.target_independent() {
Ok(format!("{}-{}.{}", component_name, version, extension))
} else {
Ok(format!("{}-{}-{}.{}", component_name, version, target, extension))
}
}
pub(crate) fn tarball_name(
&mut self,
package: &PkgType,
target: &str,
) -> Result<String, Error> {
self.archive_name(package, target, "tar.gz")
}
pub(crate) fn rustc_version(&self) -> &str {
const RUSTC_VERSION: &str = include_str!("../../../version");
RUSTC_VERSION.trim()
}
}
| 31.595122 | 99 | 0.538521 |
6aa914f80b7eb944b734c358b4019c0cacaf9718 | 4,881 | mod private;
mod public;
mod queue;
pub use private::Private;
pub use public::Public;
use super::*;
use cfg_if::cfg_if;
use cfg_match::cfg_match;
use serde::{Deserialize, Serialize};
cfg_if! {
if #[cfg(feature = "std_web")] {
use stdweb::Value;
#[allow(unused_imports)]
use stdweb::{_js_impl, js};
} else if #[cfg(feature = "web_sys")] {
use crate::utils;
use js_sys::{Array, Reflect, Uint8Array};
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
use web_sys::{Blob, BlobPropertyBag, DedicatedWorkerGlobalScope, MessageEvent, Url, Worker, WorkerOptions};
}
}
/// Implements rules to register a worker in a separate thread.
pub trait Threaded {
/// Executes an agent in the current environment.
    /// Used in the `main` function of a worker.
fn register();
}
/// Message packager, based on serde::Serialize/Deserialize
pub trait Packed {
/// Pack serializable message into Vec<u8>
fn pack(&self) -> Vec<u8>;
/// Unpack deserializable message of byte slice
fn unpack(data: &[u8]) -> Self;
}
impl<T: Serialize + for<'de> Deserialize<'de>> Packed for T {
fn pack(&self) -> Vec<u8> {
bincode::serialize(&self).expect("can't serialize an agent message")
}
fn unpack(data: &[u8]) -> Self {
bincode::deserialize(&data).expect("can't deserialize an agent message")
}
}
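// A tiny round-trip sketch of the blanket impl above; `Ping` is a
// hypothetical message type used only for illustration.
#[cfg(test)]
mod packed_example {
    use super::Packed;
    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize, PartialEq, Debug)]
    struct Ping(u32);

    #[test]
    fn pack_then_unpack_roundtrips() {
        let bytes = Ping(7).pack();
        assert_eq!(Ping::unpack(&bytes), Ping(7));
    }
}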
/// Serializable messages to worker
#[derive(Serialize, Deserialize, Debug)]
enum ToWorker<T> {
/// Client is connected
Connected(HandlerId),
/// Incoming message to Worker
ProcessInput(HandlerId, T),
/// Client is disconnected
Disconnected(HandlerId),
/// Worker should be terminated
Destroy,
}
/// Serializable messages sent by worker to consumer
#[derive(Serialize, Deserialize, Debug)]
enum FromWorker<T> {
    /// Worker sends this message when the `wasm` bundle has loaded.
WorkerLoaded,
/// Outgoing message to consumer
ProcessOutput(HandlerId, T),
}
fn send_to_remote<AGN>(
#[cfg(feature = "std_web")] worker: &Value,
#[cfg(feature = "web_sys")] worker: &Worker,
msg: ToWorker<AGN::Input>,
) where
AGN: Agent,
<AGN as Agent>::Input: Serialize + for<'de> Deserialize<'de>,
<AGN as Agent>::Output: Serialize + for<'de> Deserialize<'de>,
{
let msg = msg.pack();
cfg_match! {
feature = "std_web" => js! {
var worker = @{worker};
var bytes = @{msg};
worker.postMessage(bytes);
},
feature = "web_sys" => worker.post_message_vec(msg),
};
}
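// Note on `worker_new` below: browsers only allow constructing a `Worker`
// from a same-origin URL, so the function wraps an `importScripts` call in a
// Blob and spawns the worker from the resulting blob: URL, which then loads
// and initializes the actual wasm bundle.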
#[cfg(feature = "web_sys")]
fn worker_new(name_of_resource: &str, is_module: bool) -> Worker {
let origin = utils::origin().unwrap();
let script_url = format!("{}/{}", origin, name_of_resource);
let wasm_url = format!("{}/{}", origin, name_of_resource.replace(".js", "_bg.wasm"));
let array = Array::new();
array.push(
&format!(
r#"importScripts("{}");wasm_bindgen("{}");"#,
script_url, wasm_url
)
.into(),
);
let blob = Blob::new_with_str_sequence_and_options(
&array,
BlobPropertyBag::new().type_("application/javascript"),
)
.unwrap();
let url = Url::create_object_url_with_blob(&blob).unwrap();
if is_module {
let options = WorkerOptions::new();
Reflect::set(
options.as_ref(),
&JsValue::from_str("type"),
&JsValue::from_str("module"),
)
.unwrap();
Worker::new_with_options(&url, &options).expect("failed to spawn worker")
} else {
Worker::new(&url).expect("failed to spawn worker")
}
}
#[cfg(feature = "web_sys")]
fn worker_self() -> DedicatedWorkerGlobalScope {
JsValue::from(js_sys::global()).into()
}
#[cfg(feature = "web_sys")]
trait WorkerExt {
fn set_onmessage_closure(&self, handler: impl 'static + Fn(Vec<u8>));
fn post_message_vec(&self, data: Vec<u8>);
}
#[cfg(feature = "web_sys")]
macro_rules! worker_ext_impl {
($($type:ident),+) => {$(
impl WorkerExt for $type {
fn set_onmessage_closure(&self, handler: impl 'static + Fn(Vec<u8>)) {
let handler = move |message: MessageEvent| {
let data = Uint8Array::from(message.data()).to_vec();
handler(data);
};
let closure = Closure::wrap(Box::new(handler) as Box<dyn Fn(MessageEvent)>);
self.set_onmessage(Some(closure.as_ref().unchecked_ref()));
closure.forget();
}
fn post_message_vec(&self, data: Vec<u8>) {
self.post_message(&Uint8Array::from(data.as_slice()))
.expect("failed to post message");
}
}
)+};
}
#[cfg(feature = "web_sys")]
worker_ext_impl! {
Worker, DedicatedWorkerGlobalScope
}
| 29.762195 | 115 | 0.600697 |
75556b01179a6c55ef8cdaaca5a37f1a58a88cc2 | 2,612 | // Copyright 2019 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the
// Software without restriction, including without
// limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
jsonrpsee::rpc_api! {
Health {
/// Test
fn system_name(foo: String, bar: i32) -> String;
fn test_notif(foo: String, bar: i32);
/// Test2
#[rpc(method = "foo")]
fn system_name2() -> String;
}
System {
fn test_foo() -> String;
}
}
fn main() {
// Spawning a server in a background task.
async_std::task::spawn(async move {
let listen_addr = "127.0.0.1:8000".parse().unwrap();
let mut server1 = jsonrpsee::http_server(&listen_addr).await.unwrap();
while let Ok(request) = Health::next_request(&mut server1).await {
match request {
Health::SystemName { respond, foo, bar } => {
let value = format!("{}, {}", foo, bar);
respond.ok(value).await;
}
Health::SystemName2 { respond } => {
respond.ok("hello 2").await;
}
Health::TestNotif { foo, bar } => {
println!("server got notif: {:?} {:?}", foo, bar);
}
}
}
});
// Client demo.
let mut client = jsonrpsee::http_client("http://127.0.0.1:8000");
let v = async_std::task::block_on(async {
Health::test_notif(&mut client, "notif_string", 192)
.await
.unwrap();
Health::system_name(&mut client, "hello", 5).await.unwrap()
});
println!("{:?}", v);
}
| 34.368421 | 78 | 0.602221 |
7a65d1768dafe87d1ba735599b23e74ade36b16e | 631 | extern crate protobuf_codegen_pure;
use std::path::Path;
fn main() {
if !Path::new("src/googleplay.rs").exists() {
protobuf_codegen_pure::Codegen::new()
.out_dir("src")
.inputs(&["protos/googleplay.proto"])
.include("protos")
.customize(protobuf_codegen_pure::Customize {
expose_fields: Some(true),
generate_accessors: Some(false),
serde_derive: Some(true),
// singular_field_option: Some(true),
..Default::default()
})
.run()
.expect("protoc");
}
}
| 30.047619 | 57 | 0.51981 |
9be13a334802d725f80c84d8da0b22b8cd523667 | 7,100 | use crate::content::Content;
use crate::de::{FnApply, MapLookupVisitor};
use crate::ser::Wrap;
use crate::Registry;
use serde::de::{
self, DeserializeSeed, Deserializer, IgnoredAny, IntoDeserializer, MapAccess, SeqAccess,
Visitor,
};
use serde::ser::{SerializeStruct, Serializer};
use std::fmt;
pub fn serialize<S, T>(
serializer: S,
trait_object: &'static str,
tag: &'static str,
variant: &'static str,
content: &'static str,
concrete: &T,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: ?Sized + erased_serde::Serialize,
{
let mut ser = serializer.serialize_struct(trait_object, 2)?;
ser.serialize_field(tag, variant)?;
ser.serialize_field(content, &Wrap(concrete))?;
ser.end()
}
pub fn deserialize<'de, D, T>(
deserializer: D,
trait_object: &'static str,
fields: &'static [&'static str],
registry: &'static Registry<T>,
) -> Result<Box<T>, D::Error>
where
D: Deserializer<'de>,
T: ?Sized,
{
let visitor = TaggedVisitor {
trait_object,
tag: fields[0],
content: fields[1],
registry,
};
deserializer.deserialize_struct(trait_object, fields, visitor)
}
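// For orientation: with fields = ["type", "value"], the visitor below accepts
// JSON with the two keys in either order, e.g. (type and field names here are
// hypothetical, for illustration only):
//
//     {"type": "Circle", "value": {"radius": 1.0}}
//     {"value": {"radius": 1.0}, "type": "Circle"}
//
// as well as the two-element sequence form ["Circle", {"radius": 1.0}].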
struct TaggedVisitor<T: ?Sized + 'static> {
trait_object: &'static str,
tag: &'static str,
content: &'static str,
registry: &'static Registry<T>,
}
impl<'de, T: ?Sized> Visitor<'de> for TaggedVisitor<T> {
type Value = Box<T>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "dyn {}", self.trait_object)
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
let map_lookup = MapLookupVisitor {
expected: &self,
registry: self.registry,
};
let field_seed = TagContentOtherFieldVisitor {
tag: self.tag,
content: self.content,
};
let next_relevant_key = |map: &mut A| {
while let Some(key) = map.next_key_seed(field_seed)? {
match key {
TagContentOtherField::Tag => return Ok(Some(TagOrContentField::Tag)),
TagContentOtherField::Content => return Ok(Some(TagOrContentField::Content)),
TagContentOtherField::Other => {
map.next_value::<IgnoredAny>()?;
continue;
}
}
}
Ok(None)
};
// Visit the first relevant key.
let ret = match next_relevant_key(&mut map)? {
// First key is the tag.
Some(TagOrContentField::Tag) => {
// Parse the tag.
let deserialize_fn = map.next_value_seed(map_lookup)?;
// Visit the second key.
match next_relevant_key(&mut map)? {
// Second key is a duplicate of the tag.
Some(TagOrContentField::Tag) => {
return Err(de::Error::duplicate_field(self.tag));
}
// Second key is the content.
Some(TagOrContentField::Content) => {
let fn_apply = FnApply { deserialize_fn };
map.next_value_seed(fn_apply)?
}
                    // There is no second key; that might be okay if we have a unit variant.
None => {
let fn_apply = FnApply { deserialize_fn };
let unit = ().into_deserializer();
return fn_apply.deserialize(unit);
}
}
}
// First key is the content.
Some(TagOrContentField::Content) => {
// Buffer up the content.
let content = map.next_value::<Content>()?;
// Visit the second key.
match next_relevant_key(&mut map)? {
// Second key is the tag.
Some(TagOrContentField::Tag) => {
// Parse the tag.
let deserialize_fn = map.next_value_seed(map_lookup)?;
let fn_apply = FnApply { deserialize_fn };
let content = content.into_deserializer();
fn_apply.deserialize(content)?
}
// Second key is a duplicate of the content.
Some(TagOrContentField::Content) => {
return Err(de::Error::duplicate_field(self.content));
}
// There is no second key.
None => return Err(de::Error::missing_field(self.tag)),
}
}
// There is no first key.
None => return Err(de::Error::missing_field(self.tag)),
};
match next_relevant_key(&mut map)? {
Some(TagOrContentField::Tag) => Err(de::Error::duplicate_field(self.tag)),
Some(TagOrContentField::Content) => Err(de::Error::duplicate_field(self.content)),
None => Ok(ret),
}
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let map_lookup = MapLookupVisitor {
expected: &self,
registry: self.registry,
};
// Visit the first element - the tag.
let deserialize_fn = match seq.next_element_seed(map_lookup)? {
Some(deserialize_fn) => deserialize_fn,
None => return Err(de::Error::invalid_length(0, &self)),
};
// Visit the second element - the content.
let fn_apply = FnApply { deserialize_fn };
match seq.next_element_seed(fn_apply)? {
Some(ret) => Ok(ret),
None => Err(de::Error::invalid_length(1, &self)),
}
}
}
enum TagOrContentField {
Tag,
Content,
}
enum TagContentOtherField {
Tag,
Content,
Other,
}
#[derive(Copy, Clone)]
struct TagContentOtherFieldVisitor {
tag: &'static str,
content: &'static str,
}
impl<'de> DeserializeSeed<'de> for TagContentOtherFieldVisitor {
type Value = TagContentOtherField;
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(self)
}
}
impl<'de> Visitor<'de> for TagContentOtherFieldVisitor {
type Value = TagContentOtherField;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(
formatter,
"{:?}, {:?}, or other ignored fields",
self.tag, self.content
)
}
fn visit_str<E>(self, field: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
if field == self.tag {
Ok(TagContentOtherField::Tag)
} else if field == self.content {
Ok(TagContentOtherField::Content)
} else {
Ok(TagContentOtherField::Other)
}
}
}
| 31.555556 | 97 | 0.532113 |
cc06edb331747d7d33ad58556ed5c76b69f5735f | 1,775 | use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use chrono::{DateTime, Utc};
use log::error;
use super::{ExportEntry, Families};
pub struct SessionRoutes {
pub families: Families,
pub routes: HashMap<DateTime<Utc>, Arc<ExportEntry>>,
pending: HashSet<DateTime<Utc>>,
advertised: HashSet<DateTime<Utc>>,
}
impl SessionRoutes {
pub fn new(families: Families) -> Self {
Self {
families,
routes: HashMap::new(),
pending: HashSet::new(),
advertised: HashSet::new(),
}
}
pub fn pending(&self) -> Vec<Arc<ExportEntry>> {
self.routes
.iter()
.filter(|(ts, _)| self.pending.contains(&ts))
.filter(|(_, entry)| self.families.contains(entry.update.family))
.map(|(_, entry)| entry.clone())
.collect()
}
pub fn advertised(&self) -> Vec<Arc<ExportEntry>> {
self.routes
.iter()
.filter(|(ts, _)| self.advertised.contains(&ts))
.filter(|(_, entry)| self.families.contains(entry.update.family))
.map(|(_, entry)| entry.clone())
.collect()
}
pub fn insert_routes(&mut self, entries: Vec<Arc<ExportEntry>>) {
for entry in entries.into_iter() {
let ts = entry.timestamp;
// If this entry is not present, add to pending routes
if self.routes.insert(ts, entry).is_none() {
self.pending.insert(ts);
}
}
}
pub fn mark_advertised(&mut self, entry: &Arc<ExportEntry>) {
let ts = entry.timestamp;
if !self.pending.remove(&ts) {
error!("No route to remove: {}", ts);
}
self.advertised.insert(ts);
}
}
| 29.098361 | 77 | 0.548169 |
cccb32bd56801341a8810a8fa22d56e936dbc2d7 | 4,103 | use std::collections::{HashMap, VecDeque, HashSet};
use std::fs::File;
use std::io::{BufReader, BufRead};
use regex::Regex;
use std::iter::{FromIterator, Peekable};
use std::cmp::{max, min};
use std::mem;
mod test;
fn read_lines_from_file(filename: &str) -> std::io::Result<Vec<String>> {
let f = File::open(filename)?;
let reader = BufReader::new(f);
return reader.lines().collect();
}
type I = i64;
#[derive(Debug, Clone)]
enum LexerItem {
Paren(char),
Operand(char),
Number(I),
}
impl LexerItem {
fn lex(input: &String) -> Result<Vec<LexerItem>, String> {
let mut result = Vec::new();
let mut it = input.chars().peekable();
while let Some(&c) = it.peek() {
match c {
'0'..='9' => {
it.next();
let n = LexerItem::get_number(c, &mut it);
result.push(LexerItem::Number(n));
}
'+' | '*' => {
result.push(LexerItem::Operand(c));
it.next();
}
'(' | ')' => {
result.push(LexerItem::Paren(c));
it.next();
}
' ' => {
it.next();
}
_ => {
return Err(format!("unexpected character {}", c));
}
}
}
Ok(result)
}
fn get_number<T: Iterator<Item = char>>(c: char, iter: &mut Peekable<T>) -> I {
let mut number = c.to_string().parse::<I>().expect("The caller should have passed a digit.");
while let Some(Ok(digit)) = iter.peek().map(|c| c.to_string().parse::<I>()) {
number = number * 10 + digit;
iter.next();
}
number
}
}
fn eval_number(lex: &LexerItem) -> I {
match lex {
LexerItem::Number(i) => *i,
_ => panic!("Unexpected token {:?}", lex)
}
}
fn eval_operand(lex: &LexerItem) -> char {
match lex {
LexerItem::Operand(i) => *i,
_ => panic!("Unexpected token {:?}", lex)
}
}
pub trait PeekableIterator : std::iter::Iterator {
fn peek(&mut self) -> Option<&Self::Item>;
}
impl<I: std::iter::Iterator> PeekableIterator for std::iter::Peekable<I> {
fn peek(&mut self) -> Option<&Self::Item> {
std::iter::Peekable::peek(self)
}
}
fn eval(prg: &mut dyn PeekableIterator<Item = &LexerItem>) -> I {
    let mut acc = eval_number_or_term(prg);
    if let Some(n) = prg.peek() {
        // Anything other than an operand here means this (sub)expression is done.
        if !matches!(n, LexerItem::Operand(_)) {
            return acc;
        }
    }
while let Some(lex1) = prg.peek() {
if let LexerItem::Paren(')') = lex1 {
return acc;
}
let lex2 = prg.next().unwrap();
let op = eval_operand(lex2);
        let term: I;
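        // '+' grabs only the next factor, while '*' recursively evaluates the
        // rest of the expression first, so addition effectively binds tighter
        // than multiplication here.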
if op == '+' {
term = eval_number_or_term(prg);
} else {
term = eval(prg);
}
print!("Calc: {} {} {} = ", acc, op, term);
acc = match op {
'+' => acc + term,
'-' => acc - term,
'*' => acc * term,
'/' => acc / term,
_ => panic!("Unknown operand {}", op)
};
println!("{}", acc);
}
return acc
}
fn eval_number_or_term(prg: &mut dyn PeekableIterator<Item = &LexerItem>) -> I {
let lex = prg.next().expect("Expect number");
let acc: I;
if let LexerItem::Paren('(') = lex {
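        // Subtle: `prg.peekable()` below wraps the iterator in a *new*
        // temporary Peekable. When the inner eval peeks at the closing ')',
        // it pulls that token out of `prg` and buffers it in the wrapper,
        // which is then dropped; the ')' is therefore already consumed by the
        // time we return here, hence the disabled explicit next() call.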
acc = eval(&mut prg.peekable());
// prg.next().expect("End paren");
println!("Term is: {}", acc);
} else {
acc = eval_number(&lex);
}
acc
}
fn main() -> std::io::Result<()> {
let mut data = read_lines_from_file("sample.txt")?;
let result = data.iter()
.map(|s|{
let prg = LexerItem::lex(s).unwrap();
eval(&mut prg.iter().peekable())
})
.inspect(|i| println!("{}", i))
.sum::<I>();
println!("Result {}", result);
Ok(())
}
| 24.278107 | 101 | 0.468438 |
ddedd17c14670d2261f6b86761413f2158246642 | 2,073 | //! The random color distortion algorithm.
use crate::common::*;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ColorJitterInit {
pub hue_shift: Option<R64>,
pub saturation_shift: Option<R64>,
pub value_shift: Option<R64>,
}
impl ColorJitterInit {
pub fn build(self) -> ColorJitter {
let Self {
hue_shift,
saturation_shift,
value_shift,
} = self;
ColorJitter {
max_hue_shift: hue_shift.map(R64::raw),
max_saturation_shift: saturation_shift.map(R64::raw),
max_value_shift: value_shift.map(R64::raw),
}
}
}
#[derive(Debug, Clone)]
pub struct ColorJitter {
max_hue_shift: Option<f64>,
max_saturation_shift: Option<f64>,
max_value_shift: Option<f64>,
}
impl ColorJitter {
pub fn forward(&self, rgb: &Tensor) -> Result<Tensor> {
tch::no_grad(|| -> Result<_> {
let (channels, _height, _width) = rgb.size3()?;
ensure!(
channels == 3,
"channel size must be 3, but get {}",
channels
);
let mut rng = StdRng::from_entropy();
let hsv = rgb.rgb_to_hsv();
let hue = hsv.select(0, 0);
let saturation = hsv.select(0, 1);
let value = hsv.select(0, 2);
if let Some(max_shift) = self.max_hue_shift {
let shift = rng.gen_range((-max_shift)..max_shift);
let _ = hue.g_add_scalar(shift + 1.0).fmod_(1.0);
}
if let Some(max_shift) = self.max_saturation_shift {
let shift = rng.gen_range((-max_shift)..max_shift);
let _ = saturation.g_add_scalar(shift).clamp_(0.0, 1.0);
}
if let Some(max_shift) = self.max_value_shift {
let shift = rng.gen_range((-max_shift)..max_shift);
let _ = value.g_add_scalar(shift).clamp_(0.0, 1.0);
}
let new_rgb = hsv.hsv_to_rgb();
Ok(new_rgb)
})
}
}
| 28.39726 | 72 | 0.541245 |
7a2edf0daadd7b3053077e01304c7797b3a902c0 | 1,715 | pub use lucet_wiggle_macro::lucet_integration;
pub use wiggle::*;
pub mod runtime {
use wiggle::{borrow::BorrowChecker, BorrowHandle, GuestError, GuestMemory, Region};
pub struct LucetMemory<'a> {
memory: &'a mut [u8],
bc: BorrowChecker,
}
impl<'a> LucetMemory<'a> {
pub fn new(memory: &'a mut [u8]) -> LucetMemory {
LucetMemory {
memory,
// Safety: we only construct a LucetMemory at the entry point of hostcalls, and
// hostcalls are not re-entered, therefore there is exactly one BorrowChecker per
// memory.
bc: BorrowChecker::new(),
}
}
}
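    // Minimal usage sketch (illustrative; in practice the slice is a Lucet
    // instance's linear memory rather than a plain Vec):
    //
    //     let mut heap = vec![0u8; 65536];
    //     let mem = LucetMemory::new(&mut heap);
    //     let (base, len) = mem.base();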
unsafe impl<'a> GuestMemory for LucetMemory<'a> {
fn base(&self) -> (*mut u8, u32) {
let len = self.memory.len() as u32;
let ptr = self.memory.as_ptr();
(ptr as *mut u8, len)
}
fn has_outstanding_borrows(&self) -> bool {
self.bc.has_outstanding_borrows()
}
fn is_mut_borrowed(&self, r: Region) -> bool {
self.bc.is_mut_borrowed(r)
}
fn is_shared_borrowed(&self, r: Region) -> bool {
self.bc.is_shared_borrowed(r)
}
fn mut_borrow(&self, r: Region) -> Result<BorrowHandle, GuestError> {
self.bc.mut_borrow(r)
}
fn shared_borrow(&self, r: Region) -> Result<BorrowHandle, GuestError> {
self.bc.shared_borrow(r)
}
fn mut_unborrow(&self, h: BorrowHandle) {
self.bc.mut_unborrow(h)
}
fn shared_unborrow(&self, h: BorrowHandle) {
self.bc.shared_unborrow(h)
}
}
}
| 32.358491 | 97 | 0.545773 |
fe5e6faaddcbf826c5faab1bb2bcc6f17e206dd5 | 744 | use crate::memory::Ref;
use crate::parser::CNode;
use crate::runtime::engine::Engine;
use crate::runtime::r#return::{ Flow, ReturnFlow };
use crate::walker::{ ANode, SNode };
use crate::walker::nodes::ADeclaration;
use crate::walker::traits::WExpression;
pub struct ALet {
declaration: SNode<ADeclaration>,
}
impl ALet {
pub fn new(declaration: SNode<ADeclaration>) -> Self {
Self {
declaration,
}
}
}
impl ANode for ALet {
fn build(node: Ref<CNode>) -> Self {
Self::new(SNode::build(node.front(1)))
}
}
impl WExpression for ALet {
fn walk<'a>(&self, engine: &mut Engine<'a>) -> ReturnFlow<'a> {
Flow::reference(self.declaration.get().walk(engine)?.build(engine))
}
}
| 23.25 | 75 | 0.627688 |
5bb977ba8d45ec02a7c958537bcf2380b12b8ce9 | 9,948 | // Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::fs::OpenOptions;
use std::io::{Read, Write};
use getopts::Options;
use disk::QcowFile;
use sys_util::WriteZeroes;
fn show_usage(program_name: &str) {
println!("Usage: {} [subcommand] <subcommand args>", program_name);
println!("\nSubcommands:");
println!(
"{} header <file name> - Show the qcow2 header for a file.",
program_name
);
println!(
"{} l1_table <file name> - Show the L1 table entries for a file.",
program_name
);
println!(
"{} l22table <file name> <l1 index> - Show the L2 table pointed to by the nth L1 entry.",
program_name
);
println!(
"{} ref_table <file name> - Show the refblock table for the file.",
program_name
);
println!(
"{} ref_block <file_name> <table index> - Show the nth reblock in the file.",
program_name
);
println!(
"{} dd <file_name> <source_file> - Write bytes from the raw source_file to the file.",
program_name
);
println!(
"{} convert <src_file> <dst_file> - Convert from src_file to dst_file.",
program_name
);
}
fn main() -> std::result::Result<(), ()> {
let args: Vec<String> = std::env::args().collect();
let opts = Options::new();
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
        Err(f) => panic!("{}", f),
};
if matches.free.len() < 2 {
println!("Must specify the subcommand and the QCOW file to operate on.");
show_usage(&args[0]);
return Err(());
}
match matches.free[0].as_ref() {
"header" => show_header(&matches.free[1]),
"help" => {
show_usage(&args[0]);
Ok(())
}
"l1_table" => show_l1_table(&matches.free[1]),
"l2_table" => {
            if matches.free.len() < 3 {
println!("Filename and table index are required.");
show_usage(&args[0]);
return Err(());
}
show_l2_table(&matches.free[1], matches.free[2].parse().unwrap())
}
"ref_table" => show_ref_table(&matches.free[1]),
"ref_block" => {
            if matches.free.len() < 3 {
println!("Filename and block index are required.");
show_usage(&args[0]);
return Err(());
}
show_ref_block(&matches.free[1], matches.free[2].parse().unwrap())
}
"dd" => {
            if matches.free.len() < 3 {
println!("Qcow and source file are required.");
show_usage(&args[0]);
return Err(());
}
let count = if matches.free.len() > 3 {
Some(matches.free[3].parse().unwrap())
} else {
None
};
dd(&matches.free[1], &matches.free[2], count)
}
"convert" => {
            if matches.free.len() < 3 {
println!("Source and destination files are required.");
show_usage(&args[0]);
return Err(());
}
convert(&matches.free[1], &matches.free[2])
}
c => {
println!("invalid subcommand: {:?}", c);
Err(())
}
}
}
fn show_header(file_path: &str) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let qcow_file = QcowFile::from(file).map_err(|_| ())?;
let header = qcow_file.header();
println!("magic {:x}", header.magic);
println!("version {:x}", header.version);
println!("backing_file_offset {:x}", header.backing_file_offset);
println!("backing_file_size {:x}", header.backing_file_size);
println!("cluster_bits {:x}", header.cluster_bits);
println!("size {:x}", header.size);
println!("crypt_method {:x}", header.crypt_method);
println!("l1_size {:x}", header.l1_size);
println!("l1_table_offset {:x}", header.l1_table_offset);
println!("refcount_table_offset {:x}", header.refcount_table_offset);
println!(
"refcount_table_clusters {:x}",
header.refcount_table_clusters
);
println!("nb_snapshots {:x}", header.nb_snapshots);
println!("snapshots_offset {:x}", header.snapshots_offset);
println!("incompatible_features {:x}", header.incompatible_features);
println!("compatible_features {:x}", header.compatible_features);
println!("autoclear_features {:x}", header.autoclear_features);
println!("refcount_order {:x}", header.refcount_order);
println!("header_size {:x}", header.header_size);
Ok(())
}
fn show_l1_table(file_path: &str) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let qcow_file = QcowFile::from(file).map_err(|_| ())?;
let l1_table = qcow_file.l1_table();
for (i, l2_offset) in l1_table.iter().enumerate() {
println!("{}: {:x}", i, l2_offset);
}
Ok(())
}
fn show_l2_table(file_path: &str, index: usize) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let mut qcow_file = QcowFile::from(file).map_err(|_| ())?;
let l2_table = qcow_file.l2_table(index).unwrap();
if let Some(cluster_addrs) = l2_table {
for (i, addr) in cluster_addrs.iter().enumerate() {
if i % 16 == 0 {
print!("\n{:x}:", i);
}
print!(" {:x}", addr);
}
}
Ok(())
}
fn show_ref_table(file_path: &str) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let qcow_file = QcowFile::from(file).map_err(|_| ())?;
let ref_table = qcow_file.ref_table();
for (i, block_offset) in ref_table.iter().enumerate() {
println!("{}: {:x}", i, block_offset);
}
Ok(())
}
fn show_ref_block(file_path: &str, index: usize) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let mut qcow_file = QcowFile::from(file).map_err(|_| ())?;
let ref_table = qcow_file.refcount_block(index).unwrap();
if let Some(counts) = ref_table {
for (i, count) in counts.iter().enumerate() {
if i % 16 == 0 {
print!("\n{:x}:", i);
}
print!(" {:x}", count);
}
}
Ok(())
}
// Transfers from a raw file specified in `source_path` to the qcow file specified in `file_path`.
fn dd(file_path: &str, source_path: &str, count: Option<usize>) -> std::result::Result<(), ()> {
let file = match OpenOptions::new().read(true).write(true).open(file_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let mut qcow_file = QcowFile::from(file).map_err(|_| ())?;
let mut src_file = match OpenOptions::new().read(true).open(source_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open {}", file_path);
return Err(());
}
};
let mut read_count = 0;
const CHUNK_SIZE: usize = 65536;
let mut buf = [0; CHUNK_SIZE];
loop {
let this_count = if let Some(count) = count {
std::cmp::min(CHUNK_SIZE, count - read_count)
} else {
CHUNK_SIZE
};
        let nread = src_file.read(&mut buf[..this_count]).map_err(|_| ())?;
        if nread == 0 {
            break;
        }
        // If this block is all zeros, then use write_zeroes so the output file is sparse.
        // Only the `nread` bytes actually read are inspected and written, so a
        // short final read never pads the output with stale buffer contents.
        if buf[..nread].iter().all(|b| *b == 0) {
            qcow_file.write_zeroes_all(nread).map_err(|_| ())?;
        } else {
            qcow_file.write_all(&buf[..nread]).map_err(|_| ())?;
        }
        read_count += nread;
        if Some(read_count) == count {
            break;
        }
}
println!("wrote {} bytes", read_count);
Ok(())
}
// Reads the file at `src_path` and writes it to `dst_path`.
// The output format is detected based on the `dst_path` file extension.
fn convert(src_path: &str, dst_path: &str) -> std::result::Result<(), ()> {
let src_file = match OpenOptions::new().read(true).open(src_path) {
Ok(f) => f,
Err(_) => {
println!("Failed to open source file {}", src_path);
return Err(());
}
};
let dst_file = match OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(dst_path)
{
Ok(f) => f,
Err(_) => {
println!("Failed to open destination file {}", dst_path);
return Err(());
}
};
let dst_type = if dst_path.ends_with("qcow2") {
disk::ImageType::Qcow2
} else {
disk::ImageType::Raw
};
match disk::convert(src_file, dst_file, dst_type) {
Ok(_) => {
println!("Converted {} to {}", src_path, dst_path);
Ok(())
}
Err(_) => {
println!("Failed to copy from {} to {}", src_path, dst_path);
Err(())
}
}
}
| 30.798762 | 98 | 0.526236 |
18f30fd54f5fea5656ae422df627dfeefc080b9e | 60,657 | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
pub mod extension;
mod tz;
pub mod weekmode;
use std::cmp::{min, Ordering};
use std::convert::{TryFrom, TryInto};
use std::fmt::Write;
use std::fmt::{self, Display, Formatter};
use std::{mem, str};
use byteorder::WriteBytesExt;
use chrono::{DateTime, Datelike, Duration, TimeZone, Timelike, Utc};
use tidb_query_datatype::FieldTypeTp;
use tikv_util::codec::number::{self, NumberEncoder};
use tikv_util::codec::BytesSlice;
use crate::codec::convert::ConvertTo;
use crate::codec::mysql::duration::{Duration as MyDuration, NANOS_PER_SEC, NANO_WIDTH};
use crate::codec::mysql::{self, Decimal};
use crate::codec::{Error, Result, TEN_POW};
use crate::expr::EvalContext;
pub use self::extension::*;
pub use self::tz::Tz;
pub use self::weekmode::WeekMode;
const ZERO_DATETIME_NUMERIC_STR: &str = "00000000000000";
const ZERO_DATE_NUMERIC_STR: &str = "00000000";
const ZERO_DATETIME_STR: &str = "0000-00-00 00:00:00";
const ZERO_DATE_STR: &str = "0000-00-00";
/// In go, `time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC)` will be adjusted to
/// `-0001-11-30 00:00:00 +0000 UTC`, whose timestamp is -62169984000.
const ZERO_TIMESTAMP: i64 = -62169984000;
/// In go, `time.Date(9999, 12, 31, 23, 59, 59, 0, time.UTC)` will be adjusted to
/// `9999-12-31 23:59:59 +0000 UTC`, whose timestamp is 253402300799.
pub const MAX_TIMESTAMP: i64 = 253402300799;
pub const MAX_TIME_NANOSECONDS: u32 = 999999000;
pub const MONTH_NAMES: &[&str] = &[
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December",
];
const MONTH_NAMES_ABBR: &[&str] = &[
"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
];
#[inline]
fn zero_time(tz: &Tz) -> DateTime<Tz> {
tz.timestamp(ZERO_TIMESTAMP, 0)
}
#[inline]
pub fn zero_datetime(tz: &Tz) -> Time {
Time::new(zero_time(tz), TimeType::DateTime, mysql::DEFAULT_FSP).unwrap()
}
#[allow(clippy::too_many_arguments)]
#[inline]
fn ymd_hms_nanos<T: TimeZone>(
tz: &T,
year: i32,
month: u32,
day: u32,
hour: u32,
min: u32,
secs: u32,
nanos: u32,
) -> Result<DateTime<T>> {
use chrono::NaiveDate;
// Note: We are not using `tz::from_ymd_opt` as suggested in chrono's README due to
// chronotope/chrono-tz #23.
// As a workaround, we first build a NaiveDate, then attach time zone information to it.
NaiveDate::from_ymd_opt(year, month, day)
.and_then(|date| date.and_hms_opt(hour, min, secs))
.and_then(|t| t.checked_add_signed(Duration::nanoseconds(i64::from(nanos))))
.and_then(|datetime| tz.from_local_datetime(&datetime).earliest())
.ok_or_else(|| {
Error::incorrect_datetime_value(&format!(
"{}-{}-{} {}:{}:{}.{:09}",
year, month, day, hour, min, secs, nanos
))
})
}
#[inline]
fn from_bytes(bs: &[u8]) -> &str {
unsafe { str::from_utf8_unchecked(bs) }
}
fn split_ymd_hms_with_frac_as_s(
mut s: &[u8],
frac: &[u8],
) -> Result<(i32, u32, u32, u32, u32, u32)> {
let year: i32;
if s.len() == 14 {
year = box_try!(from_bytes(&s[..4]).parse());
s = &s[4..];
} else {
year = box_try!(from_bytes(&s[..2]).parse());
s = &s[2..];
};
let month: u32 = box_try!(from_bytes(&s[..2]).parse());
let day: u32 = box_try!(from_bytes(&s[2..4]).parse());
let hour: u32 = box_try!(from_bytes(&s[4..6]).parse());
let minute: u32 = if s.len() == 7 {
box_try!(from_bytes(&s[6..7]).parse())
} else {
box_try!(from_bytes(&s[6..8]).parse())
};
let secs: u32 = if s.len() > 8 {
let i = if s.len() > 9 { 10 } else { 9 };
box_try!(from_bytes(&s[8..i]).parse())
} else {
match frac.len() {
0 => 0,
1 => box_try!(from_bytes(&frac[..1]).parse()),
_ => box_try!(from_bytes(&frac[..2]).parse()),
}
};
Ok((year, month, day, hour, minute, secs))
}
fn split_ymd_with_frac_as_hms(
mut s: &[u8],
frac: &[u8],
is_float: bool,
) -> Result<(i32, u32, u32, u32, u32, u32)> {
let year: i32;
if s.len() == 8 {
year = box_try!(from_bytes(&s[..4]).parse());
s = &s[4..];
} else {
year = box_try!(from_bytes(&s[..2]).parse());
s = &s[2..];
};
let month: u32 = box_try!(from_bytes(&s[..2]).parse());
let day: u32 = box_try!(from_bytes(&s[2..]).parse());
let (hour, minute, sec): (u32, u32, u32) = if is_float {
(0, 0, 0)
} else {
match frac.len() {
0 => (0, 0, 0),
1 | 2 => (box_try!(from_bytes(&frac[0..frac.len()]).parse()), 0, 0),
3 | 4 => (
box_try!(from_bytes(&frac[0..2]).parse()),
box_try!(from_bytes(&frac[2..frac.len()]).parse()),
0,
),
5 => (
box_try!(from_bytes(&frac[0..2]).parse()),
box_try!(from_bytes(&frac[2..4]).parse()),
box_try!(from_bytes(&frac[4..5]).parse()),
),
_ => (
box_try!(from_bytes(&frac[0..2]).parse()),
box_try!(from_bytes(&frac[2..4]).parse()),
box_try!(from_bytes(&frac[4..6]).parse()),
),
}
};
Ok((year, month, day, hour, minute, sec))
}
#[derive(Clone, Debug, Copy, PartialEq)]
pub enum TimeType {
Date,
DateTime,
Timestamp,
}
impl From<TimeType> for FieldTypeTp {
fn from(time_type: TimeType) -> FieldTypeTp {
match time_type {
TimeType::Date => FieldTypeTp::Date,
TimeType::DateTime => FieldTypeTp::DateTime,
TimeType::Timestamp => FieldTypeTp::Timestamp,
}
}
}
impl TryFrom<FieldTypeTp> for TimeType {
type Error = Error;
fn try_from(value: FieldTypeTp) -> Result<Self> {
match value {
FieldTypeTp::Date => Ok(TimeType::Date),
FieldTypeTp::DateTime => Ok(TimeType::DateTime),
FieldTypeTp::Timestamp => Ok(TimeType::Timestamp),
FieldTypeTp::Unspecified => Ok(TimeType::DateTime), // FIXME: We should forbid this
_ => Err(box_err!("Time does not support field type {}", value)),
}
}
}
/// `Time` is the struct for handling datetime, timestamp and date.
#[derive(Clone, Debug)]
pub struct Time {
// TimeZone should be loaded from request context.
time: DateTime<Tz>,
time_type: TimeType,
fsp: u8,
}
impl Time {
pub fn new(time: DateTime<Tz>, time_type: TimeType, fsp: i8) -> Result<Time> {
Ok(Time {
time,
time_type,
fsp: mysql::check_fsp(fsp)?,
})
}
pub fn get_time_type(&self) -> TimeType {
self.time_type
}
pub fn set_time_type(&mut self, time_type: TimeType) -> Result<()> {
if self.time_type != time_type && time_type == TimeType::Date {
// Truncate hh:mm::ss part if the type is Date
self.time = self.time.date().and_hms(0, 0, 0); // TODO: might panic!
}
if self.time_type != time_type && time_type == TimeType::Timestamp {
return Err(box_err!("can not convert datetime/date to timestamp"));
}
self.time_type = time_type;
Ok(())
}
pub fn is_zero(&self) -> bool {
self.time.timestamp() == ZERO_TIMESTAMP
}
pub fn invalid_zero(&self) -> bool {
self.time.month() == 0 || self.time.day() == 0
}
pub fn get_fsp(&self) -> u8 {
self.fsp
}
pub fn set_fsp(&mut self, fsp: u8) {
self.fsp = fsp;
}
pub fn get_time(&self) -> DateTime<Tz> {
self.time
}
pub fn set_time(&mut self, time: DateTime<Tz>) {
self.time = time
}
/// Converts a `DateTime` to printable string representation
#[inline]
pub fn to_numeric_string(&self) -> String {
if self.time_type == TimeType::Date {
if self.is_zero() {
String::from(ZERO_DATE_NUMERIC_STR)
} else {
format!("{}", self.time.format("%Y%m%d"))
}
        } else if self.is_zero() {
            if self.fsp > 0 {
                // Do we need to round the result?
                let nanos = self.time.nanosecond() / TEN_POW[9 - self.fsp as usize];
                format!(
                    "{}.{1:02$}",
                    ZERO_DATETIME_NUMERIC_STR, nanos, self.fsp as usize
                )
            } else {
                String::from(ZERO_DATETIME_NUMERIC_STR)
            }
        } else if self.fsp > 0 {
            let nanos = self.time.nanosecond() / TEN_POW[9 - self.fsp as usize];
            format!(
                "{}.{1:02$}",
                self.time.format("%Y%m%d%H%M%S"),
                nanos,
                self.fsp as usize
            )
        } else {
            format!("{}", self.time.format("%Y%m%d%H%M%S"))
        }
}
fn parse_datetime_format(s: &str) -> Vec<&str> {
let trimmed = s.trim();
if trimmed.is_empty() {
return vec![];
}
let spes: Vec<&str> = trimmed.split(|c| c < '0' || c > '9').collect();
if spes.iter().any(|s| s.is_empty()) {
vec![]
} else {
spes
}
}
fn split_datetime(s: &str) -> (Vec<&str>, &str) {
let trimmed = s.trim();
if trimmed.is_empty() {
return (vec![], "");
}
let (parts, fracs) = if let Some(i) = trimmed.rfind('.') {
(&trimmed[..i], &trimmed[i + 1..])
} else {
(trimmed, "")
};
(Time::parse_datetime_format(parts), fracs)
}
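    // Sketch of how the two helpers combine (derived from the code above, not an
    // extra code path): for "121231113045.123", `split_datetime` yields
    // (["121231113045"], "123"); the 12-digit part then maps to
    // 2012-12-31 11:30:45 via `split_ymd_hms_with_frac_as_s`, and the fraction
    // is parsed separately.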
pub fn parse_utc_datetime(s: &str, fsp: i8) -> Result<Time> {
Time::parse_datetime(s, fsp, &Tz::utc())
}
pub fn parse_utc_datetime_from_float_string(s: &str, fsp: i8) -> Result<Time> {
Time::parse_datetime_from_float_string(s, fsp, &Tz::utc())
}
pub fn parse_datetime(s: &str, fsp: i8, tz: &Tz) -> Result<Time> {
Time::parse_datetime_internal(s, fsp, tz, false)
}
pub fn parse_datetime_from_float_string(s: &str, fsp: i8, tz: &Tz) -> Result<Time> {
Time::parse_datetime_internal(s, fsp, tz, true)
}
fn parse_datetime_internal(s: &str, fsp: i8, tz: &Tz, is_float: bool) -> Result<Time> {
let fsp = mysql::check_fsp(fsp)?;
let mut need_adjust = false;
let mut has_hhmmss = false;
let (parts, frac_str) = Time::split_datetime(s);
let (mut year, month, day, hour, minute, sec): (i32, u32, u32, u32, u32, u32) = match *parts
.as_slice()
{
[s1] => {
need_adjust = s1.len() != 14 && s1.len() != 8;
has_hhmmss = s1.len() == 14 || s1.len() == 12 || s1.len() == 11;
match s1.len() {
14 | 12 | 11 | 10 | 9 => {
split_ymd_hms_with_frac_as_s(s1.as_bytes(), frac_str.as_bytes())?
}
8 | 6 | 5 => {
split_ymd_with_frac_as_hms(s1.as_bytes(), frac_str.as_bytes(), is_float)?
}
_ => {
return Err(box_err!(
"invalid datetime: {}, s1: {}, len: {}",
s,
s1,
s1.len()
));
}
}
}
[year, month, day] => (
box_try!(year.parse()),
box_try!(month.parse()),
box_try!(day.parse()),
0,
0,
0,
),
[year, month, day, hour, min] => (
box_try!(year.parse()),
box_try!(month.parse()),
box_try!(day.parse()),
box_try!(hour.parse()),
box_try!(min.parse()),
0,
),
[year, month, day, hour, min, sec] => {
has_hhmmss = true;
(
box_try!(year.parse()),
box_try!(month.parse()),
box_try!(day.parse()),
box_try!(hour.parse()),
box_try!(min.parse()),
box_try!(sec.parse()),
)
}
_ => return Err(Error::incorrect_datetime_value(s)),
};
if need_adjust || parts[0].len() == 2 {
if year >= 0 && year <= 69 {
year += 2000;
} else if year >= 70 && year <= 99 {
year += 1900;
}
}
let frac = if has_hhmmss {
mysql::parse_frac(frac_str.as_bytes(), fsp)?
} else {
0
};
if year == 0 && month == 0 && day == 0 && hour == 0 && minute == 0 && sec == 0 {
return Ok(zero_datetime(tz));
}
        // This can't happen until the year 10000.
        if year < 0 || year > 9999 {
            return Err(box_err!("unsupported year: {}", year));
}
let time = ymd_hms_nanos(
tz,
year,
month,
day,
hour,
minute,
sec,
frac * TEN_POW[9 - fsp as usize],
)?;
Time::new(time, TimeType::DateTime, fsp as i8)
}
pub fn parse_fsp(s: &str) -> i8 {
s.rfind('.').map_or(super::DEFAULT_FSP, |idx| {
min((s.len() - idx - 1) as i8, super::MAX_FSP)
})
}
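    // Example behavior, mirroring the `test_parse_fsp` cases below:
    //   Time::parse_fsp("2012-12-31 11:30:45.1234")   == 4
    //   Time::parse_fsp("2012-12-31 11:30:45.")       == 0  // nothing after the dot
    //   Time::parse_fsp("2017-01-05 23:59:59....432") == 3  // only the last dot counts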
    /// Get time from a packed u64. When `time_type` is `Timestamp`, the packed time
    /// should be a UTC time; otherwise the packed time should be in the same timezone
    /// as the `tz` specified.
pub fn from_packed_u64(u: u64, time_type: TimeType, fsp: i8, tz: &Tz) -> Result<Time> {
if u == 0 {
return Time::new(zero_time(tz), time_type, fsp);
}
let fsp = mysql::check_fsp(fsp)?;
let ymdhms = u >> 24;
let ymd = ymdhms >> 17;
let day = (ymd & ((1 << 5) - 1)) as u32;
let ym = ymd >> 5;
let month = (ym % 13) as u32;
let year = (ym / 13) as i32;
let hms = ymdhms & ((1 << 17) - 1);
let second = (hms & ((1 << 6) - 1)) as u32;
let minute = ((hms >> 6) & ((1 << 6) - 1)) as u32;
let hour = (hms >> 12) as u32;
let nanosec = ((u & ((1 << 24) - 1)) * 1000) as u32;
let t = if time_type == TimeType::Timestamp {
let t = ymd_hms_nanos(&Utc, year, month, day, hour, minute, second, nanosec)?;
tz.from_utc_datetime(&t.naive_utc())
} else {
ymd_hms_nanos(tz, year, month, day, hour, minute, second, nanosec)?
};
Time::new(t, time_type, fsp as i8)
}
pub fn from_duration(tz: &Tz, time_type: TimeType, d: MyDuration) -> Result<Time> {
let dur = Duration::nanoseconds(d.to_nanos());
let t = Utc::now()
.with_timezone(tz)
.date()
.and_hms(0, 0, 0) // TODO: might panic!
.checked_add_signed(dur);
if t.is_none() {
return Err(box_err!("parse from duration {} overflows", d));
}
let t = t.unwrap();
if t.year() < 1000 || t.year() > 9999 {
return Err(box_err!(
"datetime :{} out of range ('1000-01-01' to '9999-12-31')",
t
));
}
if time_type == TimeType::Date {
let t = t.date().and_hms(0, 0, 0); // TODO: might panic!
Time::new(t, time_type, d.fsp() as i8)
} else {
Time::new(t, time_type, d.fsp() as i8)
}
}
pub fn to_duration(&self) -> Result<MyDuration> {
if self.is_zero() {
return Ok(MyDuration::zero());
}
let nanos = i64::from(self.time.num_seconds_from_midnight()) * NANOS_PER_SEC
+ i64::from(self.time.nanosecond());
MyDuration::from_nanos(nanos, self.fsp as i8)
}
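    // Example (mirrors `test_convert_to_duration` below): "2017-01-05
    // 23:59:59.575601" parsed with fsp 0 is rounded up to 2017-01-06 00:00:00
    // at parse time, so `to_duration` keeps only the time-of-day part and
    // returns "00:00:00".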
/// Serialize time to a u64.
///
    /// If the time type is `Timestamp`, it will be converted to a UTC time first.
pub fn to_packed_u64(&self) -> u64 {
if self.is_zero() {
return 0;
}
let t = if self.time_type == TimeType::Timestamp {
self.time.naive_utc()
} else {
self.time.naive_local()
};
let ymd = ((t.year() as u64 * 13 + u64::from(t.month())) << 5) | u64::from(t.day());
let hms =
(u64::from(t.hour()) << 12) | (u64::from(t.minute()) << 6) | u64::from(t.second());
let micro = u64::from(t.nanosecond()) / 1000;
(((ymd << 17) | hms) << 24) | micro
}
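    // Packed-u64 layout sketch, read off from `from_packed_u64`/`to_packed_u64`
    // above (the widths are an interpretation of the shifts, not an external spec):
    //   bits 0..24   microseconds
    //   bits 24..41  hms = hour << 12 | minute << 6 | second
    //   bits 41..    ymd = ((year * 13 + month) << 5) | day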
pub fn round_frac(&mut self, fsp: i8) -> Result<()> {
if self.time_type == TimeType::Date || self.is_zero() {
// date type has no fsp
return Ok(());
}
let fsp = mysql::check_fsp(fsp)?;
if fsp == self.fsp {
return Ok(());
}
        // TODO: support the case where month or day is 0 (2012-00-00 12:12:12)
let nanos = self.time.nanosecond();
let base = TEN_POW[NANO_WIDTH - usize::from(fsp)];
let expect_nanos = ((f64::from(nanos) / f64::from(base)).round() as u32) * base;
let diff = i64::from(nanos) - i64::from(expect_nanos);
let new_time = self.time.checked_add_signed(Duration::nanoseconds(diff));
if new_time.is_none() {
Err(box_err!("round_frac {} overflows", self.time))
} else {
self.time = new_time.unwrap();
self.fsp = fsp;
Ok(())
}
}
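    // Worked example (mirrors `test_round_frac` below): rounding
    // "2012-12-31 11:30:45.123456" to fsp 4 computes
    //   base = TEN_POW[9 - 4] = 100_000,
    //   expect_nanos = round(123_456_000 / 100_000) * 100_000 = 123_500_000,
    // so the stored time becomes 11:30:45.1235.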
fn write_date_format_segment(&self, b: char, output: &mut String) -> Result<()> {
match b {
'b' => {
let m = self.time.month();
if m == 0 || m > 12 {
return Err(box_err!("invalid time format"));
} else {
output.push_str(MONTH_NAMES_ABBR[(m - 1) as usize]);
}
}
'M' => {
let m = self.time.month();
if m == 0 || m > 12 {
return Err(box_err!("invalid time format"));
} else {
output.push_str(MONTH_NAMES[(m - 1) as usize]);
}
}
'm' => {
write!(output, "{:02}", self.time.month()).unwrap();
}
'c' => {
write!(output, "{}", self.time.month()).unwrap();
}
'D' => {
write!(
output,
"{}{}",
self.time.day(),
self.time.abbr_day_of_month()
)
.unwrap();
}
'd' => {
write!(output, "{:02}", self.time.day()).unwrap();
}
'e' => {
write!(output, "{}", self.time.day()).unwrap();
}
'j' => {
write!(output, "{:03}", self.time.days()).unwrap();
}
'H' => {
write!(output, "{:02}", self.time.hour()).unwrap();
}
'k' => {
write!(output, "{}", self.time.hour()).unwrap();
}
'h' | 'I' => {
let t = self.time.hour();
if t == 0 || t == 12 {
output.push_str("12");
} else {
write!(output, "{:02}", t % 12).unwrap();
}
}
'l' => {
let t = self.time.hour();
if t == 0 || t == 12 {
output.push_str("12");
} else {
write!(output, "{}", t % 12).unwrap();
}
}
'i' => {
write!(output, "{:02}", self.time.minute()).unwrap();
}
'p' => {
let hour = self.time.hour();
if (hour / 12) % 2 == 0 {
output.push_str("AM")
} else {
output.push_str("PM")
}
}
'r' => {
let h = self.time.hour();
if h == 0 {
write!(
output,
"{:02}:{:02}:{:02} AM",
12,
self.time.minute(),
self.time.second()
)
.unwrap();
} else if h == 12 {
write!(
output,
"{:02}:{:02}:{:02} PM",
12,
self.time.minute(),
self.time.second()
)
.unwrap();
} else if h < 12 {
write!(
output,
"{:02}:{:02}:{:02} AM",
h,
self.time.minute(),
self.time.second()
)
.unwrap();
} else {
write!(
output,
"{:02}:{:02}:{:02} PM",
h - 12,
self.time.minute(),
self.time.second()
)
.unwrap();
}
}
'T' => {
write!(
output,
"{:02}:{:02}:{:02}",
self.time.hour(),
self.time.minute(),
self.time.second()
)
.unwrap();
}
'S' | 's' => {
write!(output, "{:02}", self.time.second()).unwrap();
}
'f' => {
write!(output, "{:06}", self.time.nanosecond() / 1000).unwrap();
}
'U' => {
let w = self.time.week(WeekMode::from_bits_truncate(0));
write!(output, "{:02}", w).unwrap();
}
'u' => {
let w = self.time.week(WeekMode::from_bits_truncate(1));
write!(output, "{:02}", w).unwrap();
}
'V' => {
let w = self.time.week(WeekMode::from_bits_truncate(2));
write!(output, "{:02}", w).unwrap();
}
'v' => {
let (_, w) = self.time.year_week(WeekMode::from_bits_truncate(3));
write!(output, "{:02}", w).unwrap();
}
'a' => {
output.push_str(self.time.weekday().name_abbr());
}
'W' => {
output.push_str(self.time.weekday().name());
}
'w' => {
write!(output, "{}", self.time.weekday().num_days_from_sunday()).unwrap();
}
'X' => {
let (year, _) = self.time.year_week(WeekMode::from_bits_truncate(2));
if year < 0 {
write!(output, "{}", u32::max_value()).unwrap();
} else {
write!(output, "{:04}", year).unwrap();
}
}
'x' => {
let (year, _) = self.time.year_week(WeekMode::from_bits_truncate(3));
if year < 0 {
write!(output, "{}", u32::max_value()).unwrap();
} else {
write!(output, "{:04}", year).unwrap();
}
}
'Y' => {
write!(output, "{:04}", self.time.year()).unwrap();
}
'y' => {
write!(output, "{:02}", self.time.year() % 100).unwrap();
}
_ => output.push(b),
}
Ok(())
}
pub fn date_format(&self, layout: &str) -> Result<String> {
let mut ret = String::new();
let mut pattern_match = false;
for b in layout.chars() {
if pattern_match {
self.write_date_format_segment(b, &mut ret)?;
pattern_match = false;
continue;
}
if b == '%' {
pattern_match = true;
} else {
ret.push(b);
}
}
Ok(ret)
}
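    // Usage sketch (specifiers as exercised by `test_date_format` below; `t` is
    // any parsed `Time`):
    //   let s = t.date_format("%Y-%m-%d %H:%i:%S")?; // e.g. "2010-01-07 23:12:34"
    // Note that `%i` (not `%M`) means minutes here, following MySQL's
    // DATE_FORMAT convention rather than strftime.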
pub fn is_leap_year(&self) -> bool {
self.time.year() % 4 == 0 && (self.time.year() % 100 != 0 || self.time.year() % 400 == 0)
}
pub fn last_day_of_month(&self) -> u32 {
match self.time.month() {
4 | 6 | 9 | 11 => 30,
2 => {
if self.is_leap_year() {
29
} else {
28
}
}
_ => 31,
}
}
/// Checked time addition. Computes self + rhs, returning None if overflow occurred.
pub fn checked_add(self, rhs: MyDuration) -> Option<Time> {
if let Some(add) = self
.time
.checked_add_signed(Duration::nanoseconds(rhs.to_nanos()))
{
if add.year() > 9999 {
return None;
}
let mut res = self;
res.set_time(add);
Some(res)
} else {
None
}
}
/// Checked time subtraction. Computes self - rhs, returning None if overflow occurred.
pub fn checked_sub(self, rhs: MyDuration) -> Option<Time> {
if let Some(sub) = self
.time
.checked_sub_signed(Duration::nanoseconds(rhs.to_nanos()))
{
if sub.year() < 0 {
return None;
}
let mut res = self;
res.set_time(sub);
Some(res)
} else {
None
}
}
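    // Boundary sketch (mirrors `test_checked_add_and_sub_duration` below):
    //   let t = Time::parse_utc_datetime("9999-12-31 23:59:59", 6).unwrap();
    //   let d = MyDuration::parse(b"01:00:00", 6).unwrap();
    //   assert_eq!(t.checked_add(d), None); // the year would pass 9999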
}
impl ConvertTo<f64> for Time {
fn convert(&self, _: &mut EvalContext) -> Result<f64> {
if self.is_zero() {
return Ok(0f64);
}
let f: f64 = box_try!(self.to_numeric_string().parse());
Ok(f)
}
}
impl ConvertTo<Decimal> for Time {
#[inline]
fn convert(&self, _: &mut EvalContext) -> Result<Decimal> {
if self.is_zero() {
return Ok(0.into());
}
self.to_numeric_string().parse()
}
}
impl PartialOrd for Time {
fn partial_cmp(&self, right: &Time) -> Option<Ordering> {
Some(self.cmp(right))
}
}
impl PartialEq for Time {
fn eq(&self, right: &Time) -> bool {
self.time.eq(&right.time)
}
}
impl Eq for Time {}
impl Ord for Time {
fn cmp(&self, right: &Time) -> Ordering {
self.time.cmp(&right.time)
}
}
impl Display for Time {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
if self.is_zero() {
if self.time_type == TimeType::Date {
return f.write_str(ZERO_DATE_STR);
}
return f.write_str(ZERO_DATETIME_STR);
}
        // `is_zero` was handled above, so only the non-zero cases remain.
        if self.time_type == TimeType::Date {
            return write!(f, "{}", self.time.format("%Y-%m-%d"));
        }
        write!(f, "{}", self.time.format("%Y-%m-%d %H:%M:%S"))?;
if self.fsp > 0 {
// Do we need to round the result?
let nanos = self.time.nanosecond() / TEN_POW[9 - self.fsp as usize];
write!(f, ".{0:01$}", nanos, self.fsp as usize)?;
}
Ok(())
}
}
impl<T: std::io::Write> TimeEncoder for T {}
/// Time Encoder for Chunk format
pub trait TimeEncoder: NumberEncoder {
fn encode_time(&mut self, v: &Time) -> Result<()> {
use num::ToPrimitive;
if !v.is_zero() {
self.encode_u16(v.time.year() as u16)?;
self.write_u8(v.time.month() as u8)?;
self.write_u8(v.time.day() as u8)?;
self.write_u8(v.time.hour() as u8)?;
self.write_u8(v.time.minute() as u8)?;
self.write_u8(v.time.second() as u8)?;
self.encode_u32(v.time.nanosecond() / 1000)?;
} else {
let len = mem::size_of::<u16>() + mem::size_of::<u32>() + 5;
let buf = vec![0; len];
self.write_all(&buf)?;
}
let tp: FieldTypeTp = v.time_type.into();
self.write_u8(tp.to_u8().unwrap())?;
self.write_u8(v.fsp).map_err(From::from)
}
}
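// Chunk wire-format sketch, as agreed on by `encode_time` above and
// `Time::decode` below (13 bytes total; nothing beyond these two functions
// guarantees this layout):
//   u16 year | u8 month | u8 day | u8 hour | u8 minute | u8 second
//   | u32 microseconds | u8 field type tag | u8 fsp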
impl Time {
/// `decode` decodes time encoded by `encode_time` for Chunk format.
pub fn decode(data: &mut BytesSlice<'_>) -> Result<Time> {
use num_traits::FromPrimitive;
let year = i32::from(number::decode_u16(data)?);
let (month, day, hour, minute, second) = if data.len() >= 5 {
(
u32::from(data[0]),
u32::from(data[1]),
u32::from(data[2]),
u32::from(data[3]),
u32::from(data[4]),
)
} else {
return Err(Error::unexpected_eof());
};
*data = &data[5..];
let nanoseconds = 1000 * number::decode_u32(data)?;
let (tp, fsp) = if data.len() >= 2 {
(
FieldTypeTp::from_u8(data[0]).unwrap_or(FieldTypeTp::Unspecified),
data[1],
)
} else {
return Err(Error::unexpected_eof());
};
*data = &data[2..];
let tz = Tz::utc(); // TODO
if year == 0
&& month == 0
&& day == 0
&& hour == 0
&& minute == 0
&& second == 0
&& nanoseconds == 0
{
return Ok(zero_datetime(&tz));
}
let t = if tp == FieldTypeTp::Timestamp {
let t = ymd_hms_nanos(&Utc, year, month, day, hour, minute, second, nanoseconds)?;
tz.from_utc_datetime(&t.naive_utc())
} else {
ymd_hms_nanos(
&Tz::utc(),
year,
month,
day,
hour,
minute,
second,
nanoseconds,
)?
};
Time::new(t, tp.try_into()?, fsp as i8)
}
}
impl crate::codec::data_type::AsMySQLBool for Time {
#[inline]
fn as_mysql_bool(&self, _context: &mut crate::expr::EvalContext) -> crate::Result<bool> {
Ok(!self.is_zero())
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::cmp::Ordering;
use std::f64::EPSILON;
use chrono::{Duration, Local};
use crate::codec::mysql::{Duration as MyDuration, MAX_FSP, UNSPECIFIED_FSP};
use crate::expr::EvalContext;
fn for_each_tz<F: FnMut(Tz, i64)>(mut f: F) {
const MIN_OFFSET: i64 = -60 * 24 + 1;
const MAX_OFFSET: i64 = 60 * 24;
// test some offset
for mut offset in MIN_OFFSET..MAX_OFFSET {
offset *= 60;
let tz = Tz::from_offset(offset).unwrap();
f(tz, offset)
}
// test some time zone name without DST
let tz_table = vec![
("Etc/GMT+11", -39600),
("Etc/GMT0", 0),
("Etc/GMT-5", 18000),
("UTC", 0),
("Universal", 0),
];
for (name, offset) in tz_table {
let tz = Tz::from_tz_name(name).unwrap();
f(tz, offset)
}
}
#[test]
fn test_parse_datetime() {
let ok_tables = vec![
(
"2012-12-31 11:30:45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
(
"0000-00-00 00:00:00",
UNSPECIFIED_FSP,
"0000-00-00 00:00:00",
),
(
"0001-01-01 00:00:00",
UNSPECIFIED_FSP,
"0001-01-01 00:00:00",
),
("00-12-31 11:30:45", UNSPECIFIED_FSP, "2000-12-31 11:30:45"),
("12-12-31 11:30:45", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("2012-12-31", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("20121231", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("121231", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("121231", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("12121", UNSPECIFIED_FSP, "2012-12-01 00:00:00"),
(
"2012^12^31 11+30+45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
(
"2012^12^31T11+30+45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
("2012-2-1 11:30:45", UNSPECIFIED_FSP, "2012-02-01 11:30:45"),
("12-2-1 11:30:45", UNSPECIFIED_FSP, "2012-02-01 11:30:45"),
("20121231113045", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("121231113045", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("2012-02-29", UNSPECIFIED_FSP, "2012-02-29 00:00:00"),
("121231113045.123345", 6, "2012-12-31 11:30:45.123345"),
("20121231113045.123345", 6, "2012-12-31 11:30:45.123345"),
("121231113045.9999999", 6, "2012-12-31 11:30:46.000000"),
("121231113045.999999", 6, "2012-12-31 11:30:45.999999"),
("121231113045.999999", 5, "2012-12-31 11:30:46.00000"),
("17011801101", UNSPECIFIED_FSP, "2017-01-18 01:10:01"),
("20170118.1", UNSPECIFIED_FSP, "2017-01-18 01:00:00"),
("20170118.1", UNSPECIFIED_FSP, "2017-01-18 01:00:00"),
("20170118.11", UNSPECIFIED_FSP, "2017-01-18 11:00:00"),
("20170118.111", UNSPECIFIED_FSP, "2017-01-18 11:01:00"),
("20170118.1111", UNSPECIFIED_FSP, "2017-01-18 11:11:00"),
("20170118.11111", UNSPECIFIED_FSP, "2017-01-18 11:11:01"),
("20170118.111111", UNSPECIFIED_FSP, "2017-01-18 11:11:11"),
("20170118.1111111", UNSPECIFIED_FSP, "2017-01-18 11:11:11"),
("20170118.11111111", UNSPECIFIED_FSP, "2017-01-18 11:11:11"),
("1701020301.", UNSPECIFIED_FSP, "2017-01-02 03:01:00"),
("1701020304.1", UNSPECIFIED_FSP, "2017-01-02 03:04:01"),
("1701020302.11", UNSPECIFIED_FSP, "2017-01-02 03:02:11"),
("170102036", UNSPECIFIED_FSP, "2017-01-02 03:06:00"),
("170102039.", UNSPECIFIED_FSP, "2017-01-02 03:09:00"),
("170102037.11", UNSPECIFIED_FSP, "2017-01-02 03:07:11"),
("17011801101.111111", UNSPECIFIED_FSP, "2017-01-18 01:10:01"),
];
for (input, fsp, exp) in ok_tables {
let utc_t = Time::parse_utc_datetime(input, fsp).unwrap();
assert_eq!(format!("{}", utc_t), exp);
for_each_tz(move |tz, offset| {
let t = Time::parse_datetime(input, fsp, &tz).unwrap();
if utc_t.is_zero() {
assert_eq!(t, utc_t);
} else {
let exp_t = Time::new(
utc_t.time - Duration::seconds(offset),
utc_t.time_type,
utc_t.fsp as i8,
)
.unwrap();
assert_eq!(exp_t, t);
}
});
}
// Test parse datetime from float string vs non-float string
let ok_tables = vec![
(
"121231.0101",
UNSPECIFIED_FSP,
"2012-12-31 00:00:00",
"2012-12-31 01:01:00",
),
(
"121231.1",
UNSPECIFIED_FSP,
"2012-12-31 00:00:00",
"2012-12-31 01:00:00",
),
(
"19991231.111",
UNSPECIFIED_FSP,
"1999-12-31 00:00:00",
"1999-12-31 11:01:00",
),
(
"20121231.1",
UNSPECIFIED_FSP,
"2012-12-31 00:00:00",
"2012-12-31 01:00:00",
),
];
for (input, fsp, exp_float, exp_non_float) in ok_tables {
let utc_t = Time::parse_utc_datetime_from_float_string(input, fsp).unwrap();
assert_eq!(format!("{}", utc_t), exp_float);
let utc_t = Time::parse_utc_datetime(input, fsp).unwrap();
assert_eq!(format!("{}", utc_t), exp_non_float);
}
let fail_tbl = vec![
"1000-00-00 00:00:00",
"1000-01-01 00:00:70",
"1000-13-00 00:00:00",
"10000-01-01 00:00:00",
"1000-09-31 00:00:00",
"1001-02-29 00:00:00",
"20170118.999",
];
for t in fail_tbl {
let tz = Tz::utc();
assert!(Time::parse_datetime(t, 0, &tz).is_err(), t);
}
}
#[test]
fn test_parse_datetime_dst() {
let ok_tables = vec![
("Asia/Shanghai", "1988-04-09 23:59:59", 576604799),
// No longer DST since tzdata 2018f
("Asia/Shanghai", "1988-04-10 00:00:00", 576604800),
("Asia/Shanghai", "1988-04-10 01:00:00", 576608400),
// DST starts from 02:00
("Asia/Shanghai", "1988-04-17 01:00:00", 577213200),
("Asia/Shanghai", "1988-04-17 01:59:59", 577216799),
("Asia/Shanghai", "1988-04-17 03:00:00", 577216800),
// DST ends at 02:00
("Asia/Shanghai", "1988-09-11 00:59:59", 589910399),
("Asia/Shanghai", "1988-09-11 01:00:00", 589910400), // ambiguous
("Asia/Shanghai", "1988-09-11 01:59:59", 589913999), // ambiguous
("Asia/Shanghai", "1988-09-11 02:00:00", 589917600),
("Asia/Shanghai", "1988-09-11 02:00:01", 589917601),
("Asia/Shanghai", "2015-01-02 23:59:59", 1420214399),
("America/Los_Angeles", "1919-03-30 01:59:59", -1601820001),
("America/Los_Angeles", "1919-03-30 03:00:00", -1601820000),
("America/Los_Angeles", "2011-03-13 01:59:59", 1300010399),
("America/Los_Angeles", "2011-03-13 03:00:00", 1300010400),
("America/Los_Angeles", "2011-11-06 01:59:59", 1320569999), // ambiguous
("America/Los_Angeles", "2011-11-06 02:00:00", 1320573600),
("America/Toronto", "2013-11-18 11:55:00", 1384793700),
];
for (tz_name, time_str, utc_timestamp) in ok_tables {
let tz = Tz::from_tz_name(tz_name).unwrap();
let t = Time::parse_datetime(time_str, UNSPECIFIED_FSP, &tz).unwrap();
assert_eq!(
t.time.timestamp(),
utc_timestamp,
"{} {}",
tz_name,
time_str
);
}
// TODO: When calling `UNIX_TIMESTAMP()` in MySQL, these date time will not fail.
// However it will fail when inserting into a TIMESTAMP field.
let fail_tables = vec![
("Asia/Shanghai", "1988-04-17 02:00:00"),
("Asia/Shanghai", "1988-04-17 02:59:59"),
("America/Los_Angeles", "1919-03-30 02:00:00"),
("America/Los_Angeles", "1919-03-30 02:59:59"),
("America/Los_Angeles", "2011-03-13 02:00:00"),
("America/Los_Angeles", "2011-03-13 02:59:59"),
];
for (tz_name, time_str) in fail_tables {
let tz = Tz::from_tz_name(tz_name).unwrap();
assert!(
Time::parse_datetime(time_str, UNSPECIFIED_FSP, &tz).is_err(),
"{} {}",
tz_name,
time_str,
);
}
}
#[test]
#[allow(clippy::zero_prefixed_literal)]
fn test_parse_datetime_system_timezone() {
// Basically, we check whether the parse result is the same when constructing using local.
let tables = vec![
(1988, 04, 09, 23, 59, 59),
(1988, 04, 10, 01, 00, 00),
(1988, 09, 11, 00, 00, 00),
(1988, 09, 11, 00, 00, 01),
(1988, 09, 10, 23, 59, 59),
(1988, 09, 10, 23, 00, 00),
(1988, 09, 10, 22, 59, 59),
(2015, 01, 02, 23, 59, 59),
(1919, 03, 30, 01, 59, 59),
(1919, 03, 30, 03, 00, 00),
(1988, 04, 10, 00, 00, 00),
(1988, 04, 10, 00, 59, 59),
];
// These are supposed to be local time zones
let local_tzs = vec![
Tz::from_tz_name("SYSTEM").unwrap(),
Tz::from_tz_name("system").unwrap(),
Tz::from_tz_name("System").unwrap(),
Tz::local(),
];
for (year, month, day, hour, minute, second) in tables {
for tz in &local_tzs {
// Some Date time listed in the test case may be invalid in the current time zone,
// so we need to check it first.
let local_time = Local
.ymd_opt(year, month, day)
.and_hms_opt(hour, minute, second)
.earliest();
if let Some(local_time) = local_time {
let time_str =
format!("{}-{}-{} {}:{}:{}", year, month, day, hour, minute, second);
let t = Time::parse_datetime(&time_str, UNSPECIFIED_FSP, tz).unwrap();
assert_eq!(t.time, local_time);
}
}
}
}
#[test]
fn test_codec() {
let cases = vec![
("2010-10-10 10:11:11", 0),
("0001-01-01 00:00:00", 0),
("0001-01-01 00:00:00", UNSPECIFIED_FSP),
("2000-01-01 00:00:00.000000", MAX_FSP),
("2000-01-01 00:00:00.123456", MAX_FSP),
("0001-01-01 00:00:00.123456", MAX_FSP),
("2000-06-01 00:00:00.999999", MAX_FSP),
];
for (s, fsp) in cases {
for_each_tz(move |tz, offset| {
let t = Time::parse_datetime(s, fsp, &tz).unwrap();
let packed = t.to_packed_u64();
let reverted_datetime =
Time::from_packed_u64(packed, TimeType::DateTime, fsp, &tz).unwrap();
assert_eq!(reverted_datetime, t);
assert_eq!(reverted_datetime.to_packed_u64(), packed);
let reverted_timestamp =
Time::from_packed_u64(packed, TimeType::Timestamp, fsp, &tz).unwrap();
assert_eq!(
reverted_timestamp.time,
reverted_datetime.time + Duration::seconds(offset)
);
assert_eq!(reverted_timestamp.to_packed_u64(), packed);
})
}
}
#[test]
fn test_to_numeric_string() {
let cases = vec![
("2012-12-31 11:30:45.123456", 4, "20121231113045.1235"),
("2012-12-31 11:30:45.123456", 6, "20121231113045.123456"),
("2012-12-31 11:30:45.123456", 0, "20121231113045"),
("2012-12-31 11:30:45.999999", 0, "20121231113046"),
("2017-01-05 08:40:59.575601", 0, "20170105084100"),
("2017-01-05 23:59:59.575601", 0, "20170106000000"),
("0000-00-00 00:00:00", 6, "00000000000000"),
];
for (s, fsp, expect) in cases {
let t = Time::parse_utc_datetime(s, fsp).unwrap();
let get = t.to_numeric_string();
assert_eq!(get, expect);
}
}
#[test]
fn test_to_decimal() {
let cases = vec![
("2012-12-31 11:30:45.123456", 4, "20121231113045.1235"),
("2012-12-31 11:30:45.123456", 6, "20121231113045.123456"),
("2012-12-31 11:30:45.123456", 0, "20121231113045"),
("2012-12-31 11:30:45.999999", 0, "20121231113046"),
("2017-01-05 08:40:59.575601", 0, "20170105084100"),
("2017-01-05 23:59:59.575601", 0, "20170106000000"),
("0000-00-00 00:00:00", 6, "0"),
];
let mut ctx = EvalContext::default();
for (s, fsp, expect) in cases {
let t = Time::parse_utc_datetime(s, fsp).unwrap();
let get: Decimal = t.convert(&mut ctx).unwrap();
assert_eq!(
get,
expect.as_bytes().convert(&mut ctx).unwrap(),
"convert datetime {} to decimal",
s
);
}
}
#[test]
fn test_to_dec() {
let cases = vec![
("12-12-31 11:30:45", 0, "20121231113045", "20121231"),
("12-12-31 11:30:45", 6, "20121231113045.000000", "20121231"),
(
"12-12-31 11:30:45.123",
6,
"20121231113045.123000",
"20121231",
),
("12-12-31 11:30:45.123345", 0, "20121231113045", "20121231"),
(
"12-12-31 11:30:45.123345",
3,
"20121231113045.123",
"20121231",
),
(
"12-12-31 11:30:45.123345",
5,
"20121231113045.12335",
"20121231",
),
(
"12-12-31 11:30:45.123345",
6,
"20121231113045.123345",
"20121231",
),
(
"12-12-31 11:30:45.1233457",
6,
"20121231113045.123346",
"20121231",
),
("12-12-31 11:30:45.823345", 0, "20121231113046", "20121231"),
];
for (t_str, fsp, datetime_dec, date_dec) in cases {
for_each_tz(move |tz, _offset| {
let mut ctx = EvalContext::default();
let mut t = Time::parse_datetime(t_str, fsp, &tz).unwrap();
let dec: Result<Decimal> = t.convert(&mut ctx);
let mut res = format!("{}", dec.unwrap());
assert_eq!(res, datetime_dec);
t = Time::parse_datetime(t_str, 0, &tz).unwrap();
t.set_time_type(TimeType::Date).unwrap();
let dec: Result<Decimal> = t.convert(&mut ctx);
res = format!("{}", dec.unwrap());
assert_eq!(res, date_dec);
});
}
}
#[test]
fn test_convert_to_f64() {
let cases = vec![
("2012-12-31 11:30:45.123456", 4, 20121231113045.1235f64),
("2012-12-31 11:30:45.123456", 6, 20121231113045.123456f64),
("2012-12-31 11:30:45.123456", 0, 20121231113045f64),
("2012-12-31 11:30:45.999999", 0, 20121231113046f64),
("2017-01-05 08:40:59.575601", 0, 20170105084100f64),
("2017-01-05 23:59:59.575601", 0, 20170106000000f64),
("0000-00-00 00:00:00", 6, 0f64),
];
let mut ctx = EvalContext::default();
for (s, fsp, expect) in cases {
let t = Time::parse_utc_datetime(s, fsp).unwrap();
let get: f64 = t.convert(&mut ctx).unwrap();
assert!(
(expect - get).abs() < EPSILON,
"expect: {}, got: {}",
expect,
get
);
}
}
#[test]
fn test_compare() {
let cases = vec![
(
"2011-10-10 11:11:11",
"2011-10-10 11:11:11",
Ordering::Equal,
),
(
"2011-10-10 11:11:11.123456",
"2011-10-10 11:11:11.1",
Ordering::Greater,
),
(
"2011-10-10 11:11:11",
"2011-10-10 11:11:11.123",
Ordering::Less,
),
("0000-00-00 00:00:00", "2011-10-10 11:11:11", Ordering::Less),
(
"0000-00-00 00:00:00",
"0000-00-00 00:00:00",
Ordering::Equal,
),
];
for (l, r, exp) in cases {
for_each_tz(move |tz, _offset| {
let l_t = Time::parse_datetime(l, MAX_FSP, &tz).unwrap();
let r_t = Time::parse_datetime(r, MAX_FSP, &tz).unwrap();
assert_eq!(exp, l_t.cmp(&r_t));
});
}
}
#[test]
fn test_parse_datetime_format() {
let cases = vec![
(
"2011-11-11 10:10:10.123456",
vec!["2011", "11", "11", "10", "10", "10", "123456"],
),
(
" 2011-11-11 10:10:10.123456 ",
vec!["2011", "11", "11", "10", "10", "10", "123456"],
),
("2011-11-11 10", vec!["2011", "11", "11", "10"]),
(
"2011-11-11T10:10:10.123456",
vec!["2011", "11", "11", "10", "10", "10", "123456"],
),
(
"2011:11:11T10:10:10.123456",
vec!["2011", "11", "11", "10", "10", "10", "123456"],
),
("xx2011-11-11 10:10:10", vec![]),
("T10:10:10", vec![]),
("2011-11-11x", vec![]),
("2011-11-11 10:10:10", vec![]),
("xxx 10:10:10", vec![]),
];
for (s, exp) in cases {
let res = Time::parse_datetime_format(s);
assert_eq!(res, exp);
}
}
#[test]
fn test_round_frac() {
let ok_tables = vec![
(
"2012-12-31 11:30:45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
(
"0000-00-00 00:00:00",
UNSPECIFIED_FSP,
"0000-00-00 00:00:00",
),
(
"0001-01-01 00:00:00",
UNSPECIFIED_FSP,
"0001-01-01 00:00:00",
),
("00-12-31 11:30:45", UNSPECIFIED_FSP, "2000-12-31 11:30:45"),
("12-12-31 11:30:45", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("2012-12-31", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("20121231", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
("121231", UNSPECIFIED_FSP, "2012-12-31 00:00:00"),
(
"2012^12^31 11+30+45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
(
"2012^12^31T11+30+45",
UNSPECIFIED_FSP,
"2012-12-31 11:30:45",
),
("2012-2-1 11:30:45", UNSPECIFIED_FSP, "2012-02-01 11:30:45"),
("12-2-1 11:30:45", UNSPECIFIED_FSP, "2012-02-01 11:30:45"),
("20121231113045", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("121231113045", UNSPECIFIED_FSP, "2012-12-31 11:30:45"),
("2012-02-29", UNSPECIFIED_FSP, "2012-02-29 00:00:00"),
("121231113045.123345", 6, "2012-12-31 11:30:45.123345"),
("20121231113045.123345", 6, "2012-12-31 11:30:45.123345"),
("121231113045.9999999", 6, "2012-12-31 11:30:46.000000"),
("121231113045.999999", 6, "2012-12-31 11:30:45.999999"),
("121231113045.999999", 5, "2012-12-31 11:30:46.00000"),
("2012-12-31 11:30:45.123456", 4, "2012-12-31 11:30:45.1235"),
(
"2012-12-31 11:30:45.123456",
6,
"2012-12-31 11:30:45.123456",
),
("2012-12-31 11:30:45.123456", 0, "2012-12-31 11:30:45"),
("2012-12-31 11:30:45.123456", 1, "2012-12-31 11:30:45.1"),
("2012-12-31 11:30:45.999999", 4, "2012-12-31 11:30:46.0000"),
("2012-12-31 11:30:45.999999", 0, "2012-12-31 11:30:46"),
("2012-12-31 23:59:59.999999", 0, "2013-01-01 00:00:00"),
("2012-12-31 23:59:59.999999", 3, "2013-01-01 00:00:00.000"),
// TODO: TIDB can handle this case, but we can't.
//("2012-00-00 11:30:45.999999", 3, "2012-00-00 11:30:46.000"),
// TODO: MySQL can handle this case, but we can't.
// ("2012-01-00 23:59:59.999999", 3, "2012-01-01 00:00:00.000"),
];
for (input, fsp, exp) in ok_tables {
let mut utc_t = Time::parse_utc_datetime(input, UNSPECIFIED_FSP).unwrap();
utc_t.round_frac(fsp).unwrap();
let expect = Time::parse_utc_datetime(exp, UNSPECIFIED_FSP).unwrap();
assert_eq!(
utc_t, expect,
"input:{:?}, exp:{:?}, utc_t:{:?}, expect:{:?}",
input, exp, utc_t, expect
);
for_each_tz(move |tz, offset| {
let mut t = Time::parse_datetime(input, UNSPECIFIED_FSP, &tz).unwrap();
t.round_frac(fsp).unwrap();
let expect = Time::parse_datetime(exp, UNSPECIFIED_FSP, &tz).unwrap();
assert_eq!(
t, expect,
"tz:{:?},input:{:?}, exp:{:?}, utc_t:{:?}, expect:{:?}",
offset, input, exp, t, expect
);
});
}
}
#[test]
fn test_set_tp() {
let cases = vec![
("2011-11-11 10:10:10.123456", "2011-11-11"),
(" 2011-11-11 23:59:59", "2011-11-11"),
];
for (s, exp) in cases {
let mut res = Time::parse_utc_datetime(s, UNSPECIFIED_FSP).unwrap();
res.set_time_type(TimeType::Date).unwrap();
res.set_time_type(TimeType::DateTime).unwrap();
let ep = Time::parse_utc_datetime(exp, UNSPECIFIED_FSP).unwrap();
assert_eq!(res, ep);
let res = res.set_time_type(TimeType::Timestamp);
assert!(res.is_err());
}
}
#[test]
fn test_from_duration() {
let cases = vec![("11:30:45.123456"), ("-35:30:46")];
let tz = Tz::utc();
for s in cases {
let d = MyDuration::parse(s.as_bytes(), MAX_FSP).unwrap();
let get = Time::from_duration(&tz, TimeType::DateTime, d).unwrap();
let get_today = get
.time
.checked_sub_signed(Duration::nanoseconds(d.to_nanos()))
.unwrap();
let now = Utc::now();
assert_eq!(get_today.year(), now.year());
assert_eq!(get_today.month(), now.month());
assert_eq!(get_today.day(), now.day());
assert_eq!(get_today.hour(), 0);
assert_eq!(get_today.minute(), 0);
assert_eq!(get_today.second(), 0);
}
}
#[test]
fn test_convert_to_duration() {
let cases = vec![
("2012-12-31 11:30:45.123456", 4, "11:30:45.1235"),
("2012-12-31 11:30:45.123456", 6, "11:30:45.123456"),
("2012-12-31 11:30:45.123456", 0, "11:30:45"),
("2012-12-31 11:30:45.999999", 0, "11:30:46"),
("2017-01-05 08:40:59.575601", 0, "08:41:00"),
("2017-01-05 23:59:59.575601", 0, "00:00:00"),
("0000-00-00 00:00:00", 6, "00:00:00"),
];
for (s, fsp, expect) in cases {
let t = Time::parse_utc_datetime(s, fsp).unwrap();
let du = t.to_duration().unwrap();
let get = du.to_string();
assert_eq!(get, expect);
}
}
#[test]
fn test_date_format() {
let cases = vec![
(
"2010-01-07 23:12:34.12345",
"%b %M %m %c %D %d %e %j %k %h %i %p %r %T %s %f %U %u %V
%v %a %W %w %X %x %Y %y %%",
"Jan January 01 1 7th 07 7 007 23 11 12 PM 11:12:34 PM 23:12:34 34 123450 01 01 01
01 Thu Thursday 4 2010 2010 2010 10 %",
),
(
"2012-12-21 23:12:34.123456",
"%b %M %m %c %D %d %e %j %k %h %i %p %r %T %s %f %U
%u %V %v %a %W %w %X %x %Y %y %%",
"Dec December 12 12 21st 21 21 356 23 11 12 PM 11:12:34 PM 23:12:34 34 123456 51
51 51 51 Fri Friday 5 2012 2012 2012 12 %",
),
(
"0000-01-01 00:00:00.123456",
// Functions week() and yearweek() don't support multi mode,
// so the result of "%U %u %V %Y" is different from MySQL.
"%b %M %m %c %D %d %e %j %k %h %i %p %r %T %s %f %v %Y
%y %%",
"Jan January 01 1 1st 01 1 001 0 12 00 AM 12:00:00 AM 00:00:00 00 123456 52 0000
00 %",
),
(
"2016-09-3 00:59:59.123456",
"abc%b %M %m %c %D %d %e %j %k %h %i %p %r %T %s %f %U
%u %V %v %a %W %w %X %x %Y %y!123 %%xyz %z",
"abcSep September 09 9 3rd 03 3 247 0 12 59 AM 12:59:59 AM 00:59:59 59 123456 35
35 35 35 Sat Saturday 6 2016 2016 2016 16!123 %xyz z",
),
(
"2012-10-01 00:00:00",
"%b %M %m %c %D %d %e %j %k %H %i %p %r %T %s %f %v
%x %Y %y %%",
"Oct October 10 10 1st 01 1 275 0 00 00 AM 12:00:00 AM 00:00:00 00 000000 40
2012 2012 12 %",
),
];
for (s, layout, expect) in cases {
let t = Time::parse_utc_datetime(s, 6).unwrap();
let get = t.date_format(layout).unwrap();
assert_eq!(get, expect);
}
}
#[test]
fn test_chunk_codec() {
let cases = vec![
("2012-12-31 11:30:45.123456", 4),
("2012-12-31 11:30:45.123456", 6),
("2012-12-31 11:30:45.123456", 0),
("2012-12-31 11:30:45.999999", 0),
("2017-01-05 08:40:59.575601", 0),
("2017-01-05 23:59:59.575601", 0),
("0000-00-00 00:00:00", 6),
];
for (s, fsp) in cases {
let t = Time::parse_utc_datetime(s, fsp).unwrap();
let mut buf = vec![];
buf.encode_time(&t).unwrap();
let got = Time::decode(&mut buf.as_slice()).unwrap();
assert_eq!(got, t);
}
}
#[test]
fn test_parse_fsp() {
let cases = vec![
("2012-12-31 11:30:45.1234", 4),
("2012-12-31 11:30:45.123456", 6),
("2012-12-31 11:30:45", 0),
("2012-12-31 11:30:45.", 0),
("2017-01-05 08:40:59.5756014372987", 6),
("2017-01-05 23:59:59....432", 3),
(".1.2.3.4.5.6", 1),
];
for (s, fsp) in cases {
let t = Time::parse_fsp(s);
assert_eq!(fsp, t);
}
}
#[test]
fn test_checked_add_and_sub_duration() {
let cases = vec![
(
"2018-12-30 11:30:45.123456",
"00:00:14.876545",
"2018-12-30 11:31:00.000001",
),
(
"2018-12-30 11:30:45.123456",
"00:30:00",
"2018-12-30 12:00:45.123456",
),
(
"2018-12-30 11:30:45.123456",
"12:30:00",
"2018-12-31 00:00:45.123456",
),
(
"2018-12-30 11:30:45.123456",
"1 12:30:00",
"2019-01-01 00:00:45.123456",
),
];
for (lhs, rhs, exp) in cases.clone() {
let lhs = Time::parse_utc_datetime(lhs, 6).unwrap();
let rhs = MyDuration::parse(rhs.as_bytes(), 6).unwrap();
let res = lhs.checked_add(rhs).unwrap();
let exp = Time::parse_utc_datetime(exp, 6).unwrap();
assert_eq!(res, exp);
}
for (exp, rhs, lhs) in cases {
let lhs = Time::parse_utc_datetime(lhs, 6).unwrap();
let rhs = MyDuration::parse(rhs.as_bytes(), 6).unwrap();
let res = lhs.checked_sub(rhs).unwrap();
let exp = Time::parse_utc_datetime(exp, 6).unwrap();
assert_eq!(res, exp);
}
let lhs = Time::parse_utc_datetime("9999-12-31 23:59:59", 6).unwrap();
let rhs = MyDuration::parse(b"01:00:00", 6).unwrap();
assert_eq!(lhs.checked_add(rhs), None);
let lhs = Time::parse_utc_datetime("0000-01-01 00:00:01", 6).unwrap();
let rhs = MyDuration::parse(b"01:00:00", 6).unwrap();
assert_eq!(lhs.checked_sub(rhs), None);
}
}
| 34.820321 | 100 | 0.454078 |
f8e43c448cead5c8ac994ce2ddbdb3bc8c70929a | 8,994 | use super::CrateVersion;
use serde_json;
use std::path::Path;
use git2::{
build::RepoBuilder, Delta, DiffFormat, Error as GitError, ErrorClass, Object, ObjectType, Oid,
Reference, Repository, Tree,
};
use std::str;
static INDEX_GIT_URL: &str = "https://github.com/rust-lang/crates.io-index";
static LAST_SEEN_REFNAME: &str = "refs/heads/crates-index-diff_last-seen";
static EMPTY_TREE_HASH: &str = "4b825dc642cb6eb9a060e54bf8d69288fbee4904";
static LINE_ADDED_INDICATOR: char = '+';
/// A wrapper for a repository of the crates.io index.
pub struct Index {
/// The name and path of the reference used to keep track of the last seen state of the
/// crates.io repository. The default value is `refs/heads/crates-index-diff_last-seen`.
pub seen_ref_name: &'static str,
/// The crates.io repository.
repo: Repository,
}
/// Options for use in `Index::from_path_or_cloned_with_options`
pub struct CloneOptions {
repository_url: String,
}
impl Index {
/// Return the crates.io repository.
pub fn repository(&self) -> &Repository {
&self.repo
}
/// Return the reference pointing to the state we have seen after calling `fetch_changes()`.
pub fn last_seen_reference(&self) -> Result<Reference, GitError> {
self.repo.find_reference(self.seen_ref_name)
}
/// Return a new `Index` instance from the given `path`, which should contain a bare or non-bare
/// clone of the `crates.io` index.
/// If the directory does not contain the repository or does not exist, it will be cloned from
/// the official location automatically (with complete history).
///
    /// An error will occur if the repository exists and its remote URL does not match the given repository URL.
pub fn from_path_or_cloned_with_options(
path: impl AsRef<Path>,
options: CloneOptions,
) -> Result<Index, GitError> {
let mut repo_did_exist = true;
let repo = Repository::open(path.as_ref()).or_else(|err| {
if err.class() == ErrorClass::Repository {
repo_did_exist = false;
RepoBuilder::new()
.bare(true)
.clone(&options.repository_url, path.as_ref())
} else {
Err(err)
}
})?;
if repo_did_exist {
let remote = repo.find_remote("origin")?;
let actual_remote_url = remote
.url()
.ok_or_else(|| GitError::from_str("did not obtain URL of remote named 'origin'"))?;
if actual_remote_url != options.repository_url {
return Err(GitError::from_str(&format!(
"Actual 'origin' remote url {:#?} did not match desired one at {:#?}",
actual_remote_url, options.repository_url
)));
}
}
Ok(Index {
repo,
seen_ref_name: LAST_SEEN_REFNAME,
})
}
/// Return a new `Index` instance from the given `path`, which should contain a bare or non-bare
/// clone of the `crates.io` index.
/// If the directory does not contain the repository or does not exist, it will be cloned from
/// the official location automatically (with complete history).
pub fn from_path_or_cloned(path: impl AsRef<Path>) -> Result<Index, GitError> {
Index::from_path_or_cloned_with_options(
path,
CloneOptions {
repository_url: INDEX_GIT_URL.into(),
},
)
}
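    // Typical usage sketch (hypothetical path; error handling elided):
    //   let index = Index::from_path_or_cloned("/tmp/crates-index")?;
    //   for krate in index.fetch_changes()? {
    //       // each `krate` is a `CrateVersion` parsed from newly added index lines
    //   }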
/// As `peek_changes_with_options`, but without the options.
pub fn peek_changes(&self) -> Result<(Vec<CrateVersion>, git2::Oid), GitError> {
self.peek_changes_with_options(None)
}
/// Return all `CrateVersion`s that are observed between the last time `fetch_changes(…)` was called
/// and the latest state of the `crates.io` index repository, which is obtained by fetching
/// the remote called `origin`.
/// The `last_seen_reference()` will not be created or updated.
/// The second field in the returned tuple is the commit object to which the changes were provided.
/// If one would set the `last_seen_reference()` to that object, the effect is exactly the same
/// as if `fetch_changes(…)` had been called.
pub fn peek_changes_with_options(
&self,
options: Option<&mut git2::FetchOptions<'_>>,
) -> Result<(Vec<CrateVersion>, git2::Oid), GitError> {
let from = self
.last_seen_reference()
.and_then(|r| {
r.target().ok_or_else(|| {
GitError::from_str("last-seen reference did not have a valid target")
})
})
.or_else(|_| Oid::from_str(EMPTY_TREE_HASH))?;
let to = {
self.repo.find_remote("origin").and_then(|mut r| {
r.fetch(&["refs/heads/*:refs/remotes/origin/*"], options, None)
})?;
let latest_fetched_commit_oid =
self.repo.refname_to_id("refs/remotes/origin/master")?;
latest_fetched_commit_oid
};
Ok((
self.changes_from_objects(
&self.repo.find_object(from, None)?,
&self.repo.find_object(to, None)?,
)?,
to,
))
}
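    // Sketch: peeking plus a manual reference update is equivalent to
    // `fetch_changes`, as the doc comment above states:
    //   let (changes, oid) = index.peek_changes()?;
    //   // ... inspect `changes` without committing to them ...
    //   index.set_last_seen_reference(oid)?;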
/// As `fetch_changes_with_options`, but without the options.
pub fn fetch_changes(&self) -> Result<Vec<CrateVersion>, GitError> {
self.fetch_changes_with_options(None)
}
/// Return all `CrateVersion`s that are observed between the last time this method was called
/// and the latest state of the `crates.io` index repository, which is obtained by fetching
/// the remote called `origin`.
/// The `last_seen_reference()` will be created or adjusted to point to the latest fetched
/// state, which causes this method to have a different result each time it is called.
pub fn fetch_changes_with_options(
&self,
options: Option<&mut git2::FetchOptions<'_>>,
) -> Result<Vec<CrateVersion>, GitError> {
let (changes, to) = self.peek_changes_with_options(options)?;
self.set_last_seen_reference(to)?;
Ok(changes)
}
/// Set the last seen reference to the given Oid. It will be created if it does not yet exists.
pub fn set_last_seen_reference(&self, to: Oid) -> Result<(), GitError> {
self.last_seen_reference()
.and_then(|mut seen_ref| {
seen_ref.set_target(to, "updating seen-ref head to latest fetched commit")
})
.or_else(|_err| {
self.repo.reference(
self.seen_ref_name,
to,
true,
"creating seen-ref at latest fetched commit",
)
})?;
Ok(())
}
    /// Return all `CrateVersion`s observed between `from` and `to`. Both parameters are ref-specs
    /// pointing to either a commit or a tree.
/// Learn more about specifying revisions
/// in the
/// [official documentation](https://www.kernel.org/pub/software/scm/git/docs/gitrevisions.html)
pub fn changes(
&self,
from: impl AsRef<str>,
to: impl AsRef<str>,
) -> Result<Vec<CrateVersion>, GitError> {
self.changes_from_objects(
&self.repo.revparse_single(from.as_ref())?,
&self.repo.revparse_single(to.as_ref())?,
)
}
/// Similar to `changes()`, but requires `from` and `to` objects to be provided. They may point
/// to either `Commit`s or `Tree`s.
pub fn changes_from_objects(
&self,
from: &Object,
to: &Object,
) -> Result<Vec<CrateVersion>, GitError> {
fn into_tree<'a>(repo: &'a Repository, obj: &Object) -> Result<Tree<'a>, GitError> {
repo.find_tree(match obj.kind() {
Some(ObjectType::Commit) => obj
.as_commit()
.expect("object of kind commit yields commit")
.tree_id(),
                // let it possibly fail later
                _ => obj.id(),
})
}
let diff = self.repo.diff_tree_to_tree(
Some(&into_tree(&self.repo, from)?),
Some(&into_tree(&self.repo, to)?),
None,
)?;
let mut res: Vec<CrateVersion> = Vec::new();
diff.print(DiffFormat::Patch, |delta, _, diffline| {
if diffline.origin() != LINE_ADDED_INDICATOR {
return true;
}
            match delta.status() {
                Delta::Added | Delta::Modified => {}
                _ => return true,
            }
if let Ok(c) = serde_json::from_slice(diffline.content()) {
res.push(c)
}
true
})
.map(|_| res)
}
}
| 38.27234 | 113 | 0.581832 |
21b495efce2368a35c136b60342bc0613d4dc4ff | 297 | #[doc = "Reader of register CPUIRQSEL1"]
pub type R = crate::R<u32, super::CPUIRQSEL1>;
#[doc = "Reader of field `EV`"]
pub type EV_R = crate::R<u8, u8>;
impl R {
#[doc = "Bits 0:6 - EV"]
#[inline(always)]
pub fn ev(&self) -> EV_R {
EV_R::new((self.bits & 0x7f) as u8)
}
}
| 24.75 | 46 | 0.558923 |
f81c00a8f94b5635aeeefcaf7559b4314a960e5c | 51 | fn main() {
println!("Hello, cargo world!");
}
| 12.75 | 36 | 0.54902 |
8f332646bbccde96b333d7c8f8ecb5c908fe40de | 615 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn foo(x: *~int) -> ~int {
let y = *x; //~ ERROR dereference of unsafe pointer requires unsafe function or block
return y;
}
fn main() {
}
| 34.166667 | 89 | 0.712195 |
23daaeb0f00883d1d5b492a32c88f7a6872e50b3 | 167 | // check-pass
macro_rules! a {
() => { "a" }
}
macro_rules! b {
($doc:expr) => {
#[doc = $doc]
pub struct B;
}
}
b!(a!());
fn main() {}
| 10.4375 | 21 | 0.389222 |
bf8f4f830d9a4913605436927a14091f6d2c0bac | 133,400 | // Copyright 2011 Google Inc. All Rights Reserved.
// Copyright 2017 The Ninja-rs Project Developers. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cell::{Cell, RefCell};
use std::collections::{BTreeMap, BTreeSet, btree_map};
use super::state::State;
use super::deps_log::DepsLog;
use super::build_log::BuildLog;
use super::disk_interface::DiskInterface;
use super::graph::{NodeIndex, EdgeIndex, DependencyScan};
use super::exit_status::ExitStatus;
use super::metrics::Stopwatch;
use super::metrics::get_time_millis;
use super::debug_flags::KEEP_RSP;
use super::timestamp::TimeStamp;
use super::subprocess::SubprocessSet;
use super::utils::{get_load_average, pathbuf_from_bytes};
use super::line_printer::{LinePrinter, LinePrinterLineType};
pub enum EdgeResult {
EdgeFailed,
EdgeSucceeded,
}
/// Plan stores the state of a build plan: what we intend to build,
/// which steps we're ready to execute.
pub struct Plan {
wanted_edges: usize,
command_edges: usize,
/// Keep track of which edges we want to build in this plan. If this map does
/// not contain an entry for an edge, we do not want to build the entry or its
/// dependents. If an entry maps to false, we do not want to build it, but we
/// might want to build one of its dependents. If the entry maps to true, we
/// want to build it.
want: BTreeMap<EdgeIndex, bool>,
ready: BTreeSet<EdgeIndex>,
}
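// Illustration of the `want` map semantics documented above (hypothetical
// two-edge chain): if dirty edge A consumes the output of edge B, A maps to
// true (build it); B maps to false when it need not be rebuilt itself but a
// dependent such as A still has to be unblocked; edges absent from the map
// are entirely outside the plan.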
trait IsVacant {
fn is_vacant(&self) -> bool;
}
impl<'a, K, V> IsVacant for btree_map::Entry<'a, K, V> {
fn is_vacant(&self) -> bool {
match self {
&btree_map::Entry::Vacant(_) => true,
_ => false,
}
}
}
impl Plan {
pub fn new() -> Self {
Plan {
wanted_edges: 0usize,
command_edges: 0usize,
want: BTreeMap::new(),
ready: BTreeSet::new(),
}
}
/// Add a target to our plan (including all its dependencies).
    /// Returns `Ok(false)` if we don't need to build this target; returns
    /// an error message if there's a problem.
pub fn add_target(&mut self, state: &State, node: NodeIndex) -> Result<bool, String> {
self.add_sub_target(state, node, None)
}
pub fn add_sub_target(
&mut self,
state: &State,
node_idx: NodeIndex,
dependent: Option<NodeIndex>,
) -> Result<bool, String> {
let node = state.node_state.get_node(node_idx);
let edge_idx = node.in_edge();
if edge_idx.is_none() {
if node.is_dirty() {
let mut err = format!("'{}'", String::from_utf8_lossy(node.path()));
if let Some(dependent) = dependent {
err += &format!(
", needed by '{}',",
String::from_utf8_lossy(state.node_state.get_node(dependent).path())
);
}
err += " missing and no known rule to make it";
return Err(err);
}
return Ok(false);
}
let edge_idx = edge_idx.unwrap();
let edge = state.edge_state.get_edge(edge_idx);
if edge.outputs_ready() {
return Ok(false); // Don't need to do anything.
}
        // If an entry in `want` does not already exist for the edge, create an entry
        // which maps to false, indicating that we do not want to build this edge itself.
let want = self.want.get(&edge_idx).cloned();
let vacant = want.is_none();
if node.is_dirty() && want.unwrap_or(false) == false {
self.want.insert(edge_idx, true);
self.wanted_edges += 1;
if edge.all_inputs_ready(state) {
self.schedule_work(state, edge_idx);
}
if !edge.is_phony() {
self.command_edges += 1;
}
}
if vacant {
for input_node_idx in edge.inputs.iter() {
self.add_sub_target(state, *input_node_idx, Some(node_idx))?;
}
}
return Ok(true);
}
/// Number of edges with commands to run.
fn command_edge_count(&self) -> usize {
return self.command_edges;
}
/// Reset state. Clears want and ready sets.
fn reset(&mut self) {
self.command_edges = 0;
self.wanted_edges = 0;
self.ready.clear();
self.want.clear();
}
/// Returns true if there's more work to be done.
pub fn more_to_do(&self) -> bool {
self.wanted_edges > 0 && self.command_edges > 0
}
/// Submits a ready edge as a candidate for execution.
/// The edge may be delayed from running, for example if it's a member of a
/// currently-full pool.
pub fn schedule_work(&mut self, state: &State, edge_idx: EdgeIndex) {
if self.ready.get(&edge_idx).is_some() {
// This edge has already been scheduled. We can get here again if an edge
// and one of its dependencies share an order-only input, or if a node
// duplicates an out edge (see https://github.com/ninja-build/ninja/pull/519).
// Avoid scheduling the work again.
return;
}
let edge = state.edge_state.get_edge(edge_idx);
let mut pool = edge.pool.borrow_mut();
if pool.should_delay_edge() {
pool.delay_edge(state, edge_idx);
pool.retrieve_ready_edges(state, &mut self.ready);
} else {
pool.edge_scheduled(state, edge_idx);
self.ready.insert(edge_idx);
}
}
    /// Pop a ready edge off the queue of edges to build.
    /// Returns `None` if there's no work to do.
pub fn find_work(&mut self) -> Option<EdgeIndex> {
match self.ready.iter().next().cloned() {
Some(idx) => {
self.ready.remove(&idx);
Some(idx)
}
None => None,
}
}
/// Mark an edge as done building (whether it succeeded or failed).
pub fn edge_finished(&mut self, state: &mut State, edge_idx: EdgeIndex, result: EdgeResult) {
let directly_wanted = self.want.get(&edge_idx).unwrap().clone();
{
let edge = state.edge_state.get_edge(edge_idx);
// See if this job frees up any delayed jobs.
if directly_wanted {
edge.pool.borrow_mut().edge_finished(state, edge_idx);
}
edge.pool.borrow_mut().retrieve_ready_edges(
state,
&mut self.ready,
);
}
match result {
EdgeResult::EdgeSucceeded => {
if directly_wanted {
self.wanted_edges -= 1;
}
self.want.remove(&edge_idx);
state.edge_state.get_edge_mut(edge_idx).outputs_ready = true;
// Check off any nodes we were waiting for with this edge.
for output_node_idx in state
.edge_state
.get_edge_mut(edge_idx)
.outputs
.clone()
.into_iter()
{
self.node_finished(state, output_node_idx);
}
}
_ => {}
};
}
pub fn node_finished(&mut self, state: &mut State, node_idx: NodeIndex) {
        // See if we want any edges from this node.
for out_edge_idx in state
.node_state
.get_node(node_idx)
.out_edges()
.to_owned()
.into_iter()
{
let want_e = self.want.get(&out_edge_idx).cloned();
if want_e.is_none() {
continue;
}
{
let oe = state.edge_state.get_edge(out_edge_idx);
if !oe.all_inputs_ready(state) {
continue;
}
}
if want_e.unwrap() {
self.schedule_work(state, out_edge_idx);
} else {
// We do not need to build this edge, but we might need to build one of
// its dependents.
self.edge_finished(state, out_edge_idx, EdgeResult::EdgeSucceeded);
}
}
}
/// Clean the given node during the build.
    /// Returns an error on failure.
pub fn clean_node(
&mut self,
scan: &DependencyScan,
State: &State,
node_idx: NodeIndex,
) -> Result<(), String> {
unimplemented!()
}
}
/*
struct Plan {
Plan();
/// Dumps the current state of the plan.
void Dump();
enum EdgeResult {
kEdgeFailed,
kEdgeSucceeded
};
/// Clean the given node during the build.
/// Return false on error.
bool CleanNode(DependencyScan* scan, Node* node, string* err);
/// Reset state. Clears want and ready sets.
void Reset();
private:
bool AddSubTarget(Node* node, Node* dependent, string* err);
void NodeFinished(Node* node);
set<Edge*> ready_;
/// Total number of edges that have commands (not phony).
int command_edges_;
/// Total remaining number of wanted edges.
int wanted_edges_;
};
*/
/// CommandRunner is an interface that wraps running the build
/// subcommands. This allows tests to abstract out running commands.
/// RealCommandRunner is an implementation that actually runs commands.
/// The result of waiting for a command.
pub struct CommandRunnerResult {
pub edge: EdgeIndex,
pub status: ExitStatus,
pub output: Vec<u8>,
}
impl CommandRunnerResult {
fn is_success(&self) -> bool {
match self.status {
ExitStatus::ExitSuccess => true,
_ => false,
}
}
}
pub trait CommandRunner {
fn can_run_more(&self) -> bool;
fn start_command(&mut self, state: &State, edge: EdgeIndex) -> bool;
    /// Wait for a command to complete, or return `None` if interrupted.
fn wait_for_command(&mut self) -> Option<CommandRunnerResult>;
fn get_active_edges(&self) -> Vec<EdgeIndex>;
fn abort(&mut self);
}
pub enum BuildConfigVerbosity {
NORMAL,
QUIET, // No output -- used when testing.
VERBOSE,
}
/// Options (e.g. verbosity, parallelism) passed to a build.
pub struct BuildConfig {
pub verbosity: BuildConfigVerbosity,
pub dry_run: bool,
pub parallelism: usize,
pub failures_allowed: usize,
pub max_load_average: f64,
}
impl BuildConfig {
pub fn new() -> Self {
BuildConfig {
verbosity: BuildConfigVerbosity::NORMAL,
dry_run: false,
parallelism: 1,
failures_allowed: 1,
max_load_average: -0.0f64,
}
}
}
/*
struct BuildConfig {
BuildConfig() : verbosity(NORMAL), dry_run(false), parallelism(1),
failures_allowed(1), max_load_average(-0.0f) {}
enum Verbosity {
NORMAL,
QUIET, // No output -- used when testing.
VERBOSE
};
Verbosity verbosity;
bool dry_run;
int parallelism;
int failures_allowed;
/// The maximum load average we must not exceed. A negative value
/// means that we do not have any limit.
double max_load_average;
};
*/
/// Builder wraps the build process: starting commands, updating status.
pub struct Builder<'s, 'p, 'a, 'b, 'c>
where
's: 'a,
{
state: &'s mut State,
config: &'p BuildConfig,
plan: Plan,
command_runner: Option<Box<CommandRunner + 'p>>,
disk_interface: &'c DiskInterface,
scan: DependencyScan<'s, 'a, 'b, 'c>,
status: BuildStatus<'p>,
}
impl<'s, 'p, 'a, 'b, 'c> Builder<'s, 'p, 'a, 'b, 'c>
where
's: 'a,
{
pub fn new(
state: &'s mut State,
config: &'p BuildConfig,
build_log: &'a BuildLog<'s>,
deps_log: &'b DepsLog,
disk_interface: &'c DiskInterface,
) -> Self {
Builder {
state,
config,
plan: Plan::new(),
command_runner: None,
disk_interface,
scan: DependencyScan::new(build_log, deps_log, disk_interface),
status: BuildStatus::new(config),
}
}
/// Add a target to the build, scanning dependencies.
    /// Returns an error message on failure.
pub fn add_target(&mut self, node_idx: NodeIndex) -> Result<(), String> {
self.scan.recompute_dirty(self.state, node_idx)?;
if let Some(in_edge) = self.state.node_state.get_node(node_idx).in_edge() {
if self.state.edge_state.get_edge(in_edge).outputs_ready() {
return Ok(()); // Nothing to do.
}
}
self.plan.add_target(self.state, node_idx)?;
Ok(())
}
/// Returns true if the build targets are already up to date.
pub fn is_already_up_to_date(&mut self) -> bool {
!self.plan.more_to_do()
}
    /// Run the build. Returns an error message on failure.
    /// It is an error to call this function when `is_already_up_to_date()` is true.
pub fn build(&mut self) -> Result<(), String> {
assert!(!self.is_already_up_to_date());
self.status.plan_has_total_edges(
self.plan.command_edge_count(),
);
let mut pending_commands = 0;
let mut failures_allowed = self.config.failures_allowed;
// Set up the command runner if we haven't done so already.
let config = self.config;
if self.command_runner.is_none() {
self.command_runner = Some(if config.dry_run {
Box::new(DryRunCommandRunner::new())
} else {
Box::new(RealCommandRunner::new(config))
});
}
// We are about to start the build process.
self.status.build_started();
// This main loop runs the entire build process.
// It is structured like this:
// First, we attempt to start as many commands as allowed by the
// command runner.
// Second, we attempt to wait for / reap the next finished command.
while self.plan.more_to_do() {
// See if we can start any more commands.
if failures_allowed > 0 && self.command_runner.as_ref().unwrap().can_run_more() {
if let Some(edge_idx) = self.plan.find_work() {
if let Err(e) = self.start_edge(edge_idx) {
self.cleanup();
self.status.build_finished();
return Err(e);
};
if self.state.edge_state.get_edge(edge_idx).is_phony() {
self.plan.edge_finished(
self.state,
edge_idx,
EdgeResult::EdgeSucceeded,
);
} else {
pending_commands += 1;
}
// We made some progress; go back to the main loop.
continue;
}
}
// See if we can reap any finished commands.
if pending_commands > 0 {
let result = self.command_runner.as_mut().unwrap().wait_for_command();
if result.is_none() ||
result.as_ref().unwrap().status == ExitStatus::ExitInterrupted
{
self.cleanup();
self.status.build_finished();
return Err("interrupted by user".to_owned());
}
pending_commands -= 1;
// Note: matching instead of `if let Err(e) = result` followed by
// `result.unwrap()`, which would partially move `result` and not compile.
let result = match self.finish_command(result.unwrap()) {
Ok(r) => r,
Err(e) => {
self.cleanup();
self.status.build_finished();
return Err(e);
}
};
if !result.is_success() && failures_allowed > 0 {
failures_allowed -= 1;
}
// We made some progress; start the main loop over.
continue;
}
// If we get here, we cannot make any more progress.
self.status.build_finished();
return match failures_allowed {
0 if config.failures_allowed > 1 => Err("subcommands failed".to_owned()),
0 => Err("subcommand failed".to_owned()),
_ if failures_allowed < self.config.failures_allowed => Err(
"cannot make progress due to previous errors"
.to_owned(),
),
_ => Err("stuck [this is a bug]".to_owned()),
};
}
self.status.build_finished();
return Ok(());
}
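/// Create output directories and any response file, then hand the edge's
/// command to the command runner.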
fn start_edge(&mut self, edge_idx: EdgeIndex) -> Result<(), String> {
metric_record!("StartEdge");
let edge = self.state.edge_state.get_edge(edge_idx);
if edge.is_phony() {
return Ok(());
}
self.status.build_edge_started(self.state, edge_idx);
// Create directories necessary for outputs.
// XXX: this will block; do we care?
for out_idx in edge.outputs.iter() {
let path = pathbuf_from_bytes(
self.state.node_state.get_node(*out_idx).path().to_owned(),
).map_err(|e| {
format!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e))
})?;
if let Some(parent) = path.parent() {
self.disk_interface.make_dirs(parent).map_err(
|e| format!("{}", e),
)?;
}
}
// Create response file, if needed
// XXX: this may also block; do we care?
let rspfile = edge.get_unescaped_rspfile(&self.state.node_state);
if !rspfile.as_ref().is_empty() {
let content = edge.get_binding(&self.state.node_state, b"rspfile_content");
let rspfile_path = pathbuf_from_bytes(rspfile.into_owned()).map_err(|e| {
format!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e))
})?;
self.disk_interface
.write_file(&rspfile_path, content.as_ref())
.map_err(|e| format!("{}", e))?;
}
// Compute the command and hand it to the command runner.
if !self.command_runner.as_mut().unwrap().start_command(
self.state,
edge_idx,
)
{
return Err(format!(
"command '{}' failed.",
String::from_utf8_lossy(
&edge.evaluate_command(&self.state.node_state),
)
));
}
Ok(())
}
/// Update status and the build/deps logs following a command termination.
/// Returns an error if the build cannot proceed further due to a fatal error.
fn finish_command(
&mut self,
mut result: CommandRunnerResult,
) -> Result<CommandRunnerResult, String> {
use errno;
metric_record!("FinishCommand");
let edge_idx = result.edge;
// First try to extract dependencies from the result, if any.
// This must happen first as it filters the command output (we want
// to filter /showIncludes output, even on compile failure) and
// extraction itself can fail, which makes the command fail from a
// build perspective.
let mut deps_nodes = Vec::new();
let (deps_type, deps_prefix) = {
let edge = self.state.edge_state.get_edge(edge_idx);
let deps_type = edge.get_binding(&self.state.node_state, b"deps");
let deps_prefix = edge.get_binding(&self.state.node_state, b"msvc_deps_prefix");
(deps_type.into_owned(), deps_prefix.into_owned())
};
if !deps_type.is_empty() {
match self.extract_deps(&mut result, deps_type.as_ref(), deps_prefix.as_ref()) {
Ok(n) => {
deps_nodes = n;
}
Err(e) => {
if result.is_success() {
if !result.output.is_empty() {
result.output.extend_from_slice(b"\n".as_ref());
}
result.output.extend_from_slice(e.as_bytes());
result.status = ExitStatus::ExitFailure;
}
}
}
}
let (start_time, end_time) = self.status.build_edge_finished(
self.state,
edge_idx,
result.is_success(),
&result.output,
);
if !result.is_success() {
self.plan.edge_finished(
self.state,
edge_idx,
EdgeResult::EdgeFailed,
);
return Ok(result);
}
// The rest of this function only applies to successful commands.
// Restat the edge outputs
let mut output_mtime = TimeStamp(0);
let restat = self.state.edge_state.get_edge(edge_idx)
.get_binding_bool(&self.state.node_state, b"restat");
if !self.config.dry_run {
let edge = self.state.edge_state.get_edge(edge_idx);
let mut node_cleaned = false;
for o_node_idx in edge.outputs.iter() {
let o_node = self.state.node_state.get_node(*o_node_idx);
let path = pathbuf_from_bytes(o_node.path().to_owned()).map_err(|e| {
format!("Invalid utf-8 pathname {}", String::from_utf8_lossy(&e))
})?;
let new_mtime = self.disk_interface.stat(&path)?;
if new_mtime > output_mtime {
output_mtime = new_mtime;
}
if o_node.mtime() == new_mtime && restat {
// The rule command did not change the output. Propagate the clean
// state through the build graph.
// Note that this also applies to nonexistent outputs (mtime == 0).
self.plan.clean_node(&self.scan, self.state, *o_node_idx)?;
node_cleaned = true;
}
}
if node_cleaned {
let mut restat_mtime = TimeStamp(0);
// If any output was cleaned, find the most recent mtime of any
// (existing) non-order-only input or the depfile.
for i_idx in edge.inputs[edge.non_order_only_deps_range()].iter() {
let path = pathbuf_from_bytes(
self.state.node_state.get_node(*i_idx).path().to_owned(),
).map_err(|e| {
format!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e))
})?;
let input_mtime = self.disk_interface.stat(&path)?;
if input_mtime > restat_mtime {
restat_mtime = input_mtime;
}
}
let depfile = edge.get_unescaped_depfile(&self.state.node_state);
if restat_mtime.0 != 0 && deps_type.is_empty() && !depfile.is_empty() {
let path = pathbuf_from_bytes(depfile.into_owned()).map_err(|e| {
format!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e))
})?;
let depfile_mtime = self.disk_interface.stat(&path)?;
if depfile_mtime > restat_mtime {
restat_mtime = depfile_mtime;
}
}
// The total number of edges in the plan may have changed as a result
// of a restat.
self.status.plan_has_total_edges(
self.plan.command_edge_count(),
);
output_mtime = restat_mtime;
}
}
self.plan.edge_finished(
self.state,
edge_idx,
EdgeResult::EdgeSucceeded,
);
let edge = self.state.edge_state.get_edge(edge_idx);
let rspfile = edge.get_unescaped_rspfile(&self.state.node_state);
if !rspfile.is_empty() && !KEEP_RSP {
if let Ok(path) = pathbuf_from_bytes(rspfile.into_owned()) {
let _ = self.disk_interface.remove_file(&path);
};
}
if let Some(build_log) = self.scan.build_log() {
build_log
.record_command(self.state, edge_idx, start_time, end_time, output_mtime)
.map_err(|_| {
format!("Error writing to build log: {}", errno::errno())
})?;
}
if !deps_type.is_empty() && !self.config.dry_run {
assert!(edge.outputs.len() == 1); // otherwise the parser should have rejected it
let out_idx = edge.outputs[0];
let out = self.state.node_state.get_node(out_idx);
let path = pathbuf_from_bytes(out.path().to_owned()).map_err(|e| {
format!("Invalid utf-8 pathname {}", String::from_utf8_lossy(&e))
})?;
let deps_mtime = self.disk_interface.stat(&path)?;
self.scan
.deps_log()
.record_deps(self.state, out_idx, deps_mtime, &deps_nodes)
.map_err(|e| format!("Error writing to deps log: {}", errno::errno()))?;
}
Ok(result)
}
/// Clean up after interrupted commands by deleting output files.
pub fn cleanup(&mut self) {
if self.command_runner.is_none() {
return;
}
let command_runner = self.command_runner.as_mut().unwrap();
let active_edges = command_runner.get_active_edges();
command_runner.abort();
for edge_idx in active_edges.into_iter() {
let edge = self.state.edge_state.get_edge(edge_idx);
let depfile = edge.get_unescaped_depfile(&self.state.node_state)
.into_owned();
for out_idx in edge.outputs.iter() {
// Only delete this output if it was actually modified. This is
// important for things like the generator where we don't want to
// delete the manifest file if we can avoid it. But if the rule
// uses a depfile, always delete. (Consider the case where we
// need to rebuild an output because of a modified header file
// mentioned in a depfile, and the command touches its depfile
// but is interrupted before it touches its output file.)
let out_node = self.state.node_state.get_node(*out_idx);
match pathbuf_from_bytes(out_node.path().to_owned()) {
Err(e) => {
error!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e));
}
Ok(path) => {
match self.disk_interface.stat(&path) {
Err(e) => {
error!("{}", e);
}
Ok(new_mtime) => {
if !depfile.is_empty() || out_node.mtime() != new_mtime {
let _ = self.disk_interface.remove_file(&path);
}
}
}
}
}
}
if !depfile.is_empty() {
match pathbuf_from_bytes(depfile) {
Err(e) => {
error!("invalid utf-8 filename: {}", String::from_utf8_lossy(&e));
}
Ok(path) => {
let _ = self.disk_interface.remove_file(&path);
}
};
}
}
}
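/// Extract dependencies discovered while running the command (deps = "msvc"
/// or "gcc"); currently a stub that returns no deps (see the TODOs below).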
fn extract_deps(
&self,
result: &mut CommandRunnerResult,
deps_type: &[u8],
deps_prefix: &[u8],
) -> Result<Vec<NodeIndex>, String> {
if deps_type == b"msvc" {
/*
CLParser parser;
string output;
if (!parser.Parse(result->output, deps_prefix, &output, err))
return false;
result->output = output;
for (set<string>::iterator i = parser.includes_.begin();
i != parser.includes_.end(); ++i) {
// ~0 is assuming that with MSVC-parsed headers, it's ok to always make
// all backslashes (as some of the slashes will certainly be backslashes
// anyway). This could be fixed if necessary with some additional
// complexity in IncludesNormalize::Relativize.
deps_nodes->push_back(state_->GetNode(*i, ~0u));
}
*/
// TODO: port the CLParser-based /showIncludes handling shown above.
return Ok(Vec::new());
} else if deps_type == b"gcc" {
/*
string depfile = result->edge->GetUnescapedDepfile();
if (depfile.empty()) {
*err = string("edge with deps=gcc but no depfile makes no sense");
return false;
}
// Read depfile content. Treat a missing depfile as empty.
string content;
switch (disk_interface_->ReadFile(depfile, &content, err)) {
case DiskInterface::Okay:
break;
case DiskInterface::NotFound:
err->clear();
break;
case DiskInterface::OtherError:
return false;
}
if (content.empty())
return true;
DepfileParser deps;
if (!deps.Parse(&content, err))
return false;
// XXX check depfile matches expected output.
deps_nodes->reserve(deps.ins_.size());
for (vector<StringPiece>::iterator i = deps.ins_.begin();
i != deps.ins_.end(); ++i) {
uint64_t slash_bits;
if (!CanonicalizePath(const_cast<char*>(i->str_), &i->len_, &slash_bits,
err))
return false;
deps_nodes->push_back(state_->GetNode(*i, slash_bits));
}
if (!g_keep_depfile) {
if (disk_interface_->RemoveFile(depfile) < 0) {
*err = string("deleting depfile: ") + strerror(errno) + string("\n");
return false;
}
}
*/
// TODO: port the depfile parsing shown above.
return Ok(Vec::new());
} else {
fatal!("unknown deps type '{}'", String::from_utf8_lossy(deps_type));
unreachable!();
}
}
}
impl<'s, 'p, 'a, 'b, 'c> Drop for Builder<'s, 'p, 'a, 'b, 'c> {
fn drop(&mut self) {
self.cleanup();
}
}
/*
struct Builder {
Builder(State* state, const BuildConfig& config,
BuildLog* build_log, DepsLog* deps_log,
DiskInterface* disk_interface);
~Builder();
/// Clean up after interrupted commands by deleting output files.
void Cleanup();
Node* AddTarget(const string& name, string* err);
/// Used for tests.
void SetBuildLog(BuildLog* log) {
scan_.set_build_log(log);
}
State* state_;
const BuildConfig& config_;
Plan plan_;
auto_ptr<CommandRunner> command_runner_;
BuildStatus* status_;
private:
bool ExtractDeps(CommandRunner::Result* result, const string& deps_type,
const string& deps_prefix, vector<Node*>* deps_nodes,
string* err);
DiskInterface* disk_interface_;
DependencyScan scan_;
// Unimplemented copy ctor and operator= ensure we don't copy the auto_ptr.
Builder(const Builder &other); // DO NOT IMPLEMENT
void operator=(const Builder &other); // DO NOT IMPLEMENT
};
*/
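/// Whether an edge has just started or just finished, used when formatting status lines.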
enum BuildStatusEdgeStatus {
EdgeStarted,
EdgeFinished,
}
/// Tracks the status of a build: completion fraction, printing updates.
struct BuildStatus<'a> {
config: &'a BuildConfig,
/// Time the build started.
start_time_millis: u64,
started_edges: usize,
/// Map of running edge to the time the edge started running.
running_edges: BTreeMap<EdgeIndex, u64>,
finished_edges: usize,
total_edges: usize,
/// The custom progress status format to use.
progress_status_format: Vec<u8>,
/// Prints progress output.
printer: LinePrinter,
overall_rate: RefCell<RateInfo>,
current_rate: RefCell<SlidingRateInfo>,
}
impl<'a> BuildStatus<'a> {
pub fn new(config: &'a BuildConfig) -> Self {
BuildStatus {
config,
start_time_millis: get_time_millis(),
started_edges: 0,
running_edges: BTreeMap::new(),
finished_edges: 0,
total_edges: 0,
// TODO: read the NINJA_STATUS environment variable (default "[%f/%t] ").
progress_status_format: Vec::new(),
printer: LinePrinter::new(),
overall_rate: RefCell::new(RateInfo::new()),
current_rate: RefCell::new(SlidingRateInfo::new(config.parallelism)),
}
}
pub fn plan_has_total_edges(&mut self, total: usize) {
self.total_edges = total;
}
pub fn build_started(&mut self) {
self.overall_rate.borrow_mut().restart();
self.current_rate.borrow_mut().restart();
}
pub fn build_finished(&mut self) {
self.printer.set_console_locked(false);
self.printer.print_on_new_line(b"");
}
pub fn build_edge_started(&mut self, state: &State, edge_idx: EdgeIndex) {
let start_time = get_time_millis() - self.start_time_millis;
self.running_edges.insert(edge_idx, start_time);
self.started_edges += 1;
let edge_use_console = state.edge_state.get_edge(edge_idx).use_console();
if edge_use_console || self.printer.is_smart_terminal() {
self.print_status(state, edge_idx, BuildStatusEdgeStatus::EdgeStarted);
}
if edge_use_console {
self.printer.set_console_locked(true);
}
}
pub fn build_edge_finished(
&mut self,
state: &State,
edge_idx: EdgeIndex,
success: bool,
output: &[u8],
) -> (u64, u64) {
let now = get_time_millis();
self.finished_edges += 1;
let start_time = self.running_edges.remove(&edge_idx).unwrap();
let end_time = now - self.start_time_millis;
if state.edge_state.get_edge(edge_idx).use_console() {
self.printer.set_console_locked(false);
}
match self.config.verbosity {
BuildConfigVerbosity::QUIET => {
return (start_time, end_time);
}
_ => {}
};
/*
if (!edge->use_console())
PrintStatus(edge, kEdgeFinished);
// Print the command that is spewing before printing its output.
if (!success) {
string outputs;
for (vector<Node*>::const_iterator o = edge->outputs_.begin();
o != edge->outputs_.end(); ++o)
outputs += (*o)->path() + " ";
printer_.PrintOnNewLine("FAILED: " + outputs + "\n");
printer_.PrintOnNewLine(edge->EvaluateCommand() + "\n");
}
if (!output.empty()) {
// ninja sets stdout and stderr of subprocesses to a pipe, to be able to
// check if the output is empty. Some compilers, e.g. clang, check
// isatty(stderr) to decide if they should print colored output.
// To make it possible to use colored output with ninja, subprocesses should
// be run with a flag that forces them to always print color escape codes.
// To make sure these escape codes don't show up in a file if ninja's output
// is piped to a file, ninja strips ansi escape codes again if it's not
// writing to a |smart_terminal_|.
// (Launching subprocesses in pseudo ttys doesn't work because there are
// only a few hundred available on some systems, and ninja can launch
// thousands of parallel compile commands.)
// TODO: There should be a flag to disable escape code stripping.
string final_output;
if (!printer_.is_smart_terminal())
final_output = StripAnsiEscapeCodes(output);
else
final_output = output;
#ifdef _WIN32
// Fix extra CR being added on Windows, writing out CR CR LF (#773)
_setmode(_fileno(stdout), _O_BINARY); // Begin Windows extra CR fix
#endif
printer_.PrintOnNewLine(final_output);
#ifdef _WIN32
_setmode(_fileno(stdout), _O_TEXT); // End Windows extra CR fix
#endif
}
*/
// TODO: port the failure/output printing shown in the C++ above.
(start_time, end_time)
}
/// Format the progress status string by replacing the placeholders.
/// See the user manual for more information about the available
/// placeholders.
/// @param progress_status_format The format of the progress status.
/// @param status The status of the edge.
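/// Recognized placeholders (per the C++ FormatProgressStatus below): %s started,
/// %t total, %r running, %u unstarted, %f finished, %o overall rate, %c current
/// rate, %p percentage, %e elapsed seconds, and %% for a literal percent sign.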
pub fn format_progress_status(
progress_status_format: &[u8],
status: BuildStatusEdgeStatus,
) -> Vec<u8> {
// TODO: implement placeholder expansion (see the C++ FormatProgressStatus below).
let _ = (progress_status_format, status); // silence unused warnings in the stub
Vec::new()
}
fn print_status(&self, state: &State, edge_idx: EdgeIndex, status: BuildStatusEdgeStatus) {
let force_full_command = match self.config.verbosity {
BuildConfigVerbosity::QUIET => {
return;
}
BuildConfigVerbosity::VERBOSE => true,
BuildConfigVerbosity::NORMAL => false,
};
let edge = state.edge_state.get_edge(edge_idx);
let mut desc_or_cmd = edge.get_binding(&state.node_state, b"description");
if desc_or_cmd.is_empty() || force_full_command {
desc_or_cmd = edge.get_binding(&state.node_state, b"command");
}
let mut to_print = Self::format_progress_status(&self.progress_status_format, status);
to_print.extend_from_slice(&desc_or_cmd);
let ty = if force_full_command {
LinePrinterLineType::Full
} else {
LinePrinterLineType::Elide
};
self.printer.print(&to_print, ty);
}
}
/*
struct BuildStatus {
explicit BuildStatus(const BuildConfig& config);
void PlanHasTotalEdges(int total);
void BuildEdgeStarted(Edge* edge);
void BuildEdgeFinished(Edge* edge, bool success, const string& output,
int* start_time, int* end_time);
void BuildStarted();
void BuildFinished();
private:
void PrintStatus(Edge* edge, EdgeStatus status);
template<size_t S>
void SnprintfRate(double rate, char(&buf)[S], const char* format) const {
if (rate == -1)
snprintf(buf, S, "?");
else
snprintf(buf, S, format, rate);
}
*/
struct RateInfo {
rate: f64,
stopwatch: Stopwatch,
}
impl RateInfo {
pub fn new() -> Self {
RateInfo {
rate: -1f64,
stopwatch: Stopwatch::new(),
}
}
pub fn restart(&mut self) {
self.stopwatch.restart()
}
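/// Recompute edges-per-second from the total of finished edges, mirroring the
/// C++ UpdateRate below; assumes Stopwatch exposes an elapsed-seconds getter.
pub fn update_rate(&mut self, edges: usize) {
if edges > 0 && self.stopwatch.elapsed() > 0.0 {
self.rate = edges as f64 / self.stopwatch.elapsed();
}
}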
}
/*
struct RateInfo {
RateInfo() : rate_(-1) {}
void Restart() { stopwatch_.Restart(); }
double Elapsed() const { return stopwatch_.Elapsed(); }
double rate() { return rate_; }
void UpdateRate(int edges) {
if (edges && stopwatch_.Elapsed())
rate_ = edges / stopwatch_.Elapsed();
}
private:
double rate_;
Stopwatch stopwatch_;
};
*/
struct SlidingRateInfo {
rate: f64,
stopwatch: Stopwatch,
max_len: usize,
times: VecDeque<f64>,
last_update: isize,
}
impl SlidingRateInfo {
pub fn new(n: usize) -> Self {
SlidingRateInfo {
rate: -1.0f64,
stopwatch: Stopwatch::new(),
max_len: n,
times: VecDeque::new(),
last_update: -1,
}
}
pub fn restart(&mut self) {
self.stopwatch.restart();
}
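/// Sliding-window rate over the last `max_len` samples, mirroring the C++
/// UpdateRate below; assumes Stopwatch exposes an elapsed-seconds getter.
pub fn update_rate(&mut self, update_hint: isize) {
if update_hint == self.last_update {
return;
}
self.last_update = update_hint;
if self.times.len() == self.max_len {
self.times.pop_front();
}
self.times.push_back(self.stopwatch.elapsed());
if let (Some(&first), Some(&last)) = (self.times.front(), self.times.back()) {
if last != first {
self.rate = self.times.len() as f64 / (last - first);
}
}
}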
}
/*
struct SlidingRateInfo {
SlidingRateInfo(int n) : rate_(-1), N(n), last_update_(-1) {}
void Restart() { stopwatch_.Restart(); }
double rate() { return rate_; }
void UpdateRate(int update_hint) {
if (update_hint == last_update_)
return;
last_update_ = update_hint;
if (times_.size() == N)
times_.pop();
times_.push(stopwatch_.Elapsed());
if (times_.back() != times_.front())
rate_ = times_.size() / (times_.back() - times_.front());
}
private:
double rate_;
Stopwatch stopwatch_;
const size_t N;
queue<double> times_;
int last_update_;
};
mutable RateInfo overall_rate_;
mutable SlidingRateInfo current_rate_;
};
#endif // NINJA_BUILD_H_
*/
/*
// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "build.h"
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <functional>
#ifdef _WIN32
#include <fcntl.h>
#include <io.h>
#endif
#if defined(__SVR4) && defined(__sun)
#include <sys/termios.h>
#endif
#include "build_log.h"
#include "clparser.h"
#include "debug_flags.h"
#include "depfile_parser.h"
#include "deps_log.h"
#include "disk_interface.h"
#include "graph.h"
#include "state.h"
#include "subprocess.h"
#include "util.h"
*/
use std::collections::VecDeque;
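/// Command runner used for dry runs: pretends every command succeeds immediately.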
struct DryRunCommandRunner {
finished: VecDeque<EdgeIndex>,
}
impl DryRunCommandRunner {
pub fn new() -> Self {
DryRunCommandRunner { finished: VecDeque::new() }
}
}
impl CommandRunner for DryRunCommandRunner {
fn can_run_more(&self) -> bool {
true
}
fn start_command(&mut self, _: &State, edge: EdgeIndex) -> bool {
self.finished.push_back(edge);
true
}
fn wait_for_command(&mut self) -> Option<CommandRunnerResult> {
match self.finished.pop_front() {
None => None,
Some(e) => Some(CommandRunnerResult {
edge: e,
status: ExitStatus::ExitSuccess,
output: Vec::new(),
}),
}
}
fn get_active_edges(&self) -> Vec<EdgeIndex> {
Vec::new()
}
fn abort(&mut self) {
//do nothing
}
}
/*
BuildStatus::BuildStatus(const BuildConfig& config)
: config_(config),
start_time_millis_(GetTimeMillis()),
started_edges_(0), finished_edges_(0), total_edges_(0),
progress_status_format_(NULL),
overall_rate_(), current_rate_(config.parallelism) {
// Don't do anything fancy in verbose mode.
if (config_.verbosity != BuildConfig::NORMAL)
printer_.set_smart_terminal(false);
progress_status_format_ = getenv("NINJA_STATUS");
if (!progress_status_format_)
progress_status_format_ = "[%f/%t] ";
}
void BuildStatus::PlanHasTotalEdges(int total) {
total_edges_ = total;
}
void BuildStatus::BuildEdgeStarted(Edge* edge) {
int start_time = (int)(GetTimeMillis() - start_time_millis_);
running_edges_.insert(make_pair(edge, start_time));
++started_edges_;
if (edge->use_console() || printer_.is_smart_terminal())
PrintStatus(edge, kEdgeStarted);
if (edge->use_console())
printer_.SetConsoleLocked(true);
}
void BuildStatus::BuildEdgeFinished(Edge* edge,
bool success,
const string& output,
int* start_time,
int* end_time) {
int64_t now = GetTimeMillis();
++finished_edges_;
RunningEdgeMap::iterator i = running_edges_.find(edge);
*start_time = i->second;
*end_time = (int)(now - start_time_millis_);
running_edges_.erase(i);
if (edge->use_console())
printer_.SetConsoleLocked(false);
if (config_.verbosity == BuildConfig::QUIET)
return;
if (!edge->use_console())
PrintStatus(edge, kEdgeFinished);
// Print the command that is spewing before printing its output.
if (!success) {
string outputs;
for (vector<Node*>::const_iterator o = edge->outputs_.begin();
o != edge->outputs_.end(); ++o)
outputs += (*o)->path() + " ";
printer_.PrintOnNewLine("FAILED: " + outputs + "\n");
printer_.PrintOnNewLine(edge->EvaluateCommand() + "\n");
}
if (!output.empty()) {
// ninja sets stdout and stderr of subprocesses to a pipe, to be able to
// check if the output is empty. Some compilers, e.g. clang, check
// isatty(stderr) to decide if they should print colored output.
// To make it possible to use colored output with ninja, subprocesses should
// be run with a flag that forces them to always print color escape codes.
// To make sure these escape codes don't show up in a file if ninja's output
// is piped to a file, ninja strips ansi escape codes again if it's not
// writing to a |smart_terminal_|.
// (Launching subprocesses in pseudo ttys doesn't work because there are
// only a few hundred available on some systems, and ninja can launch
// thousands of parallel compile commands.)
// TODO: There should be a flag to disable escape code stripping.
string final_output;
if (!printer_.is_smart_terminal())
final_output = StripAnsiEscapeCodes(output);
else
final_output = output;
#ifdef _WIN32
// Fix extra CR being added on Windows, writing out CR CR LF (#773)
_setmode(_fileno(stdout), _O_BINARY); // Begin Windows extra CR fix
#endif
printer_.PrintOnNewLine(final_output);
#ifdef _WIN32
_setmode(_fileno(stdout), _O_TEXT); // End Windows extra CR fix
#endif
}
}
void BuildStatus::BuildStarted() {
overall_rate_.Restart();
current_rate_.Restart();
}
void BuildStatus::BuildFinished() {
printer_.SetConsoleLocked(false);
printer_.PrintOnNewLine("");
}
string BuildStatus::FormatProgressStatus(
const char* progress_status_format, EdgeStatus status) const {
string out;
char buf[32];
int percent;
for (const char* s = progress_status_format; *s != '\0'; ++s) {
if (*s == '%') {
++s;
switch (*s) {
case '%':
out.push_back('%');
break;
// Started edges.
case 's':
snprintf(buf, sizeof(buf), "%d", started_edges_);
out += buf;
break;
// Total edges.
case 't':
snprintf(buf, sizeof(buf), "%d", total_edges_);
out += buf;
break;
// Running edges.
case 'r': {
int running_edges = started_edges_ - finished_edges_;
// count the edge that just finished as a running edge
if (status == kEdgeFinished)
running_edges++;
snprintf(buf, sizeof(buf), "%d", running_edges);
out += buf;
break;
}
// Unstarted edges.
case 'u':
snprintf(buf, sizeof(buf), "%d", total_edges_ - started_edges_);
out += buf;
break;
// Finished edges.
case 'f':
snprintf(buf, sizeof(buf), "%d", finished_edges_);
out += buf;
break;
// Overall finished edges per second.
case 'o':
overall_rate_.UpdateRate(finished_edges_);
SnprintfRate(overall_rate_.rate(), buf, "%.1f");
out += buf;
break;
// Current rate, average over the last '-j' jobs.
case 'c':
current_rate_.UpdateRate(finished_edges_);
SnprintfRate(current_rate_.rate(), buf, "%.1f");
out += buf;
break;
// Percentage
case 'p':
percent = (100 * finished_edges_) / total_edges_;
snprintf(buf, sizeof(buf), "%3i%%", percent);
out += buf;
break;
case 'e': {
double elapsed = overall_rate_.Elapsed();
snprintf(buf, sizeof(buf), "%.3f", elapsed);
out += buf;
break;
}
default:
Fatal("unknown placeholder '%%%c' in $NINJA_STATUS", *s);
return "";
}
} else {
out.push_back(*s);
}
}
return out;
}
void BuildStatus::PrintStatus(Edge* edge, EdgeStatus status) {
if (config_.verbosity == BuildConfig::QUIET)
return;
bool force_full_command = config_.verbosity == BuildConfig::VERBOSE;
string to_print = edge->GetBinding("description");
if (to_print.empty() || force_full_command)
to_print = edge->GetBinding("command");
to_print = FormatProgressStatus(progress_status_format_, status) + to_print;
printer_.Print(to_print,
force_full_command ? LinePrinter::FULL : LinePrinter::ELIDE);
}
Plan::Plan() : command_edges_(0), wanted_edges_(0) {}
bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) {
node->set_dirty(false);
for (vector<Edge*>::const_iterator oe = node->out_edges().begin();
oe != node->out_edges().end(); ++oe) {
// Don't process edges that we don't actually want.
map<Edge*, bool>::iterator want_e = want_.find(*oe);
if (want_e == want_.end() || !want_e->second)
continue;
// Don't attempt to clean an edge if it failed to load deps.
if ((*oe)->deps_missing_)
continue;
// If all non-order-only inputs for this edge are now clean,
// we might have changed the dirty state of the outputs.
vector<Node*>::iterator
begin = (*oe)->inputs_.begin(),
end = (*oe)->inputs_.end() - (*oe)->order_only_deps_;
if (find_if(begin, end, mem_fun(&Node::dirty)) == end) {
// Recompute most_recent_input.
Node* most_recent_input = NULL;
for (vector<Node*>::iterator i = begin; i != end; ++i) {
if (!most_recent_input || (*i)->mtime() > most_recent_input->mtime())
most_recent_input = *i;
}
// Now, this edge is dirty if any of the outputs are dirty.
// If the edge isn't dirty, clean the outputs and mark the edge as not
// wanted.
bool outputs_dirty = false;
if (!scan->RecomputeOutputsDirty(*oe, most_recent_input,
&outputs_dirty, err)) {
return false;
}
if (!outputs_dirty) {
for (vector<Node*>::iterator o = (*oe)->outputs_.begin();
o != (*oe)->outputs_.end(); ++o) {
if (!CleanNode(scan, *o, err))
return false;
}
want_e->second = false;
--wanted_edges_;
if (!(*oe)->is_phony())
--command_edges_;
}
}
}
return true;
}
void Plan::Dump() {
printf("pending: %d\n", (int)want_.size());
for (map<Edge*, bool>::iterator e = want_.begin(); e != want_.end(); ++e) {
if (e->second)
printf("want ");
e->first->Dump();
}
printf("ready: %d\n", (int)ready_.size());
}
*/
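/// Command runner that executes edge commands as real subprocesses via a SubprocessSet.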
struct RealCommandRunner<'a> {
config: &'a BuildConfig,
subprocs: SubprocessSet<EdgeIndex>,
}
impl<'a> RealCommandRunner<'a> {
pub fn new(config: &'a BuildConfig) -> Self {
RealCommandRunner {
config,
subprocs: SubprocessSet::new(),
}
}
}
impl<'a> CommandRunner for RealCommandRunner<'a> {
fn can_run_more(&self) -> bool {
let subproc_number = self.subprocs.running().len() + self.subprocs.finished().len();
if subproc_number >= self.config.parallelism {
return false;
}
if self.subprocs.running().is_empty() {
return true;
}
if self.config.max_load_average <= 0.0f64 {
return true;
}
if get_load_average().unwrap_or(-0.0f64) < self.config.max_load_average {
return true;
}
return false;
}
fn start_command(&mut self, state: &State, edge_idx: EdgeIndex) -> bool {
let edge = state.edge_state.get_edge(edge_idx);
let command = edge.evaluate_command(&state.node_state);
return self.subprocs
.add(&command, edge.use_console(), edge_idx)
.is_some();
}
fn wait_for_command(&mut self) -> Option<CommandRunnerResult> {
let (mut subproc, edge_idx) = loop {
if let Some(next_finished) = self.subprocs.next_finished() {
break next_finished;
}
if self.subprocs.do_work().is_err() {
//interrupted
return None;
}
};
let status = subproc.finish();
let output = subproc.output().to_owned();
Some(CommandRunnerResult {
status,
output,
edge: edge_idx,
})
}
fn get_active_edges(&self) -> Vec<EdgeIndex> {
self.subprocs.iter().map(|x| x.1).collect()
}
fn abort(&mut self) {
self.subprocs.clear();
}
}
/*
struct RealCommandRunner : public CommandRunner {
explicit RealCommandRunner(const BuildConfig& config) : config_(config) {}
virtual ~RealCommandRunner() {}
virtual bool CanRunMore();
virtual bool StartCommand(Edge* edge);
virtual bool WaitForCommand(Result* result);
virtual vector<Edge*> GetActiveEdges();
virtual void Abort();
const BuildConfig& config_;
SubprocessSet subprocs_;
map<Subprocess*, Edge*> subproc_to_edge_;
};
bool RealCommandRunner::CanRunMore() {
size_t subproc_number =
subprocs_.running_.size() + subprocs_.finished_.size();
return (int)subproc_number < config_.parallelism
&& ((subprocs_.running_.empty() || config_.max_load_average <= 0.0f)
|| GetLoadAverage() < config_.max_load_average);
}
bool RealCommandRunner::StartCommand(Edge* edge) {
string command = edge->EvaluateCommand();
Subprocess* subproc = subprocs_.Add(command, edge->use_console());
if (!subproc)
return false;
subproc_to_edge_.insert(make_pair(subproc, edge));
return true;
}
bool RealCommandRunner::WaitForCommand(Result* result) {
Subprocess* subproc;
while ((subproc = subprocs_.NextFinished()) == NULL) {
bool interrupted = subprocs_.DoWork();
if (interrupted)
return false;
}
result->status = subproc->Finish();
result->output = subproc->GetOutput();
map<Subprocess*, Edge*>::iterator e = subproc_to_edge_.find(subproc);
result->edge = e->second;
subproc_to_edge_.erase(e);
delete subproc;
return true;
}
Builder::Builder(State* state, const BuildConfig& config,
BuildLog* build_log, DepsLog* deps_log,
DiskInterface* disk_interface)
: state_(state), config_(config), disk_interface_(disk_interface),
scan_(state, build_log, deps_log, disk_interface) {
status_ = new BuildStatus(config);
}
Node* Builder::AddTarget(const string& name, string* err) {
Node* node = state_->LookupNode(name);
if (!node) {
*err = "unknown target: '" + name + "'";
return NULL;
}
if (!AddTarget(node, err))
return NULL;
return node;
}
bool Builder::AddTarget(Node* node, string* err) {
if (!scan_.RecomputeDirty(node, err))
return false;
if (Edge* in_edge = node->in_edge()) {
if (in_edge->outputs_ready())
return true; // Nothing to do.
}
if (!plan_.AddTarget(node, err))
return false;
return true;
}
bool Builder::ExtractDeps(CommandRunner::Result* result,
const string& deps_type,
const string& deps_prefix,
vector<Node*>* deps_nodes,
string* err) {
if (deps_type == "msvc") {
CLParser parser;
string output;
if (!parser.Parse(result->output, deps_prefix, &output, err))
return false;
result->output = output;
for (set<string>::iterator i = parser.includes_.begin();
i != parser.includes_.end(); ++i) {
// ~0 is assuming that with MSVC-parsed headers, it's ok to always make
// all backslashes (as some of the slashes will certainly be backslashes
// anyway). This could be fixed if necessary with some additional
// complexity in IncludesNormalize::Relativize.
deps_nodes->push_back(state_->GetNode(*i, ~0u));
}
} else
if (deps_type == "gcc") {
string depfile = result->edge->GetUnescapedDepfile();
if (depfile.empty()) {
*err = string("edge with deps=gcc but no depfile makes no sense");
return false;
}
// Read depfile content. Treat a missing depfile as empty.
string content;
switch (disk_interface_->ReadFile(depfile, &content, err)) {
case DiskInterface::Okay:
break;
case DiskInterface::NotFound:
err->clear();
break;
case DiskInterface::OtherError:
return false;
}
if (content.empty())
return true;
DepfileParser deps;
if (!deps.Parse(&content, err))
return false;
// XXX check depfile matches expected output.
deps_nodes->reserve(deps.ins_.size());
for (vector<StringPiece>::iterator i = deps.ins_.begin();
i != deps.ins_.end(); ++i) {
uint64_t slash_bits;
if (!CanonicalizePath(const_cast<char*>(i->str_), &i->len_, &slash_bits,
err))
return false;
deps_nodes->push_back(state_->GetNode(*i, slash_bits));
}
if (!g_keep_depfile) {
if (disk_interface_->RemoveFile(depfile) < 0) {
*err = string("deleting depfile: ") + strerror(errno) + string("\n");
return false;
}
}
} else {
Fatal("unknown deps type '%s'", deps_type.c_str());
}
return true;
}
*/
#[cfg(test)]
mod tests {
use super::*;
use super::super::test::TestWithStateAndVFS;
use super::super::graph::Node;
/// Fixture for tests involving Plan.
/// Though Plan doesn't use State, it's useful to have one around
/// to create Nodes and Edges.
struct PlanTestData {
plan: Plan,
}
impl Default for PlanTestData {
fn default() -> Self {
PlanTestData { plan: Plan::new() }
}
}
type PlanTest = TestWithStateAndVFS<PlanTestData>;
impl PlanTest {
pub fn new() -> Self {
Self::new_with_builtin_rule()
}
}
/*
/// Fixture for tests involving Plan.
// Though Plan doesn't use State, it's useful to have one around
// to create Nodes and Edges.
struct PlanTest : public StateTestWithBuiltinRules {
Plan plan_;
/// Because FindWork does not return Edges in any sort of predictable order,
// provide a means to get available Edges in order and in a format which is
// easy to write tests around.
void FindWorkSorted(deque<Edge*>* ret, int count) {
struct CompareEdgesByOutput {
static bool cmp(const Edge* a, const Edge* b) {
return a->outputs_[0]->path() < b->outputs_[0]->path();
}
};
for (int i = 0; i < count; ++i) {
ASSERT_TRUE(plan_.more_to_do());
Edge* edge = plan_.FindWork();
ASSERT_TRUE(edge);
ret->push_back(edge);
}
ASSERT_FALSE(plan_.FindWork());
sort(ret->begin(), ret->end(), CompareEdgesByOutput::cmp);
}
void TestPoolWithDepthOne(const char *test_case);
};
*/
#[test]
fn plantest_basic() {
let mut plantest = PlanTest::new();
plantest.assert_parse(
concat!("build out: cat mid\n", "build mid: cat in\n").as_bytes(),
);
plantest.assert_with_node_mut(b"mid", Node::mark_dirty);
plantest.assert_with_node_mut(b"out", Node::mark_dirty);
let out_node_idx = plantest.assert_node_idx(b"out");
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
assert_eq!(Ok(true), plan.add_target(state, out_node_idx));
assert_eq!(true, plan.more_to_do());
let edge_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(edge_idx);
let input0 = edge.inputs[0];
assert_eq!(b"in", state.node_state.get_node(input0).path());
let output0 = edge.outputs[0];
assert_eq!(b"mid", state.node_state.get_node(output0).path());
}
assert_eq!(None, plan.find_work());
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(edge_idx);
let input0 = edge.inputs[0];
assert_eq!(b"mid", state.node_state.get_node(input0).path());
let output0 = edge.outputs[0];
assert_eq!(b"out", state.node_state.get_node(output0).path());
}
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
assert_eq!(false, plan.more_to_do());
assert_eq!(None, plan.find_work());
}
// Test that two outputs from one rule can be handled as inputs to the next.
#[test]
fn plantest_double_output_direct() {
let mut plantest = PlanTest::new();
plantest.assert_parse(
concat!("build out: cat mid1 mid2\n", "build mid1 mid2: cat in\n").as_bytes(),
);
plantest.assert_with_node_mut(b"mid1", Node::mark_dirty);
plantest.assert_with_node_mut(b"mid2", Node::mark_dirty);
plantest.assert_with_node_mut(b"out", Node::mark_dirty);
let out_node_idx = plantest.assert_node_idx(b"out");
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
assert_eq!(Ok(true), plan.add_target(state, out_node_idx));
assert_eq!(true, plan.more_to_do());
let edge_idx = plan.find_work().unwrap(); // cat in
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat mid1 mid2
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
assert_eq!(None, plan.find_work()); // done
}
// Test that two outputs from one rule can eventually be routed to another.
#[test]
fn plantest_double_output_indirect() {
let mut plantest = PlanTest::new();
plantest.assert_parse(
concat!(
"build out: cat b1 b2\n",
"build b1: cat a1\n",
"build b2: cat a2\n",
"build a1 a2: cat in\n"
).as_bytes(),
);
plantest.assert_with_node_mut(b"a1", Node::mark_dirty);
plantest.assert_with_node_mut(b"a2", Node::mark_dirty);
plantest.assert_with_node_mut(b"b1", Node::mark_dirty);
plantest.assert_with_node_mut(b"b2", Node::mark_dirty);
plantest.assert_with_node_mut(b"out", Node::mark_dirty);
let out_node_idx = plantest.assert_node_idx(b"out");
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
assert_eq!(Ok(true), plan.add_target(state, out_node_idx));
assert_eq!(true, plan.more_to_do());
let edge_idx = plan.find_work().unwrap(); // cat in
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat a1
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat a2
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat b1 b2
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
assert_eq!(None, plan.find_work()); // done
}
// Test that two edges from one output can both execute.
#[test]
fn plantest_double_dependent() {
let mut plantest = PlanTest::new();
plantest.assert_parse(
concat!(
"build out: cat a1 a2\n",
"build a1: cat mid\n",
"build a2: cat mid\n",
"build mid: cat in\n"
).as_bytes(),
);
plantest.assert_with_node_mut(b"mid", Node::mark_dirty);
plantest.assert_with_node_mut(b"a1", Node::mark_dirty);
plantest.assert_with_node_mut(b"a2", Node::mark_dirty);
plantest.assert_with_node_mut(b"out", Node::mark_dirty);
let out_node_idx = plantest.assert_node_idx(b"out");
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
assert_eq!(Ok(true), plan.add_target(state, out_node_idx));
assert_eq!(true, plan.more_to_do());
let edge_idx = plan.find_work().unwrap(); // cat in
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat mid
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat mid
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap(); // cat a1 a2
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
assert_eq!(None, plan.find_work()); // done
}
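/// Shared body for the pool tests below: with a pool of depth one, only a
/// single poolcat edge may be in flight at a time.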
fn test_pool_with_depth_one_helper(plantest: &mut PlanTest, test_case: &[u8]) {
plantest.assert_parse(test_case);
plantest.assert_with_node_mut(b"out1", Node::mark_dirty);
plantest.assert_with_node_mut(b"out2", Node::mark_dirty);
let out1_node_idx = plantest.assert_node_idx(b"out1");
let out2_node_idx = plantest.assert_node_idx(b"out2");
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
assert_eq!(Ok(true), plan.add_target(state, out1_node_idx));
assert_eq!(Ok(true), plan.add_target(state, out2_node_idx));
assert_eq!(true, plan.more_to_do());
let edge_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(edge_idx);
let edge_in0_idx = edge.inputs.get(0).cloned().unwrap();
let edge_in0_node = state.node_state.get_node(edge_in0_idx);
assert_eq!(b"in".as_ref(), edge_in0_node.path());
let edge_out0_idx = edge.outputs.get(0).cloned().unwrap();
let edge_out0_node = state.node_state.get_node(edge_out0_idx);
assert_eq!(b"out1".as_ref(), edge_out0_node.path());
}
// This will be None since poolcat is serialized.
assert!(plan.find_work().is_none());
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
let edge_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(edge_idx);
let edge_in0_idx = edge.inputs.get(0).cloned().unwrap();
let edge_in0_node = state.node_state.get_node(edge_in0_idx);
assert_eq!(b"in".as_ref(), edge_in0_node.path());
let edge_out0_idx = edge.outputs.get(0).cloned().unwrap();
let edge_out0_node = state.node_state.get_node(edge_out0_idx);
assert_eq!(b"out2".as_ref(), edge_out0_node.path());
}
// This will be None since poolcat is serialized.
assert!(plan.find_work().is_none());
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
assert_eq!(false, plan.more_to_do());
assert_eq!(None, plan.find_work()); // done
}
#[test]
fn plantest_pool_with_depth_one() {
let mut plantest = PlanTest::new();
test_pool_with_depth_one_helper(
&mut plantest,
concat!(
"pool foobar\n",
" depth = 1\n",
"rule poolcat\n",
" command = cat $in > $out\n",
" pool = foobar\n",
"build out1: poolcat in\n",
"build out2: poolcat in\n"
).as_bytes(),
);
}
#[test]
fn plantest_console_pool() {
let mut plantest = PlanTest::new();
test_pool_with_depth_one_helper(
&mut plantest,
concat!(
"rule poolcat\n",
" command = cat $in > $out\n",
" pool = console\n",
"build out1: poolcat in\n",
"build out2: poolcat in\n",
).as_bytes(),
);
}
/// Because find_work does not return edges in any predictable order, provide
/// a means to get available edges in order and in a format which is easy to
/// write tests around.
fn find_work_sorted_helper(
plan: &mut Plan,
state: &State,
count: usize,
) -> VecDeque<EdgeIndex> {
let mut result = (0..count)
.map(|_| {
assert!(plan.more_to_do());
plan.find_work().unwrap()
})
.collect::<Vec<_>>();
assert!(plan.find_work().is_none());
result.sort_by_key(|e| {
state
.node_state
.get_node(state.edge_state.get_edge(*e).outputs[0])
.path()
});
result.into_iter().collect()
}
#[test]
fn plantest_pools_with_depth_two() {
let mut plantest = PlanTest::new();
plantest.assert_parse(
concat!(
"pool foobar\n",
" depth = 2\n",
"pool bazbin\n",
" depth = 2\n",
"rule foocat\n",
" command = cat $in > $out\n",
" pool = foobar\n",
"rule bazcat\n",
" command = cat $in > $out\n",
" pool = bazbin\n",
"build out1: foocat in\n",
"build out2: foocat in\n",
"build out3: foocat in\n",
"build outb1: bazcat in\n",
"build outb2: bazcat in\n",
"build outb3: bazcat in\n",
" pool =\n",
"build allTheThings: cat out1 out2 out3 outb1 outb2 outb3\n"
).as_bytes(),
);
[
b"out1".as_ref(),
b"out2".as_ref(),
b"out3".as_ref(),
b"outb1".as_ref(),
b"outb2".as_ref(),
b"outb3".as_ref(),
b"allTheThings".as_ref(),
].as_ref()
.iter()
.for_each(|path| {
plantest.assert_with_node_mut(path, Node::mark_dirty);
});
let mut state = plantest.state.borrow_mut();
let state = &mut *state;
let plan = &mut plantest.other.plan;
let all_the_things_node = state.node_state.lookup_node(b"allTheThings").unwrap();
assert_eq!(Ok(true), plan.add_target(state, all_the_things_node));
let mut edges = find_work_sorted_helper(plan, state, 5);
{
let edge_idx = edges[0];
let edge = state.edge_state.get_edge(edge_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"out1".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
{
let edge_idx = edges[1];
let edge = state.edge_state.get_edge(edge_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"out2".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
{
let edge_idx = edges[2];
let edge = state.edge_state.get_edge(edge_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"outb1".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
{
let edge_idx = edges[3];
let edge = state.edge_state.get_edge(edge_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"outb2".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
{
let edge_idx = edges[4];
let edge = state.edge_state.get_edge(edge_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"outb3".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
// finish out1
plan.edge_finished(state, edges.pop_front().unwrap(), EdgeResult::EdgeSucceeded);
let out3_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(out3_idx);
assert_eq!(
b"in".as_ref(),
state.node_state.get_node(edge.inputs[0]).path()
);
assert_eq!(
b"out3".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
assert!(plan.find_work().is_none());
plan.edge_finished(state, out3_idx, EdgeResult::EdgeSucceeded);
assert!(plan.find_work().is_none());
edges.into_iter().for_each(|edge_idx| {
plan.edge_finished(state, edge_idx, EdgeResult::EdgeSucceeded);
});
let last_idx = plan.find_work().unwrap();
{
let edge = state.edge_state.get_edge(last_idx);
assert_eq!(
b"allTheThings".as_ref(),
state.node_state.get_node(edge.outputs[0]).path()
)
}
plan.edge_finished(state, last_idx, EdgeResult::EdgeSucceeded);
assert_eq!(false, plan.more_to_do());
assert_eq!(None, plan.find_work()); // done
}
/*
TEST_F(PlanTest, PoolWithRedundantEdges) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"pool compile\n"
" depth = 1\n"
"rule gen_foo\n"
" command = touch foo.cpp\n"
"rule gen_bar\n"
" command = touch bar.cpp\n"
"rule echo\n"
" command = echo $out > $out\n"
"build foo.cpp.obj: echo foo.cpp || foo.cpp\n"
" pool = compile\n"
"build bar.cpp.obj: echo bar.cpp || bar.cpp\n"
" pool = compile\n"
"build libfoo.a: echo foo.cpp.obj bar.cpp.obj\n"
"build foo.cpp: gen_foo\n"
"build bar.cpp: gen_bar\n"
"build all: phony libfoo.a\n"));
GetNode("foo.cpp")->MarkDirty();
GetNode("foo.cpp.obj")->MarkDirty();
GetNode("bar.cpp")->MarkDirty();
GetNode("bar.cpp.obj")->MarkDirty();
GetNode("libfoo.a")->MarkDirty();
GetNode("all")->MarkDirty();
string err;
EXPECT_TRUE(plan_.AddTarget(GetNode("all"), &err));
ASSERT_EQ("", err);
ASSERT_TRUE(plan_.more_to_do());
Edge* edge = NULL;
deque<Edge*> initial_edges;
FindWorkSorted(&initial_edges, 2);
edge = initial_edges[1]; // Foo first
ASSERT_EQ("foo.cpp", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_FALSE(plan_.FindWork());
ASSERT_EQ("foo.cpp", edge->inputs_[0]->path());
ASSERT_EQ("foo.cpp", edge->inputs_[1]->path());
ASSERT_EQ("foo.cpp.obj", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = initial_edges[0]; // Now for bar
ASSERT_EQ("bar.cpp", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_FALSE(plan_.FindWork());
ASSERT_EQ("bar.cpp", edge->inputs_[0]->path());
ASSERT_EQ("bar.cpp", edge->inputs_[1]->path());
ASSERT_EQ("bar.cpp.obj", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_FALSE(plan_.FindWork());
ASSERT_EQ("foo.cpp.obj", edge->inputs_[0]->path());
ASSERT_EQ("bar.cpp.obj", edge->inputs_[1]->path());
ASSERT_EQ("libfoo.a", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_FALSE(plan_.FindWork());
ASSERT_EQ("libfoo.a", edge->inputs_[0]->path());
ASSERT_EQ("all", edge->outputs_[0]->path());
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded);
edge = plan_.FindWork();
ASSERT_FALSE(edge);
ASSERT_FALSE(plan_.more_to_do());
}
TEST_F(PlanTest, PoolWithFailingEdge) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"pool foobar\n"
" depth = 1\n"
"rule poolcat\n"
" command = cat $in > $out\n"
" pool = foobar\n"
"build out1: poolcat in\n"
"build out2: poolcat in\n"));
GetNode("out1")->MarkDirty();
GetNode("out2")->MarkDirty();
string err;
EXPECT_TRUE(plan_.AddTarget(GetNode("out1"), &err));
ASSERT_EQ("", err);
EXPECT_TRUE(plan_.AddTarget(GetNode("out2"), &err));
ASSERT_EQ("", err);
ASSERT_TRUE(plan_.more_to_do());
Edge* edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_EQ("in", edge->inputs_[0]->path());
ASSERT_EQ("out1", edge->outputs_[0]->path());
// This will be false since poolcat is serialized
ASSERT_FALSE(plan_.FindWork());
plan_.EdgeFinished(edge, Plan::kEdgeFailed);
edge = plan_.FindWork();
ASSERT_TRUE(edge);
ASSERT_EQ("in", edge->inputs_[0]->path());
ASSERT_EQ("out2", edge->outputs_[0]->path());
ASSERT_FALSE(plan_.FindWork());
plan_.EdgeFinished(edge, Plan::kEdgeFailed);
ASSERT_TRUE(plan_.more_to_do()); // Jobs have failed
edge = plan_.FindWork();
ASSERT_EQ(0, edge);
}
/// Fake implementation of CommandRunner, useful for tests.
struct FakeCommandRunner : public CommandRunner {
explicit FakeCommandRunner(VirtualFileSystem* fs) :
last_command_(NULL), fs_(fs) {}
// CommandRunner impl
virtual bool CanRunMore();
virtual bool StartCommand(Edge* edge);
virtual bool WaitForCommand(Result* result);
virtual vector<Edge*> GetActiveEdges();
virtual void Abort();
vector<string> commands_ran_;
Edge* last_command_;
VirtualFileSystem* fs_;
};
*/
/*
struct BuildTest : public StateTestWithBuiltinRules, public BuildLogUser {
BuildTest() : config_(MakeConfig()), command_runner_(&fs_),
builder_(&state_, config_, NULL, NULL, &fs_),
status_(config_) {
}
virtual void SetUp() {
StateTestWithBuiltinRules::SetUp();
builder_.command_runner_.reset(&command_runner_);
AssertParse(&state_,
"build cat1: cat in1\n"
"build cat2: cat in1 in2\n"
"build cat12: cat cat1 cat2\n");
fs_.Create("in1", "");
fs_.Create("in2", "");
}
~BuildTest() {
builder_.command_runner_.release();
}
virtual bool IsPathDead(StringPiece s) const { return false; }
/// Rebuild target in the 'working tree' (fs_).
/// State of command_runner_ and logs contents (if specified) ARE MODIFIED.
/// Handy to check for NOOP builds, and higher-level rebuild tests.
void RebuildTarget(const string& target, const char* manifest,
const char* log_path = NULL, const char* deps_path = NULL,
State* state = NULL);
// Mark a path dirty.
void Dirty(const string& path);
BuildConfig MakeConfig() {
BuildConfig config;
config.verbosity = BuildConfig::QUIET;
return config;
}
BuildConfig config_;
FakeCommandRunner command_runner_;
VirtualFileSystem fs_;
Builder builder_;
BuildStatus status_;
};
void BuildTest::RebuildTarget(const string& target, const char* manifest,
const char* log_path, const char* deps_path,
State* state) {
State local_state, *pstate = &local_state;
if (state)
pstate = state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(pstate));
AssertParse(pstate, manifest);
string err;
BuildLog build_log, *pbuild_log = NULL;
if (log_path) {
ASSERT_TRUE(build_log.Load(log_path, &err));
ASSERT_TRUE(build_log.OpenForWrite(log_path, *this, &err));
ASSERT_EQ("", err);
pbuild_log = &build_log;
}
DepsLog deps_log, *pdeps_log = NULL;
if (deps_path) {
ASSERT_TRUE(deps_log.Load(deps_path, pstate, &err));
ASSERT_TRUE(deps_log.OpenForWrite(deps_path, &err));
ASSERT_EQ("", err);
pdeps_log = &deps_log;
}
Builder builder(pstate, config_, pbuild_log, pdeps_log, &fs_);
EXPECT_TRUE(builder.AddTarget(target, &err));
command_runner_.commands_ran_.clear();
builder.command_runner_.reset(&command_runner_);
if (!builder.AlreadyUpToDate()) {
bool build_res = builder.Build(&err);
EXPECT_TRUE(build_res);
}
builder.command_runner_.release();
}
bool FakeCommandRunner::CanRunMore() {
// Only run one at a time.
return last_command_ == NULL;
}
bool FakeCommandRunner::StartCommand(Edge* edge) {
assert(!last_command_);
commands_ran_.push_back(edge->EvaluateCommand());
if (edge->rule().name() == "cat" ||
edge->rule().name() == "cat_rsp" ||
edge->rule().name() == "cat_rsp_out" ||
edge->rule().name() == "cc" ||
edge->rule().name() == "touch" ||
edge->rule().name() == "touch-interrupt" ||
edge->rule().name() == "touch-fail-tick2") {
for (vector<Node*>::iterator out = edge->outputs_.begin();
out != edge->outputs_.end(); ++out) {
fs_->Create((*out)->path(), "");
}
} else if (edge->rule().name() == "true" ||
edge->rule().name() == "fail" ||
edge->rule().name() == "interrupt" ||
edge->rule().name() == "console") {
// Don't do anything.
} else {
printf("unknown command\n");
return false;
}
last_command_ = edge;
return true;
}
bool FakeCommandRunner::WaitForCommand(Result* result) {
if (!last_command_)
return false;
Edge* edge = last_command_;
result->edge = edge;
if (edge->rule().name() == "interrupt" ||
edge->rule().name() == "touch-interrupt") {
result->status = ExitInterrupted;
return true;
}
if (edge->rule().name() == "console") {
if (edge->use_console())
result->status = ExitSuccess;
else
result->status = ExitFailure;
last_command_ = NULL;
return true;
}
if (edge->rule().name() == "fail" ||
(edge->rule().name() == "touch-fail-tick2" && fs_->now_ == 2))
result->status = ExitFailure;
else
result->status = ExitSuccess;
last_command_ = NULL;
return true;
}
vector<Edge*> FakeCommandRunner::GetActiveEdges() {
vector<Edge*> edges;
if (last_command_)
edges.push_back(last_command_);
return edges;
}
void FakeCommandRunner::Abort() {
last_command_ = NULL;
}
void BuildTest::Dirty(const string& path) {
Node* node = GetNode(path);
node->MarkDirty();
// If it's an input file, mark that we've already stat()ed it and
// it's missing.
if (!node->in_edge())
node->MarkMissing();
}
TEST_F(BuildTest, NoWork) {
string err;
EXPECT_TRUE(builder_.AlreadyUpToDate());
}
TEST_F(BuildTest, OneStep) {
// Given a dirty target with one ready input,
// we should rebuild the target.
Dirty("cat1");
string err;
EXPECT_TRUE(builder_.AddTarget("cat1", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
EXPECT_EQ("cat in1 > cat1", command_runner_.commands_ran_[0]);
}
TEST_F(BuildTest, OneStep2) {
// Given a target with one dirty input,
// we should rebuild the target.
Dirty("cat1");
string err;
EXPECT_TRUE(builder_.AddTarget("cat1", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
EXPECT_EQ("cat in1 > cat1", command_runner_.commands_ran_[0]);
}
TEST_F(BuildTest, TwoStep) {
string err;
EXPECT_TRUE(builder_.AddTarget("cat12", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
// Depending on how the pointers work out, we could've ran
// the first two commands in either order.
EXPECT_TRUE((command_runner_.commands_ran_[0] == "cat in1 > cat1" &&
command_runner_.commands_ran_[1] == "cat in1 in2 > cat2") ||
(command_runner_.commands_ran_[1] == "cat in1 > cat1" &&
command_runner_.commands_ran_[0] == "cat in1 in2 > cat2"));
EXPECT_EQ("cat cat1 cat2 > cat12", command_runner_.commands_ran_[2]);
fs_.Tick();
// Modifying in2 requires rebuilding one intermediate file
// and the final file.
fs_.Create("in2", "");
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("cat12", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(5u, command_runner_.commands_ran_.size());
EXPECT_EQ("cat in1 in2 > cat2", command_runner_.commands_ran_[3]);
EXPECT_EQ("cat cat1 cat2 > cat12", command_runner_.commands_ran_[4]);
}
TEST_F(BuildTest, TwoOutputs) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch\n"
" command = touch $out\n"
"build out1 out2: touch in.txt\n"));
fs_.Create("in.txt", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out1", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
EXPECT_EQ("touch out1 out2", command_runner_.commands_ran_[0]);
}
TEST_F(BuildTest, ImplicitOutput) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch\n"
" command = touch $out $out.imp\n"
"build out | out.imp: touch in.txt\n"));
fs_.Create("in.txt", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[0]);
}
// Test case from
// https://github.com/ninja-build/ninja/issues/148
TEST_F(BuildTest, MultiOutIn) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch\n"
" command = touch $out\n"
"build in1 otherfile: touch in\n"
"build out: touch in | in1\n"));
fs_.Create("in", "");
fs_.Tick();
fs_.Create("in1", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
}
TEST_F(BuildTest, Chain) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build c2: cat c1\n"
"build c3: cat c2\n"
"build c4: cat c3\n"
"build c5: cat c4\n"));
fs_.Create("c1", "");
string err;
EXPECT_TRUE(builder_.AddTarget("c5", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(4u, command_runner_.commands_ran_.size());
err.clear();
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("c5", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
fs_.Tick();
fs_.Create("c3", "");
err.clear();
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("c5", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.AlreadyUpToDate());
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(2u, command_runner_.commands_ran_.size()); // 3->4, 4->5
}
TEST_F(BuildTest, MissingInput) {
// Input is referenced by build file, but no rule for it.
string err;
Dirty("in1");
EXPECT_FALSE(builder_.AddTarget("cat1", &err));
EXPECT_EQ("'in1', needed by 'cat1', missing and no known rule to make it",
err);
}
TEST_F(BuildTest, MissingTarget) {
// Target is not referenced by build file.
string err;
EXPECT_FALSE(builder_.AddTarget("meow", &err));
EXPECT_EQ("unknown target: 'meow'", err);
}
TEST_F(BuildTest, MakeDirs) {
string err;
#ifdef _WIN32
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build subdir\\dir2\\file: cat in1\n"));
#else
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build subdir/dir2/file: cat in1\n"));
#endif
EXPECT_TRUE(builder_.AddTarget("subdir/dir2/file", &err));
EXPECT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(2u, fs_.directories_made_.size());
EXPECT_EQ("subdir", fs_.directories_made_[0]);
EXPECT_EQ("subdir/dir2", fs_.directories_made_[1]);
}
TEST_F(BuildTest, DepFileMissing) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n depfile = $out.d\n"
"build fo$ o.o: cc foo.c\n"));
fs_.Create("foo.c", "");
EXPECT_TRUE(builder_.AddTarget("fo o.o", &err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, fs_.files_read_.size());
EXPECT_EQ("fo o.o.d", fs_.files_read_[0]);
}
TEST_F(BuildTest, DepFileOK) {
string err;
int orig_edges = state_.edges_.size();
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n depfile = $out.d\n"
"build foo.o: cc foo.c\n"));
Edge* edge = state_.edges_.back();
fs_.Create("foo.c", "");
GetNode("bar.h")->MarkDirty(); // Mark bar.h as missing.
fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n");
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, fs_.files_read_.size());
EXPECT_EQ("foo.o.d", fs_.files_read_[0]);
// Expect three new edges: one generating foo.o, and two more from
// loading the depfile.
ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
// Expect our edge to now have three inputs: foo.c and two headers.
ASSERT_EQ(3u, edge->inputs_.size());
// Expect the command line we generate to only use the original input.
ASSERT_EQ("cc foo.c", edge->EvaluateCommand());
}
TEST_F(BuildTest, DepFileParseError) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n depfile = $out.d\n"
"build foo.o: cc foo.c\n"));
fs_.Create("foo.c", "");
fs_.Create("foo.o.d", "randomtext\n");
EXPECT_FALSE(builder_.AddTarget("foo.o", &err));
EXPECT_EQ("foo.o.d: expected ':' in depfile", err);
}
TEST_F(BuildTest, EncounterReadyTwice) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch\n"
" command = touch $out\n"
"build c: touch\n"
"build b: touch || c\n"
"build a: touch | b || c\n"));
vector<Edge*> c_out = GetNode("c")->out_edges();
ASSERT_EQ(2u, c_out.size());
EXPECT_EQ("b", c_out[0]->outputs_[0]->path());
EXPECT_EQ("a", c_out[1]->outputs_[0]->path());
fs_.Create("b", "");
EXPECT_TRUE(builder_.AddTarget("a", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, OrderOnlyDeps) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n depfile = $out.d\n"
"build foo.o: cc foo.c || otherfile\n"));
Edge* edge = state_.edges_.back();
fs_.Create("foo.c", "");
fs_.Create("otherfile", "");
fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n");
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
ASSERT_EQ("", err);
// One explicit, two implicit, one order only.
ASSERT_EQ(4u, edge->inputs_.size());
EXPECT_EQ(2, edge->implicit_deps_);
EXPECT_EQ(1, edge->order_only_deps_);
// Verify the inputs are in the order we expect
// (explicit then implicit then order-only).
EXPECT_EQ("foo.c", edge->inputs_[0]->path());
EXPECT_EQ("blah.h", edge->inputs_[1]->path());
EXPECT_EQ("bar.h", edge->inputs_[2]->path());
EXPECT_EQ("otherfile", edge->inputs_[3]->path());
// Expect the command line we generate to only use the original input.
ASSERT_EQ("cc foo.c", edge->EvaluateCommand());
// explicit dep dirty, expect a rebuild.
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
fs_.Tick();
// Recreate the depfile, as it should have been deleted by the build.
fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n");
// implicit dep dirty, expect a rebuild.
fs_.Create("blah.h", "");
fs_.Create("bar.h", "");
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
fs_.Tick();
// Recreate the depfile, as it should have been deleted by the build.
fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n");
// order only dep dirty, no rebuild.
fs_.Create("otherfile", "");
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
// implicit dep missing, expect rebuild.
fs_.RemoveFile("bar.h");
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, RebuildOrderOnlyDeps) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n"
"rule true\n command = true\n"
"build oo.h: cc oo.h.in\n"
"build foo.o: cc foo.c || oo.h\n"));
fs_.Create("foo.c", "");
fs_.Create("oo.h.in", "");
// foo.o and order-only dep dirty, build both.
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// all clean, no rebuild.
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
// order-only dep missing, build it only.
fs_.RemoveFile("oo.h");
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
ASSERT_EQ("cc oo.h.in", command_runner_.commands_ran_[0]);
fs_.Tick();
// order-only dep dirty, build it only.
fs_.Create("oo.h.in", "");
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("foo.o", &err));
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
ASSERT_EQ("cc oo.h.in", command_runner_.commands_ran_[0]);
}
#ifdef _WIN32
TEST_F(BuildTest, DepFileCanonicalize) {
string err;
int orig_edges = state_.edges_.size();
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n command = cc $in\n depfile = $out.d\n"
"build gen/stuff\\things/foo.o: cc x\\y/z\\foo.c\n"));
Edge* edge = state_.edges_.back();
fs_.Create("x/y/z/foo.c", "");
GetNode("bar.h")->MarkDirty(); // Mark bar.h as missing.
// Note, different slashes from manifest.
fs_.Create("gen/stuff\\things/foo.o.d",
"gen\\stuff\\things\\foo.o: blah.h bar.h\n");
EXPECT_TRUE(builder_.AddTarget("gen/stuff/things/foo.o", &err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, fs_.files_read_.size());
// The depfile path is not canonicalized, as that seems unnecessary.
EXPECT_EQ("gen/stuff\\things/foo.o.d", fs_.files_read_[0]);
// Expect three new edges: one generating foo.o, and two more from
// loading the depfile.
ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
// Expect our edge to now have three inputs: foo.c and two headers.
ASSERT_EQ(3u, edge->inputs_.size());
// Expect the command line we generate to only use the original input, and
// using the slashes from the manifest.
ASSERT_EQ("cc x\\y/z\\foo.c", edge->EvaluateCommand());
}
#endif
TEST_F(BuildTest, Phony) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build out: cat bar.cc\n"
"build all: phony out\n"));
fs_.Create("bar.cc", "");
EXPECT_TRUE(builder_.AddTarget("all", &err));
ASSERT_EQ("", err);
// Only one command to run, because phony runs no command.
EXPECT_FALSE(builder_.AlreadyUpToDate());
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, PhonyNoWork) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build out: cat bar.cc\n"
"build all: phony out\n"));
fs_.Create("bar.cc", "");
fs_.Create("out", "");
EXPECT_TRUE(builder_.AddTarget("all", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
}
// Test a self-referencing phony. Ideally this should not work, but
// ninja 1.7 and below tolerated it, and CMake 2.8.12.x and 3.0.x both
// incorrectly produce it. We tolerate it for compatibility.
TEST_F(BuildTest, PhonySelfReference) {
string err;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build a: phony a\n"));
EXPECT_TRUE(builder_.AddTarget("a", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
}
TEST_F(BuildTest, Fail) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule fail\n"
" command = fail\n"
"build out1: fail\n"));
string err;
EXPECT_TRUE(builder_.AddTarget("out1", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
ASSERT_EQ("subcommand failed", err);
}
TEST_F(BuildTest, SwallowFailures) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule fail\n"
" command = fail\n"
"build out1: fail\n"
"build out2: fail\n"
"build out3: fail\n"
"build all: phony out1 out2 out3\n"));
// Swallow two failures, die on the third.
config_.failures_allowed = 3;
string err;
EXPECT_TRUE(builder_.AddTarget("all", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
ASSERT_EQ("subcommands failed", err);
}
TEST_F(BuildTest, SwallowFailuresLimit) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule fail\n"
" command = fail\n"
"build out1: fail\n"
"build out2: fail\n"
"build out3: fail\n"
"build final: cat out1 out2 out3\n"));
// Swallow ten failures; we should stop before building final.
config_.failures_allowed = 11;
string err;
EXPECT_TRUE(builder_.AddTarget("final", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
ASSERT_EQ("cannot make progress due to previous errors", err);
}
TEST_F(BuildTest, SwallowFailuresPool) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"pool failpool\n"
" depth = 1\n"
"rule fail\n"
" command = fail\n"
" pool = failpool\n"
"build out1: fail\n"
"build out2: fail\n"
"build out3: fail\n"
"build final: cat out1 out2 out3\n"));
// Swallow ten failures; we should stop before building final.
config_.failures_allowed = 11;
string err;
EXPECT_TRUE(builder_.AddTarget("final", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
ASSERT_EQ("cannot make progress due to previous errors", err);
}
TEST_F(BuildTest, PoolEdgesReadyButNotWanted) {
fs_.Create("x", "");
const char* manifest =
"pool some_pool\n"
" depth = 4\n"
"rule touch\n"
" command = touch $out\n"
" pool = some_pool\n"
"rule cc\n"
" command = touch grit\n"
"\n"
"build B.d.stamp: cc | x\n"
"build C.stamp: touch B.d.stamp\n"
"build final.stamp: touch || C.stamp\n";
RebuildTarget("final.stamp", manifest);
fs_.RemoveFile("B.d.stamp");
State save_state;
RebuildTarget("final.stamp", manifest, NULL, NULL, &save_state);
EXPECT_GE(save_state.LookupPool("some_pool")->current_use(), 0);
}
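// Fixture that additionally wires a BuildLog into the builder, so command
// hashes and restat mtimes are recorded and consulted across builds.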
struct BuildWithLogTest : public BuildTest {
BuildWithLogTest() {
builder_.SetBuildLog(&build_log_);
}
BuildLog build_log_;
};
TEST_F(BuildWithLogTest, NotInLogButOnDisk) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n"
" command = cc\n"
"build out1: cc in\n"));
// Create input/output that would be considered up to date when
// not considering the command line hash.
fs_.Create("in", "");
fs_.Create("out1", "");
string err;
// Because it's not in the log, it should not be up-to-date until
// we build again.
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_FALSE(builder_.AlreadyUpToDate());
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_TRUE(builder_.Build(&err));
EXPECT_TRUE(builder_.AlreadyUpToDate());
}
TEST_F(BuildWithLogTest, RebuildAfterFailure) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch-fail-tick2\n"
" command = touch-fail-tick2\n"
"build out1: touch-fail-tick2 in\n"));
string err;
fs_.Create("in", "");
// Run once successfully to get out1 in the log
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
command_runner_.commands_ran_.clear();
state_.Reset();
builder_.Cleanup();
builder_.plan_.Reset();
fs_.Tick();
fs_.Create("in", "");
// Run again with a failure that updates the output file timestamp
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_FALSE(builder_.Build(&err));
EXPECT_EQ("subcommand failed", err);
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
command_runner_.commands_ran_.clear();
state_.Reset();
builder_.Cleanup();
builder_.plan_.Reset();
fs_.Tick();
// Run again, should rerun even though the output file is up to date on disk
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_FALSE(builder_.AlreadyUpToDate());
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
EXPECT_EQ("", err);
}
TEST_F(BuildWithLogTest, RebuildWithNoInputs) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule touch\n"
" command = touch\n"
"build out1: touch\n"
"build out2: touch in\n"));
string err;
fs_.Create("in", "");
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_TRUE(builder_.AddTarget("out2", &err));
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
EXPECT_EQ(2u, command_runner_.commands_ran_.size());
command_runner_.commands_ran_.clear();
state_.Reset();
fs_.Tick();
fs_.Create("in", "");
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_TRUE(builder_.AddTarget("out2", &err));
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildWithLogTest, RestatTest) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n"
" restat = 1\n"
"rule cc\n"
" command = cc\n"
" restat = 1\n"
"build out1: cc in\n"
"build out2: true out1\n"
"build out3: cat out2\n"));
fs_.Create("out1", "");
fs_.Create("out2", "");
fs_.Create("out3", "");
fs_.Tick();
fs_.Create("in", "");
// Do a pre-build so that there are commands in the log for the outputs;
// otherwise, the lack of an entry in the build log will cause out3 to rebuild
// regardless of restat.
string err;
EXPECT_TRUE(builder_.AddTarget("out3", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
EXPECT_EQ("[3/3]", builder_.status_->FormatProgressStatus("[%s/%t]",
BuildStatus::kEdgeStarted));
command_runner_.commands_ran_.clear();
state_.Reset();
fs_.Tick();
fs_.Create("in", "");
// "cc" touches out1, so we should build out2. But because "true" does not
// touch out2, we should cancel the build of out3.
EXPECT_TRUE(builder_.AddTarget("out3", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// If we run again, it should be a no-op, because the build log has recorded
// that we've already built out2 with an input timestamp of 2 (from out1).
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out3", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
fs_.Tick();
fs_.Create("in", "");
// The build log entry should not, however, prevent us from rebuilding out2
// if out1 changes.
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out3", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
}
TEST_F(BuildWithLogTest, RestatMissingFile) {
// If a restat rule doesn't create its output, and the output didn't
// exist before the rule was run, consider that behavior equivalent
// to a rule that doesn't modify its existing output file.
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n"
" restat = 1\n"
"rule cc\n"
" command = cc\n"
"build out1: true in\n"
"build out2: cc out1\n"));
fs_.Create("in", "");
fs_.Create("out2", "");
// Do a pre-build so that there are commands in the log for the outputs;
// otherwise, the lack of an entry in the build log will cause out2 to rebuild
// regardless of restat.
string err;
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
command_runner_.commands_ran_.clear();
state_.Reset();
fs_.Tick();
fs_.Create("in", "");
fs_.Create("out2", "");
// Run a build, expect only the first command to run.
// It doesn't touch its output (due to being the "true" command), so
// we shouldn't run the dependent build.
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildWithLogTest, RestatSingleDependentOutputDirty) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n"
" restat = 1\n"
"rule touch\n"
" command = touch\n"
"build out1: true in\n"
"build out2 out3: touch out1\n"
"build out4: touch out2\n"
));
// Create the necessary files
fs_.Create("in", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out4", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
fs_.Tick();
fs_.Create("in", "");
fs_.RemoveFile("out3");
// Since "in" is missing, out1 will be built. Since "out3" is missing,
// out2 and out3 will be built even though "in" is not touched when built.
// Then, since out2 is rebuilt, out4 should be rebuilt -- the restat on the
// "true" rule should not lead to the "touch" edge writing out2 and out3 being
// cleard.
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out4", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ("", err);
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
}
// Test scenario, in which an input file is removed, but output isn't changed
// https://github.com/ninja-build/ninja/issues/295
TEST_F(BuildWithLogTest, RestatMissingInput) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n"
" depfile = $out.d\n"
" restat = 1\n"
"rule cc\n"
" command = cc\n"
"build out1: true in\n"
"build out2: cc out1\n"));
// Create all necessary files
fs_.Create("in", "");
// The implicit dependencies and the depfile itself
// are newer than the output
TimeStamp restat_mtime = fs_.Tick();
fs_.Create("out1.d", "out1: will.be.deleted restat.file\n");
fs_.Create("will.be.deleted", "");
fs_.Create("restat.file", "");
// Run the build, out1 and out2 get built
string err;
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// See that an entry in the logfile is created, capturing
// the right mtime
BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out1");
ASSERT_TRUE(NULL != log_entry);
ASSERT_EQ(restat_mtime, log_entry->mtime);
// Now remove a file referenced from the depfile, so that the target becomes
// dirty, but the output does not change
fs_.RemoveFile("will.be.deleted");
// Trigger the build again - only out1 gets built
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
// Check that the logfile entry remains correctly set
log_entry = build_log_.LookupByOutput("out1");
ASSERT_TRUE(NULL != log_entry);
ASSERT_EQ(restat_mtime, log_entry->mtime);
}
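// Fixture that enables dry-run mode: the plan is still walked and commands
// are recorded as if run, but no outputs are actually produced.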
struct BuildDryRun : public BuildWithLogTest {
BuildDryRun() {
config_.dry_run = true;
}
};
TEST_F(BuildDryRun, AllCommandsShown) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n"
" restat = 1\n"
"rule cc\n"
" command = cc\n"
" restat = 1\n"
"build out1: cc in\n"
"build out2: true out1\n"
"build out3: cat out2\n"));
fs_.Create("out1", "");
fs_.Create("out2", "");
fs_.Create("out3", "");
fs_.Tick();
fs_.Create("in", "");
// "cc" touches out1, so we should build out2. But because "true" does not
// touch out2, we should cancel the build of out3.
string err;
EXPECT_TRUE(builder_.AddTarget("out3", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
}
// Test that RSP files are created when & where appropriate and deleted after
// successful execution.
TEST_F(BuildTest, RspFileSuccess)
{
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cat_rsp\n"
" command = cat $rspfile > $out\n"
" rspfile = $rspfile\n"
" rspfile_content = $long_command\n"
"rule cat_rsp_out\n"
" command = cat $rspfile > $out\n"
" rspfile = $out.rsp\n"
" rspfile_content = $long_command\n"
"build out1: cat in\n"
"build out2: cat_rsp in\n"
" rspfile = out 2.rsp\n"
" long_command = Some very long command\n"
"build out$ 3: cat_rsp_out in\n"
" long_command = Some very long command\n"));
fs_.Create("out1", "");
fs_.Create("out2", "");
fs_.Create("out 3", "");
fs_.Tick();
fs_.Create("in", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out1", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AddTarget("out 3", &err));
ASSERT_EQ("", err);
size_t files_created = fs_.files_created_.size();
size_t files_removed = fs_.files_removed_.size();
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
// The RSP files were created
ASSERT_EQ(files_created + 2, fs_.files_created_.size());
ASSERT_EQ(1u, fs_.files_created_.count("out 2.rsp"));
ASSERT_EQ(1u, fs_.files_created_.count("out 3.rsp"));
// The RSP files were removed
ASSERT_EQ(files_removed + 2, fs_.files_removed_.size());
ASSERT_EQ(1u, fs_.files_removed_.count("out 2.rsp"));
ASSERT_EQ(1u, fs_.files_removed_.count("out 3.rsp"));
}
// Test that the RSP file is created but not removed for commands that fail
TEST_F(BuildTest, RspFileFailure) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule fail\n"
" command = fail\n"
" rspfile = $rspfile\n"
" rspfile_content = $long_command\n"
"build out: fail in\n"
" rspfile = out.rsp\n"
" long_command = Another very long command\n"));
fs_.Create("out", "");
fs_.Tick();
fs_.Create("in", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out", &err));
ASSERT_EQ("", err);
size_t files_created = fs_.files_created_.size();
size_t files_removed = fs_.files_removed_.size();
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ("subcommand failed", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
// The RSP file was created
ASSERT_EQ(files_created + 1, fs_.files_created_.size());
ASSERT_EQ(1u, fs_.files_created_.count("out.rsp"));
// The RSP file was NOT removed
ASSERT_EQ(files_removed, fs_.files_removed_.size());
ASSERT_EQ(0u, fs_.files_removed_.count("out.rsp"));
// The RSP file contains what it should
ASSERT_EQ("Another very long command", fs_.files_["out.rsp"].contents);
}
// Test that the contents of the RSP file behave like a regular part of the
// command line, i.e. trigger a rebuild if changed
TEST_F(BuildWithLogTest, RspFileCmdLineChange) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cat_rsp\n"
" command = cat $rspfile > $out\n"
" rspfile = $rspfile\n"
" rspfile_content = $long_command\n"
"build out: cat_rsp in\n"
" rspfile = out.rsp\n"
" long_command = Original very long command\n"));
fs_.Create("out", "");
fs_.Tick();
fs_.Create("in", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out", &err));
ASSERT_EQ("", err);
// 1. Build for the 1st time (-> populate log)
EXPECT_TRUE(builder_.Build(&err));
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
// 2. Build again (no change)
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out", &err));
EXPECT_EQ("", err);
ASSERT_TRUE(builder_.AlreadyUpToDate());
// 3. Alter the entry in the logfile
// (to simulate a change in the command line between 2 builds)
BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out");
ASSERT_TRUE(NULL != log_entry);
ASSERT_NO_FATAL_FAILURE(AssertHash(
"cat out.rsp > out;rspfile=Original very long command",
log_entry->command_hash));
log_entry->command_hash++; // Change the command hash to something else.
// Now expect the target to be rebuilt
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out", &err));
EXPECT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, InterruptCleanup) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule interrupt\n"
" command = interrupt\n"
"rule touch-interrupt\n"
" command = touch-interrupt\n"
"build out1: interrupt in1\n"
"build out2: touch-interrupt in2\n"));
fs_.Create("out1", "");
fs_.Create("out2", "");
fs_.Tick();
fs_.Create("in1", "");
fs_.Create("in2", "");
// An untouched output of an interrupted command should be retained.
string err;
EXPECT_TRUE(builder_.AddTarget("out1", &err));
EXPECT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
EXPECT_EQ("interrupted by user", err);
builder_.Cleanup();
EXPECT_GT(fs_.Stat("out1", &err), 0);
err = "";
// A touched output of an interrupted command should be deleted.
EXPECT_TRUE(builder_.AddTarget("out2", &err));
EXPECT_EQ("", err);
EXPECT_FALSE(builder_.Build(&err));
EXPECT_EQ("interrupted by user", err);
builder_.Cleanup();
EXPECT_EQ(0, fs_.Stat("out2", &err));
}
TEST_F(BuildTest, StatFailureAbortsBuild) {
const string kTooLongToStat(400, 'i');
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
("build " + kTooLongToStat + ": cat in\n").c_str()));
fs_.Create("in", "");
// This simulates a stat failure:
fs_.files_[kTooLongToStat].mtime = -1;
fs_.files_[kTooLongToStat].stat_error = "stat failed";
string err;
EXPECT_FALSE(builder_.AddTarget(kTooLongToStat, &err));
EXPECT_EQ("stat failed", err);
}
TEST_F(BuildTest, PhonyWithNoInputs) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build nonexistent: phony\n"
"build out1: cat || nonexistent\n"
"build out2: cat nonexistent\n"));
fs_.Create("out1", "");
fs_.Create("out2", "");
// out1 should be up to date even though its input is dirty, because its
// order-only dependency has nothing to do.
string err;
EXPECT_TRUE(builder_.AddTarget("out1", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.AlreadyUpToDate());
// out2 should still be out of date though, because its input is dirty.
err.clear();
command_runner_.commands_ran_.clear();
state_.Reset();
EXPECT_TRUE(builder_.AddTarget("out2", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, DepsGccWithEmptyDepfileErrorsOut) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n"
" command = cc\n"
" deps = gcc\n"
"build out: cc\n"));
Dirty("out");
string err;
EXPECT_TRUE(builder_.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_FALSE(builder_.AlreadyUpToDate());
EXPECT_FALSE(builder_.Build(&err));
ASSERT_EQ("subcommand failed", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, StatusFormatElapsed) {
status_.BuildStarted();
// Before any task is done, the elapsed time must be zero.
EXPECT_EQ("[%/e0.000]",
status_.FormatProgressStatus("[%%/e%e]",
BuildStatus::kEdgeStarted));
}
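// %s/%t/%r/%u/%f expand to the started/total/running/remaining/finished edge
// counts (the NINJA_STATUS placeholders); before any edge starts they should
// all be zero.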
TEST_F(BuildTest, StatusFormatReplacePlaceholder) {
EXPECT_EQ("[%/s0/t0/r0/u0/f0]",
status_.FormatProgressStatus("[%%/s%s/t%t/r%r/u%u/f%f]",
BuildStatus::kEdgeStarted));
}
TEST_F(BuildTest, FailedDepsParse) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"build bad_deps.o: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n"));
string err;
EXPECT_TRUE(builder_.AddTarget("bad_deps.o", &err));
ASSERT_EQ("", err);
// These deps will fail to parse, as they should only have one
// path to the left of the colon.
fs_.Create("in1.d", "AAA BBB");
EXPECT_FALSE(builder_.Build(&err));
EXPECT_EQ("subcommand failed", err);
}
/// Tests of builds involving deps logs necessarily must span
/// multiple builds. We reuse methods on BuildTest but not the
/// builder_ it sets up, because we want pristine objects for
/// each build.
struct BuildWithDepsLogTest : public BuildTest {
BuildWithDepsLogTest() {}
virtual void SetUp() {
BuildTest::SetUp();
temp_dir_.CreateAndEnter("BuildWithDepsLogTest");
}
virtual void TearDown() {
temp_dir_.Cleanup();
}
ScopedTempDir temp_dir_;
/// Shadow parent class builder_ so we don't accidentally use it.
void* builder_;
};
/// Run a straightforward build where the deps log is used.
TEST_F(BuildWithDepsLogTest, Straightforward) {
string err;
// Note: in1 was created by the superclass SetUp().
const char* manifest =
"build out: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n";
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Run the build once, everything should be ok.
DepsLog deps_log;
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
fs_.Create("in1.d", "out: in2");
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
// The deps file should have been removed.
EXPECT_EQ(0, fs_.Stat("in1.d", &err));
// Recreate it for the next step.
fs_.Create("in1.d", "out: in2");
deps_log.Close();
builder.command_runner_.release();
}
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Touch the file only mentioned in the deps.
fs_.Tick();
fs_.Create("in2", "");
// Run the build again.
DepsLog deps_log;
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
command_runner_.commands_ran_.clear();
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
// We should have rebuilt the output due to in2 being
// out of date.
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
builder.command_runner_.release();
}
}
/// Verify that obsolete dependency info causes a rebuild.
/// 1) Run a successful build where everything has time t, record deps.
/// 2) Move input/output to time t+1 -- despite the files being in alignment,
///    we should still need to rebuild, because the recorded deps are at the older time.
TEST_F(BuildWithDepsLogTest, ObsoleteDeps) {
string err;
// Note: in1 was created by the superclass SetUp().
const char* manifest =
"build out: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n";
{
// Run an ordinary build that gathers dependencies.
fs_.Create("in1", "");
fs_.Create("in1.d", "out: ");
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Run the build once, everything should be ok.
DepsLog deps_log;
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
deps_log.Close();
builder.command_runner_.release();
}
// Push all files one tick forward so that only the deps are out
// of date.
fs_.Tick();
fs_.Create("in1", "");
fs_.Create("out", "");
// The deps file should have been removed, so no need to timestamp it.
EXPECT_EQ(0, fs_.Stat("in1.d", &err));
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
command_runner_.commands_ran_.clear();
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
// Recreate the deps file here because the build expects it to exist.
fs_.Create("in1.d", "out: ");
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
// We should have rebuilt the output due to the deps being
// out of date.
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
builder.command_runner_.release();
}
}
TEST_F(BuildWithDepsLogTest, DepsIgnoredInDryRun) {
const char* manifest =
"build out: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n";
fs_.Create("out", "");
fs_.Tick();
fs_.Create("in1", "");
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// The deps log is NULL in dry runs.
config_.dry_run = true;
Builder builder(&state, config_, NULL, NULL, &fs_);
builder.command_runner_.reset(&command_runner_);
command_runner_.commands_ran_.clear();
string err;
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder.Build(&err));
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
builder.command_runner_.release();
}
/// Check that a restat rule generating a header cancels compilations correctly.
TEST_F(BuildTest, RestatDepfileDependency) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule true\n"
" command = true\n" // Would be "write if out-of-date" in reality.
" restat = 1\n"
"build header.h: true header.in\n"
"build out: cat in1\n"
" depfile = in1.d\n"));
fs_.Create("header.h", "");
fs_.Create("in1.d", "out: header.h");
fs_.Tick();
fs_.Create("header.in", "");
string err;
EXPECT_TRUE(builder_.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
}
/// Check that a restat rule generating a header cancels compilations correctly,
/// depslog case.
TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) {
string err;
// Note: in1 was created by the superclass SetUp().
const char* manifest =
"rule true\n"
" command = true\n" // Would be "write if out-of-date" in reality.
" restat = 1\n"
"build header.h: true header.in\n"
"build out: cat in1\n"
" deps = gcc\n"
" depfile = in1.d\n";
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Run the build once, everything should be ok.
DepsLog deps_log;
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
fs_.Create("in1.d", "out: header.h");
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
deps_log.Close();
builder.command_runner_.release();
}
{
State state;
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Touch the input of the restat rule.
fs_.Tick();
fs_.Create("header.in", "");
// Run the build again.
DepsLog deps_log;
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
command_runner_.commands_ran_.clear();
EXPECT_TRUE(builder.AddTarget("out", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
// Rule "true" should have run again, but the build of "out" should have
// been cancelled due to restat propagating through the depfile header.
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
builder.command_runner_.release();
}
}
TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
string err;
const char* manifest =
"rule cc\n command = cc $in\n depfile = $out.d\n deps = gcc\n"
"build fo$ o.o: cc foo.c\n";
fs_.Create("foo.c", "");
{
State state;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Run the build once, everything should be ok.
DepsLog deps_log;
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
EXPECT_TRUE(builder.AddTarget("fo o.o", &err));
ASSERT_EQ("", err);
fs_.Create("fo o.o.d", "fo\\ o.o: blah.h bar.h\n");
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
deps_log.Close();
builder.command_runner_.release();
}
{
State state;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
Edge* edge = state.edges_.back();
state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing.
EXPECT_TRUE(builder.AddTarget("fo o.o", &err));
ASSERT_EQ("", err);
// Expect three new edges: one generating fo o.o, and two more from
// loading the depfile.
ASSERT_EQ(3u, state.edges_.size());
// Expect our edge to now have three inputs: foo.c and two headers.
ASSERT_EQ(3u, edge->inputs_.size());
// Expect the command line we generate to only use the original input.
ASSERT_EQ("cc foo.c", edge->EvaluateCommand());
deps_log.Close();
builder.command_runner_.release();
}
}
#ifdef _WIN32
TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) {
string err;
const char* manifest =
"rule cc\n command = cc $in\n depfile = $out.d\n deps = gcc\n"
"build a/b\\c\\d/e/fo$ o.o: cc x\\y/z\\foo.c\n";
fs_.Create("x/y/z/foo.c", "");
{
State state;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
// Run the build once, everything should be ok.
DepsLog deps_log;
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err));
ASSERT_EQ("", err);
// Note, different slashes from manifest.
fs_.Create("a/b\\c\\d/e/fo o.o.d",
"a\\b\\c\\d\\e\\fo\\ o.o: blah.h bar.h\n");
EXPECT_TRUE(builder.Build(&err));
EXPECT_EQ("", err);
deps_log.Close();
builder.command_runner_.release();
}
{
State state;
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
DepsLog deps_log;
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
ASSERT_EQ("", err);
Builder builder(&state, config_, NULL, &deps_log, &fs_);
builder.command_runner_.reset(&command_runner_);
Edge* edge = state.edges_.back();
state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing.
EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err));
ASSERT_EQ("", err);
// Expect three new edges: one generating fo o.o, and two more from
// loading the depfile.
ASSERT_EQ(3u, state.edges_.size());
// Expect our edge to now have three inputs: foo.c and two headers.
ASSERT_EQ(3u, edge->inputs_.size());
// Expect the command line we generate to only use the original input.
// Note, slashes from manifest, not .d.
ASSERT_EQ("cc x\\y/z\\foo.c", edge->EvaluateCommand());
deps_log.Close();
builder.command_runner_.release();
}
}
#endif
/// Check that a restat rule doesn't clear an edge if the depfile is missing.
/// Follows from: https://github.com/ninja-build/ninja/issues/603
TEST_F(BuildTest, RestatMissingDepfile) {
const char* manifest =
"rule true\n"
" command = true\n" // Would be "write if out-of-date" in reality.
" restat = 1\n"
"build header.h: true header.in\n"
"build out: cat header.h\n"
" depfile = out.d\n";
fs_.Create("header.h", "");
fs_.Tick();
fs_.Create("out", "");
fs_.Create("header.in", "");
// Normally, only 'header.h' would be rebuilt, as
// its rule doesn't touch the output and has 'restat=1' set.
// But we are also missing the depfile for 'out',
// which should force its command to run anyway!
RebuildTarget("out", manifest);
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
}
/// Check that a restat rule doesn't clear an edge if the deps are missing.
/// https://github.com/ninja-build/ninja/issues/603
TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
string err;
const char* manifest =
"rule true\n"
" command = true\n" // Would be "write if out-of-date" in reality.
" restat = 1\n"
"build header.h: true header.in\n"
"build out: cat header.h\n"
" deps = gcc\n"
" depfile = out.d\n";
// Build once to populate ninja deps logs from out.d
fs_.Create("header.in", "");
fs_.Create("out.d", "out: header.h");
fs_.Create("header.h", "");
RebuildTarget("out", manifest, "build_log", "ninja_deps");
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// Sanity: this rebuild should be NOOP
RebuildTarget("out", manifest, "build_log", "ninja_deps");
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
// Touch 'header.in' and switch to a blank dependencies log (create a different one).
// Building header.h triggers the 'restat' output cleanup.
// Validate that out is rebuilt nevertheless, as its deps are missing.
fs_.Tick();
fs_.Create("header.in", "");
// (switch to a new blank deps_log "ninja_deps2")
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// Sanity: this build should be NOOP
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
// Check that invalidating deps by target timestamp also works here
// Repeat the test but touch target instead of blanking the log.
fs_.Tick();
fs_.Create("header.in", "");
fs_.Create("out", "");
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
// And this build should be NOOP again
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, WrongOutputInDepfileCausesRebuild) {
string err;
const char* manifest =
"rule cc\n"
" command = cc $in\n"
" depfile = $out.d\n"
"build foo.o: cc foo.c\n";
fs_.Create("foo.c", "");
fs_.Create("foo.o", "");
fs_.Create("header.h", "");
fs_.Create("foo.o.d", "bar.o.d: header.h\n");
RebuildTarget("foo.o", manifest, "build_log", "ninja_deps");
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
TEST_F(BuildTest, Console) {
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule console\n"
" command = console\n"
" pool = console\n"
"build cons: console in.txt\n"));
fs_.Create("in.txt", "");
string err;
EXPECT_TRUE(builder_.AddTarget("cons", &err));
ASSERT_EQ("", err);
EXPECT_TRUE(builder_.Build(&err));
EXPECT_EQ("", err);
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
}
*/
}
| 30.994424 | 97 | 0.617309 |
4b753a695dd39effec2b07f627e0bc9bc4b0f391 | 6,506 | // Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use self::chain::types::NoopAdapter;
use self::chain::ErrorKind;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::global::{self, ChainTypes};
use self::core::libtx::{self, build};
use self::core::pow::Difficulty;
use self::core::{consensus, pow};
use self::keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use self::util::{Mutex, RwLock, StopState};
use chrono::Duration;
use env_logger;
use grin_chain as chain;
use grin_core as core;
use grin_keychain as keychain;
use grin_store as store;
use grin_util as util;
use std::fs;
use std::sync::Arc;
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
}
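// Spending a coinbase output before `coinbase_maturity` blocks have passed
// must be rejected as ImmatureCoinbase; after mining past the maturity
// window the same spend is accepted.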
#[test]
fn test_coinbase_maturity() {
let _ = env_logger::init();
clean_output_dir(".grin");
global::set_mining_mode(ChainTypes::AutomatedTesting);
let genesis_block = pow::mine_genesis_block().unwrap();
let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
let db_env = Arc::new(store::new_env(".grin".to_string()));
let chain = chain::Chain::init(
".grin".to_string(),
db_env,
Arc::new(NoopAdapter {}),
genesis_block,
pow::verify_size,
verifier_cache,
false,
Arc::new(Mutex::new(StopState::new())),
)
.unwrap();
let prev = chain.head_header().unwrap();
let keychain = ExtKeychain::from_random_seed(false).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
let key_id4 = ExtKeychainPath::new(1, 4, 0, 0, 0).to_identifier();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
let reward = libtx::reward::output(&keychain, &key_id1, 0).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
chain.set_txhashset_roots(&mut block).unwrap();
pow::pow_size(
&mut block.header,
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
)
.unwrap();
assert_eq!(block.outputs().len(), 1);
let coinbase_output = block.outputs()[0];
assert!(coinbase_output.is_coinbase());
chain
.process_block(block.clone(), chain::Options::MINE)
.unwrap();
let prev = chain.head_header().unwrap();
let amount = consensus::REWARD;
let lock_height = 1 + global::coinbase_maturity();
assert_eq!(lock_height, 4);
// here we build a tx that attempts to spend the earlier coinbase output
// this is not a valid tx as the coinbase output cannot be spent yet
let coinbase_txn = build::transaction(
vec![
build::coinbase_input(amount, key_id1.clone()),
build::output(amount - 2, key_id2.clone()),
build::with_fee(2),
],
&keychain,
)
.unwrap();
let txs = vec![coinbase_txn.clone()];
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(&keychain, &key_id3, fees).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
chain.set_txhashset_roots(&mut block).unwrap();
// Confirm the tx attempting to spend the coinbase output
// is not valid at the current block height given the current chain state.
match chain.verify_coinbase_maturity(&coinbase_txn) {
Ok(_) => {}
Err(e) => match e.kind() {
ErrorKind::ImmatureCoinbase => {}
_ => panic!("Expected transaction error with immature coinbase."),
},
}
pow::pow_size(
&mut block.header,
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
)
.unwrap();
// mine enough blocks to increase the height sufficiently for
// coinbase to reach maturity and be spendable in the next block
for _ in 0..3 {
let prev = chain.head_header().unwrap();
let keychain = ExtKeychain::from_random_seed(false).unwrap();
let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
chain.set_txhashset_roots(&mut block).unwrap();
pow::pow_size(
&mut block.header,
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
)
.unwrap();
chain.process_block(block, chain::Options::MINE).unwrap();
}
let prev = chain.head_header().unwrap();
// Confirm the tx spending the coinbase output is now valid.
// The coinbase output has matured sufficiently based on current chain state.
chain.verify_coinbase_maturity(&coinbase_txn).unwrap();
let txs = vec![coinbase_txn];
let fees = txs.iter().map(|tx| tx.fee()).sum();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
let reward = libtx::reward::output(&keychain, &key_id4, fees).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
chain.set_txhashset_roots(&mut block).unwrap();
pow::pow_size(
&mut block.header,
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
)
.unwrap();
let result = chain.process_block(block, chain::Options::MINE);
match result {
Ok(_) => (),
Err(_) => panic!("we did not expect an error here"),
};
}
| 32.858586 | 92 | 0.713956 |
d788204c851a4f8c24c0d8dda54da5a297c57a13 | 8,813 | //! Represents AVM1 scope chain resolution.
use crate::avm1::activation::Activation;
use crate::avm1::callable_value::CallableValue;
use crate::avm1::error::Error;
use crate::avm1::property::Attribute;
use crate::avm1::{AvmString, Object, ScriptObject, TObject, Value};
use gc_arena::{Collect, GcCell, MutationContext};
use std::cell::Ref;
/// Indicates what kind of scope a scope is.
#[derive(Copy, Clone, Debug, PartialEq, Collect)]
#[collect(require_static)]
pub enum ScopeClass {
/// Scope represents global scope.
Global,
/// Target represents timeline scope. All timeline actions execute with
/// the current clip object in lieu of a local scope, and the timeline scope
/// can be changed via `tellTarget`.
Target,
/// Scope represents local scope and is inherited when a closure is defined.
Local,
/// Scope represents an object added to the scope chain with `with`.
/// It is not inherited when closures are defined.
With,
}
/// Represents a scope chain for an AVM1 activation.
#[derive(Debug, Collect)]
#[collect(no_drop)]
pub struct Scope<'gc> {
parent: Option<GcCell<'gc, Scope<'gc>>>,
class: ScopeClass,
values: Object<'gc>,
}
impl<'gc> Scope<'gc> {
/// Construct a global scope (one without a parent).
pub fn from_global_object(globals: Object<'gc>) -> Scope<'gc> {
Scope {
parent: None,
class: ScopeClass::Global,
values: globals,
}
}
/// Construct a child scope of another scope.
pub fn new_local_scope(parent: GcCell<'gc, Self>, mc: MutationContext<'gc, '_>) -> Scope<'gc> {
Scope {
parent: Some(parent),
class: ScopeClass::Local,
values: ScriptObject::object_cell(mc, None),
}
}
/// Construct a scope for use with `tellTarget` code where the timeline
/// scope has been replaced with another given object.
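///
/// The chain is copied from the bottom up, and every Target scope has its
/// values swapped for `clip`; Local, With and Global scopes are left as-is.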
pub fn new_target_scope(
mut parent: GcCell<'gc, Self>,
clip: Object<'gc>,
mc: MutationContext<'gc, '_>,
) -> GcCell<'gc, Self> {
let mut bottom_scope = None;
let mut top_scope: Option<GcCell<'gc, Self>> = None;
loop {
let next_scope = GcCell::allocate(
mc,
Self {
parent: None,
class: parent.read().class,
values: parent.read().values,
},
);
if parent.read().class == ScopeClass::Target {
next_scope.write(mc).values = clip;
}
if bottom_scope.is_none() {
bottom_scope = Some(next_scope);
}
if let Some(ref scope) = top_scope {
scope.write(mc).parent = Some(next_scope);
}
top_scope = Some(next_scope);
let grandparent = parent.read().parent;
if let Some(grandparent) = grandparent {
parent = grandparent;
} else {
break;
}
}
bottom_scope.unwrap_or_else(|| {
GcCell::allocate(
mc,
Self {
parent: None,
class: ScopeClass::Global,
values: ScriptObject::object_cell(mc, None),
},
)
})
}
/// Construct a with scope to be used as the scope during a with block.
///
/// A with block adds an object to the top of the scope chain, so unqualified
/// references will try to resolve on that object first.
pub fn new_with_scope(
parent_scope: GcCell<'gc, Self>,
with_object: Object<'gc>,
mc: MutationContext<'gc, '_>,
) -> GcCell<'gc, Self> {
GcCell::allocate(
mc,
Scope {
parent: Some(parent_scope),
class: ScopeClass::With,
values: with_object,
},
)
}
/// Construct an arbitrary scope
pub fn new(
parent: GcCell<'gc, Self>,
class: ScopeClass,
with_object: Object<'gc>,
) -> Scope<'gc> {
Scope {
parent: Some(parent),
class,
values: with_object,
}
}
/// Returns a reference to the current local scope object.
pub fn locals(&self) -> &Object<'gc> {
&self.values
}
/// Returns a reference to the current local scope object.
pub fn locals_cell(&self) -> Object<'gc> {
self.values
}
/// Returns a reference to the current local scope object for mutation.
#[allow(dead_code)]
pub fn locals_mut(&mut self) -> &mut Object<'gc> {
&mut self.values
}
/// Returns a reference to the parent scope object.
pub fn parent(&self) -> Option<Ref<Scope<'gc>>> {
match self.parent {
Some(ref p) => Some(p.read()),
None => None,
}
}
/// Returns a reference to the parent scope object.
pub fn parent_cell(&self) -> Option<GcCell<'gc, Scope<'gc>>> {
self.parent
}
/// Resolve a particular value in the scope chain, along with the object that
/// this value would expect as its `this` parameter if called.
///
/// Because scopes are object chains, the same rules for `Object::get`
/// still apply here. If the name is not found anywhere in the chain, an
/// uncallable `undefined` is returned.
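///
/// For example, inside a `with (obj)` block a bare `foo` is looked up on
/// `obj` first, then on the function's locals, then on the timeline clip,
/// and finally on the globals.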
pub fn resolve(
&self,
name: AvmString<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<CallableValue<'gc>, Error<'gc>> {
if self.locals().has_property(activation, name) {
return self
.locals()
.get(name, activation)
.map(|v| CallableValue::Callable(self.locals_cell(), v));
}
if let Some(scope) = self.parent() {
return scope.resolve(name, activation);
}
Ok(CallableValue::UnCallable(Value::Undefined))
}
/// Update a particular value in the scope chain.
///
/// Traverses the scope chain in search of a value. If it's found, it's overwritten.
/// The traversal stops at Target scopes, which represent the movie clip
/// timeline the code is executing in.
/// If the value is not found, it is defined on this Target scope.
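///
/// For example, assigning to an undeclared `x` inside a function walks past
/// the local activation and defines `x` on the enclosing timeline (Target)
/// scope.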
pub fn set(
&self,
name: AvmString<'gc>,
value: Value<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<(), Error<'gc>> {
if self.class == ScopeClass::Target || self.locals().has_property(activation, name) {
// Value found on this object, so overwrite it.
// Or we've hit the executing movie clip, so create it here.
self.locals().set(name, value, activation)
} else if let Some(scope) = self.parent() {
// Traverse the scope chain in search of the value.
scope.set(name, value, activation)
} else {
// This probably shouldn't happen -- all AVM1 code runs in reference to some MovieClip,
// so we should always have a MovieClip scope.
// Define on the top-level scope.
debug_assert!(false, "Scope::set: No top-level movie clip scope");
self.locals().set(name, value, activation)
}
}
/// Define a named local variable on the scope.
///
/// If the property does not already exist on the local scope, it will be created.
/// Otherwise, the existing property will be set to `value`. This does not crawl the scope
/// chain. Any properties with the same name deeper in the scope chain will be shadowed.
pub fn define_local(
&self,
name: AvmString<'gc>,
value: Value<'gc>,
activation: &mut Activation<'_, 'gc, '_>,
) -> Result<(), Error<'gc>> {
self.locals().set(name, value, activation)
}
/// Create a local property on the activation.
///
/// This inserts a value as a stored property on the local scope. If the property already
/// exists, it will be forcefully overwritten. Used internally to initialize objects.
pub fn force_define_local(
&self,
name: AvmString<'gc>,
value: Value<'gc>,
mc: MutationContext<'gc, '_>,
) {
self.locals()
.define_value(mc, name, value, Attribute::empty());
}
/// Delete a value from scope
pub fn delete(&self, activation: &mut Activation<'_, 'gc, '_>, name: AvmString<'gc>) -> bool {
if self.locals().has_property(activation, name) {
return self.locals().delete(activation, name);
}
if let Some(scope) = self.parent() {
return scope.delete(activation, name);
}
false
}
}
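// Illustrative sketch (not part of the original file): a typical AVM1 chain is
// global -> target -> with. Assuming a `GcCell<'gc, Scope<'gc>>` named `globals`,
// objects `clip_object` and `with_object`, and a `MutationContext` `mc`:
//
//     let target = GcCell::allocate(mc, Scope::new(globals, ScopeClass::Target, clip_object));
//     let scope = Scope::new_with_scope(target, with_object, mc);
//
// `resolve` then consults `with_object` first, falls back to the timeline
// (Target) scope, and finally the globals; `set` writes to the first scope that
// already defines the name, creating it on the Target scope otherwise.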
| 33.131579 | 133 | 0.569386 |
56984bfd6a2d3fcab90fc600a1cdd6cf8d458ff2 | 6,135 | use std::fmt;
use std::iter;
/// Valid Rust identifier
#[derive(Eq, PartialEq, Debug, Clone)]
pub(crate) struct RustIdent(String);
impl RustIdent {
pub fn new(s: &str) -> RustIdent {
assert!(!s.is_empty());
assert!(!s.contains("/"), "{}", s);
assert!(!s.contains("."), "{}", s);
assert!(!s.contains(":"), "{}", s);
RustIdent(s.to_owned())
}
pub fn super_ident() -> RustIdent {
RustIdent::new("super")
}
pub fn get(&self) -> &str {
&self.0
}
pub fn _into_string(self) -> String {
self.0
}
pub fn to_path(&self) -> RustIdentWithPath {
RustIdentWithPath::from(&self.0)
}
}
impl fmt::Display for RustIdent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.get(), f)
}
}
impl From<&'_ str> for RustIdent {
fn from(s: &str) -> Self {
RustIdent::new(s)
}
}
impl From<String> for RustIdent {
fn from(s: String) -> Self {
RustIdent::new(&s)
}
}
#[derive(Default, Eq, PartialEq, Debug, Clone)]
pub(crate) struct RustRelativePath {
path: Vec<RustIdent>,
}
impl RustRelativePath {
pub fn into_path(self) -> RustPath {
RustPath {
absolute: false,
path: self,
}
}
pub fn _empty() -> RustRelativePath {
RustRelativePath { path: Vec::new() }
}
pub fn from_components<I: IntoIterator<Item = RustIdent>>(i: I) -> RustRelativePath {
RustRelativePath {
path: i.into_iter().collect(),
}
}
pub fn is_empty(&self) -> bool {
self.path.is_empty()
}
pub fn first(&self) -> Option<RustIdent> {
self.path.iter().cloned().next()
}
pub fn remove_first(&mut self) -> Option<RustIdent> {
if self.path.is_empty() {
None
} else {
Some(self.path.remove(0))
}
}
pub fn prepend_ident(&mut self, ident: RustIdent) {
self.path.insert(0, ident);
}
pub fn append(mut self, path: RustRelativePath) -> RustRelativePath {
for c in path.path {
self.path.push(c);
}
self
}
pub fn push_ident(&mut self, ident: RustIdent) {
self.path.push(ident);
}
pub fn _append_ident(mut self, ident: RustIdent) -> RustRelativePath {
self.push_ident(ident);
self
}
pub fn to_reverse(&self) -> RustRelativePath {
RustRelativePath::from_components(
iter::repeat(RustIdent::super_ident()).take(self.path.len()),
)
}
}
#[derive(Default, Eq, PartialEq, Debug, Clone)]
pub(crate) struct RustPath {
absolute: bool,
path: RustRelativePath,
}
impl fmt::Display for RustRelativePath {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, c) in self.path.iter().enumerate() {
if i != 0 {
write!(f, "::")?;
}
write!(f, "{}", c)?;
}
Ok(())
}
}
impl From<&'_ str> for RustRelativePath {
fn from(s: &str) -> Self {
RustRelativePath {
path: s.split("::").map(RustIdent::from).collect(),
}
}
}
impl RustPath {
pub fn super_path() -> RustPath {
RustPath::from("super")
}
pub fn is_absolute(&self) -> bool {
self.absolute
}
pub fn is_empty(&self) -> bool {
assert!(!self.absolute);
self.path.is_empty()
}
pub fn with_ident(self, ident: RustIdent) -> RustIdentWithPath {
RustIdentWithPath { path: self, ident }
}
pub fn first(&self) -> Option<RustIdent> {
assert!(!self.absolute);
self.path.first()
}
pub fn remove_first(&mut self) -> Option<RustIdent> {
assert!(!self.absolute);
self.path.remove_first()
}
pub fn to_reverse(&self) -> RustPath {
assert!(!self.absolute);
RustPath {
absolute: false,
path: self.path.to_reverse(),
}
}
pub fn prepend_ident(&mut self, ident: RustIdent) {
assert!(!self.absolute);
self.path.prepend_ident(ident);
}
pub fn append(self, path: RustPath) -> RustPath {
if path.absolute {
path
} else {
RustPath {
absolute: self.absolute,
path: self.path.append(path.path),
}
}
}
pub fn append_ident(mut self, ident: RustIdent) -> RustPath {
self.path.path.push(ident);
self
}
pub fn append_with_ident(self, path: RustIdentWithPath) -> RustIdentWithPath {
self.append(path.path).with_ident(path.ident)
}
}
impl From<&'_ str> for RustPath {
fn from(s: &str) -> Self {
let (s, absolute) = if s.starts_with("::") {
(&s[2..], true)
} else {
(s, false)
};
RustPath {
absolute,
path: RustRelativePath::from(s),
}
}
}
impl From<String> for RustPath {
fn from(s: String) -> Self {
RustPath::from(&s[..])
}
}
impl fmt::Display for RustPath {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.absolute {
write!(f, "::")?;
}
write!(f, "{}", self.path)
}
}
#[derive(Eq, PartialEq, Debug, Clone)]
pub(crate) struct RustIdentWithPath {
pub path: RustPath,
pub ident: RustIdent,
}
impl RustIdentWithPath {
pub fn new(s: String) -> RustIdentWithPath {
let mut path = RustPath::from(s);
let ident = path.path.path.pop().unwrap();
RustIdentWithPath { path, ident }
}
pub fn prepend_ident(&mut self, ident: RustIdent) {
self.path.prepend_ident(ident)
}
pub fn to_path(&self) -> RustPath {
self.path.clone().append_ident(self.ident.clone())
}
}
impl<S: Into<String>> From<S> for RustIdentWithPath {
fn from(s: S) -> Self {
RustIdentWithPath::new(s.into())
}
}
impl fmt::Display for RustIdentWithPath {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.to_path(), f)
}
}
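// Illustrative tests (added sketch, not part of the original file): exercise
// the parse/print round trips defined by the `From` and `Display` impls above.
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn absolute_and_relative_paths() {
        // A leading "::" marks an absolute path and is preserved on display.
        assert_eq!(format!("{}", RustPath::from("::foo::bar")), "::foo::bar");
        assert_eq!(format!("{}", RustPath::from("foo::bar")), "foo::bar");
    }

    #[test]
    fn ident_with_path_splits_last_component() {
        let p = RustIdentWithPath::new("a::b::C".to_owned());
        assert_eq!(p.ident.get(), "C");
        assert_eq!(format!("{}", p.to_path()), "a::b::C");
    }

    #[test]
    fn reverse_path_is_supers() {
        // Reversing a relative path yields one `super` per component.
        let r = RustRelativePath::from("a::b").to_reverse();
        assert_eq!(format!("{}", r), "super::super");
    }
}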
| 22.891791 | 89 | 0.540179 |
f7fd757627f587dc52c93984557371df223a74a7 | 40,813 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod operations {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.SignalRService/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod signal_r {
use crate::models::*;
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
location: &str,
parameters: Option<&NameAvailabilityParameters>,
subscription_id: &str,
) -> std::result::Result<NameAvailability, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/locations/{}/checkNameAvailability",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(|source| check_name_availability::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| check_name_availability::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| check_name_availability::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| check_name_availability::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| check_name_availability::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: NameAvailability =
serde_json::from_slice(rsp_body).map_err(|source| check_name_availability::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(check_name_availability::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod check_name_availability {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<SignalRResourceList, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/SignalR",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(|source| list_by_subscription::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_by_subscription::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_by_subscription::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_by_subscription::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResourceList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_subscription::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_subscription::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_subscription {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<SignalRResourceList, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(|source| list_by_resource_group::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_by_resource_group::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_by_resource_group::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_by_resource_group::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResourceList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_resource_group::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_resource_group::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRKeys, list_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}/listKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| list_keys::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_keys::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_keys::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_keys::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRKeys = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_keys::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_keys {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn regenerate_key(
operation_config: &crate::OperationConfig,
parameters: Option<&RegenerateKeyParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRKeys, regenerate_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}/regenerateKey",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| regenerate_key::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| regenerate_key::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| regenerate_key::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| regenerate_key::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| regenerate_key::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: SignalRKeys =
serde_json::from_slice(rsp_body).map_err(|source| regenerate_key::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(regenerate_key::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod regenerate_key {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| get::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
parameters: Option<&SignalRCreateParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| create_or_update::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| create_or_update::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| create_or_update::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| create_or_update::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| create_or_update::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource =
serde_json::from_slice(rsp_body).map_err(|source| create_or_update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource =
serde_json::from_slice(rsp_body).map_err(|source| create_or_update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(SignalRResource),
Created201(SignalRResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
parameters: Option<&SignalRUpdateParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| update::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| update::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| update::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| update::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| update::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(SignalRResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| delete::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| delete::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| delete::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| delete::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
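// Illustrative sketch (not part of the generated file): calling one of the
// operations above. `config` stands for a `crate::OperationConfig` built
// elsewhere (base path, credentials, API version); the string arguments are
// placeholders.
//
//     let resource = signal_r::get(&config, "<subscription-id>", "<resource-group>", "<name>").await?;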
pub mod usages {
use crate::models::*;
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<SignalRUsageList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/locations/{}/usages",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRUsageList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
| 48.995198 | 132 | 0.574939 |
e45bbadb287be9131c78166b3e5df7ac7e778c9d | 252 | //! As of autocfg 0.1.8, this is simply re-exporting from autocfg 1.1.0 or later.
//! Please upgrade and refer to the newer version for current documentation!
extern crate autocfg;
pub use autocfg::{emit, new, rerun_env, rerun_path, AutoCfg, Error};
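// Example (build.rs sketch): probe the active compiler and emit cfg flags.
//
//     let ac = autocfg::new();
//     ac.emit_has_type("i128"); // prints `cargo:rustc-cfg=has_i128` when supported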
| 36 | 81 | 0.746032 |
796abe94df1cb093fbf9195b1b4789b2f3b67f75 | 4,946 | //! CDC-ACM serial port example using cortex-m-rtfm.
#![no_main]
#![no_std]
#![allow(non_snake_case)]
#![allow(dead_code)]
#![allow(unused_imports)]
use core::{
panic::PanicInfo,
sync::atomic::{self, Ordering},
str::from_utf8_unchecked,
ptr::{
read_volatile,
write_volatile,
},
convert::TryFrom,
mem,
ops::RangeInclusive,
};
use cortex_m::{
interrupt,
asm::*,
};
use embedded_hal::digital::v2::OutputPin;
use rtfm::app;
use stm32f1xx_hal::{
prelude::*,
time::Hertz,
};
use stm32f1xx_hal::{
usb::{
Peripheral,
UsbBus,
UsbBusType,
},
pac::FLASH,
};
use usb_device::{
bus,
device::{
UsbDevice,
UsbDeviceBuilder,
UsbVidPid,
},
UsbError,
};
use usbd_serial::{CdcAcmClass, SerialPort, USB_CLASS_CDC};
use itm_logger::*;
use usb_bootloader::hardware_extra::*;
// VID and PID are from dapboot bluepill bootloader
const USB_VID: u16 = 0x1209;
const USB_PID: u16 = 0xDB42;
const USB_CLASS_MISCELLANEOUS: u8 = 0xEF;
#[cfg(feature = "itm")]
use cortex_m::{iprintln, peripheral::ITM};
#[app(device = stm32f1xx_hal::stm32, peripherals = true)]
const APP: () = {
struct Resources {
usb_dev: UsbDevice<'static, UsbBusType>,
serial: SerialPort<'static, UsbBusType>,
}
#[init]
fn init(cx: init::Context) -> init::LateResources {
static mut USB_BUS: Option<bus::UsbBusAllocator<UsbBusType>> = None;
#[cfg(feature = "itm")]
{
update_tpiu_baudrate(8_000_000, ITM_BAUD_RATE).expect("Failed to reset TPIU baudrate");
logger_init();
}
info!("ITM reset ok.");
let mut flash = cx.device.FLASH.constrain();
let mut rcc = cx.device.RCC.constrain();
let clocks = rcc
.cfgr
.use_hse(8.mhz())
.sysclk(48.mhz())
.pclk1(24.mhz())
.freeze(&mut flash.acr);
#[cfg(feature = "itm")]
{
let sysclk: Hertz = clocks.sysclk().into();
update_tpiu_baudrate(sysclk.0, ITM_BAUD_RATE).expect("Failed to reset TPIU baudrate");
}
assert!(clocks.usbclk_valid());
let flash_kib = FlashSize::get().kibi_bytes();
info!("Flash: {} KiB", flash_kib);
let mut gpioa = cx.device.GPIOA.split(&mut rcc.apb2);
        // The Blue Pill board has a pull-up resistor on the D+ line.
        // Pull the D+ pin down to send a RESET condition to the USB bus.
        // This forced reset is needed only for development; without it the host
        // will not reset your device when you upload new firmware.
let mut usb_dp = gpioa.pa12.into_push_pull_output(&mut gpioa.crh);
usb_dp.set_low().unwrap();
delay(clocks.sysclk().0 / 100);
let usb_dm = gpioa.pa11;
let usb_dp = usb_dp.into_floating_input(&mut gpioa.crh);
let usb = Peripheral {
usb: cx.device.USB,
pin_dm: usb_dm,
pin_dp: usb_dp,
};
*USB_BUS = Some(UsbBus::new(usb));
let serial = SerialPort::new(USB_BUS.as_ref().unwrap());
let serial_number = get_serial_number();
info!("Serial number: {}", serial_number);
let usb_dev = UsbDeviceBuilder::new(USB_BUS.as_ref().unwrap(), UsbVidPid(USB_VID, USB_PID))
.manufacturer("Fake company")
.product("Serial port")
.serial_number(serial_number)
.self_powered(true)
.device_class(USB_CLASS_CDC)
.build();
init::LateResources { usb_dev, serial }
}
#[task(binds = USB_HP_CAN_TX, resources = [usb_dev, serial])]
fn usb_tx(mut cx: usb_tx::Context) {
usb_poll(&mut cx.resources.usb_dev, &mut cx.resources.serial);
}
#[task(binds = USB_LP_CAN_RX0, resources = [usb_dev, serial])]
fn usb_rx0(mut cx: usb_rx0::Context) {
usb_poll(&mut cx.resources.usb_dev, &mut cx.resources.serial);
}
};
fn usb_poll<B: bus::UsbBus>(
usb_dev: &mut UsbDevice<'static, B>,
serial: &mut SerialPort<'static, B>,
) {
if !usb_dev.poll(&mut [serial]) {
return;
}
let mut buf = [0; 64];
match serial.read(&mut buf) {
Ok(count) => {
let _ = serial.write(&buf[..count]);
},
Err(UsbError::WouldBlock) => {},
Err(e) => info!("Err: {:?}", e),
}
}
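// Note: `usb_poll` above implements a byte-for-byte echo: every chunk read
// from the CDC-ACM port is written straight back, so a host terminal (e.g.
// `screen` or `minicom`) sees its own input mirrored.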
#[panic_handler]
fn panic(
#[cfg_attr(not(feature = "itm"), allow(unused_variables))]
info: &PanicInfo
) -> ! {
interrupt::disable();
#[cfg(feature = "itm")]
{
let itm = unsafe { &mut *ITM::ptr() };
let stim = &mut itm.stim[0];
iprintln!(stim, "{}", info);
}
loop {
// add some side effect to prevent this from turning into a UDF instruction
// see rust-lang/rust#28728 for details
atomic::compiler_fence(Ordering::SeqCst)
}
}
| 26.308511 | 99 | 0.577436 |
9c188ad7282448ecc1099079e7669101a2143cce | 1,795 | // Copyright (c) 2021-present, Cruise LLC
//
// This source code is licensed under the Apache License, Version 2.0,
// found in the LICENSE-APACHE file in the root directory of this source tree.
// You may not use this file except in compliance with the License.
use std::cell::UnsafeCell;
use std::ptr;
use super::StrongPtr;
use crate::runtime::{self, Object};
// Our pointer must have the same address even if we are moved, so Box it.
// Although loading the WeakPtr may modify the pointer, it is thread safe,
// so we must use an UnsafeCell to get a *mut without self being mutable.
/// A pointer that weakly references an object, allowing to safely check
/// whether it has been deallocated.
pub struct WeakPtr(Box<UnsafeCell<*mut Object>>);
impl WeakPtr {
/// Constructs a `WeakPtr` to the given object.
/// # Safety
/// Unsafe because the caller must ensure the given object pointer is valid.
pub unsafe fn new(obj: *mut Object) -> Self {
let ptr = Box::new(UnsafeCell::new(ptr::null_mut()));
runtime::objc_initWeak(ptr.get(), obj);
WeakPtr(ptr)
}
/// Loads the object self points to, returning a `StrongPtr`.
/// If the object has been deallocated, the returned pointer will be null.
pub fn load(&self) -> StrongPtr {
unsafe {
let ptr = runtime::objc_loadWeakRetained(self.0.get());
StrongPtr::new(ptr)
}
}
}
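// Illustrative sketch (not part of the original file): checking liveness
// through a weak reference. `obj` is assumed to be a valid `*mut Object`, and
// `StrongPtr` is assumed to deref to the raw pointer (as in the companion
// strong-pointer type), so a null check reveals deallocation.
//
//     let weak = unsafe { WeakPtr::new(obj) };
//     let strong = weak.load();
//     if (*strong).is_null() { /* the referent has been deallocated */ }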
impl Drop for WeakPtr {
fn drop(&mut self) {
unsafe {
runtime::objc_destroyWeak(self.0.get());
}
}
}
impl Clone for WeakPtr {
fn clone(&self) -> Self {
let ptr = Box::new(UnsafeCell::new(ptr::null_mut()));
unsafe {
runtime::objc_copyWeak(ptr.get(), self.0.get());
}
WeakPtr(ptr)
}
}
| 30.948276 | 80 | 0.640111 |
039bd1e8f21aabddbdd881d67e87b1b3e7a0358b | 19,301 | // Copyright 2018-2020 argmin developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! * [More-Thuente line search](struct.MoreThuenteLineSearch.html)
//!
//! TODO: Apparently it is missing stopping criteria!
//!
//! This implementation follows the excellent MATLAB implementation of Dianne P. O'Leary at
//! http://www.cs.umd.edu/users/oleary/software/
//!
//! # Reference
//!
//! Jorge J. More and David J. Thuente. "Line search algorithms with guaranteed sufficient
//! decrease." ACM Trans. Math. Softw. 20, 3 (September 1994), 286-307.
//! DOI: https://doi.org/10.1145/192115.192132
use crate::prelude::*;
#[cfg(feature = "serde1")]
use serde::{Deserialize, Serialize};
use std::default::Default;
/// The More-Thuente line search is a method to find a step length which obeys the strong Wolfe
/// conditions.
///
/// [Example](https://github.com/argmin-rs/argmin/blob/master/examples/morethuente.rs)
///
/// # References
///
/// This implementation follows the excellent MATLAB implementation of Dianne P. O'Leary at
/// http://www.cs.umd.edu/users/oleary/software/
///
/// [0] Jorge J. More and David J. Thuente. "Line search algorithms with guaranteed sufficient
/// decrease." ACM Trans. Math. Softw. 20, 3 (September 1994), 286-307.
/// DOI: https://doi.org/10.1145/192115.192132
#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
#[derive(Clone)]
pub struct MoreThuenteLineSearch<P> {
/// Search direction (builder)
search_direction_b: Option<P>,
/// initial parameter vector
init_param: P,
/// initial cost
finit: f64,
/// initial gradient
init_grad: P,
/// Search direction
search_direction: P,
/// Search direction in 1D
dginit: f64,
/// dgtest
dgtest: f64,
/// c1
ftol: f64,
/// c2
gtol: f64,
    /// extrapolation factor used to widen the step bound while the minimum is not yet bracketed
    xtrapf: f64,
    /// width of the interval of uncertainty
    width: f64,
    /// width of the interval of uncertainty from the previous iteration
    width1: f64,
/// xtol
xtol: f64,
/// alpha
alpha: f64,
/// stpmin
stpmin: f64,
/// stpmax
stpmax: f64,
/// current step
stp: Step,
/// stx
stx: Step,
/// sty
sty: Step,
/// f
f: f64,
/// bracketed
brackt: bool,
/// stage1
stage1: bool,
/// infoc
infoc: usize,
}
#[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))]
#[derive(Default, Clone)]
struct Step {
pub x: f64,
pub fx: f64,
pub gx: f64,
}
impl Step {
pub fn new(x: f64, fx: f64, gx: f64) -> Self {
Step { x, fx, gx }
}
}
impl<P: Default> MoreThuenteLineSearch<P> {
/// Constructor
pub fn new() -> Self {
MoreThuenteLineSearch {
search_direction_b: None,
init_param: P::default(),
finit: std::f64::INFINITY,
init_grad: P::default(),
search_direction: P::default(),
dginit: 0.0,
dgtest: 0.0,
ftol: 1e-4,
gtol: 0.9,
xtrapf: 4.0,
width: std::f64::NAN,
width1: std::f64::NAN,
xtol: 1e-10,
alpha: 1.0,
stpmin: std::f64::EPSILON.sqrt(),
stpmax: std::f64::INFINITY,
stp: Step::default(),
stx: Step::default(),
sty: Step::default(),
f: std::f64::NAN,
brackt: false,
stage1: true,
infoc: 1,
}
}
/// Set c1 and c2 where 0 < c1 < c2 < 1.
pub fn c(mut self, c1: f64, c2: f64) -> Result<Self, Error> {
if c1 <= 0.0 || c1 >= c2 {
return Err(ArgminError::InvalidParameter {
text: "MoreThuenteLineSearch: Parameter c1 must be in (0, c2).".to_string(),
}
.into());
}
if c2 <= c1 || c2 >= 1.0 {
return Err(ArgminError::InvalidParameter {
text: "MoreThuenteLineSearch: Parameter c2 must be in (c1, 1).".to_string(),
}
.into());
}
self.ftol = c1;
self.gtol = c2;
Ok(self)
}
/// set alpha limits
pub fn alpha(mut self, alpha_min: f64, alpha_max: f64) -> Result<Self, Error> {
if alpha_min < 0.0 {
return Err(ArgminError::InvalidParameter {
text: "MoreThuenteLineSearch: alpha_min must be >= 0.0.".to_string(),
}
.into());
}
if alpha_max <= alpha_min {
return Err(ArgminError::InvalidParameter {
text: "MoreThuenteLineSearch: alpha_min must be smaller than alpha_max."
.to_string(),
}
.into());
}
self.stpmin = alpha_min;
self.stpmax = alpha_max;
Ok(self)
}
}
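// Illustrative sketch (not part of the original file): configuring the line
// search before handing it to a solver. `Vec<f64>` stands in for any parameter
// type `P: Default`; the validating setters return `Result`, so errors can be
// propagated with `?` inside a fallible function.
//
//     let linesearch: MoreThuenteLineSearch<Vec<f64>> =
//         MoreThuenteLineSearch::new().c(1e-4, 0.9)?.alpha(0.0, 1.0)?;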
impl<P: Default> Default for MoreThuenteLineSearch<P> {
fn default() -> Self {
MoreThuenteLineSearch::new()
}
}
impl<P> ArgminLineSearch<P> for MoreThuenteLineSearch<P>
where
P: Clone
+ SerializeAlias
+ DeserializeOwnedAlias
+ ArgminSub<P, P>
+ ArgminDot<P, f64>
+ ArgminScaledAdd<P, f64, P>,
{
/// Set search direction
fn set_search_direction(&mut self, search_direction: P) {
self.search_direction_b = Some(search_direction);
}
/// Set initial alpha value
fn set_init_alpha(&mut self, alpha: f64) -> Result<(), Error> {
if alpha <= 0.0 {
return Err(ArgminError::InvalidParameter {
text: "MoreThuenteLineSearch: Initial alpha must be > 0.".to_string(),
}
.into());
}
self.alpha = alpha;
Ok(())
}
}
impl<P, O> Solver<O> for MoreThuenteLineSearch<P>
where
O: ArgminOp<Param = P, Output = f64>,
P: Clone
+ SerializeAlias
+ DeserializeOwnedAlias
+ ArgminSub<P, P>
+ ArgminDot<P, f64>
+ ArgminScaledAdd<P, f64, P>,
{
const NAME: &'static str = "More-Thuente Line search";
fn init(
&mut self,
op: &mut OpWrapper<O>,
state: &IterState<O>,
) -> Result<Option<ArgminIterData<O>>, Error> {
self.search_direction = check_param!(
self.search_direction_b,
"MoreThuenteLineSearch: Search direction not initialized. Call `set_search_direction`."
);
self.init_param = state.get_param();
let cost = state.get_cost();
self.finit = if cost == std::f64::INFINITY {
op.apply(&self.init_param)?
} else {
cost
};
self.init_grad = state
.get_grad()
.unwrap_or_else(|| op.gradient(&self.init_param).unwrap());
self.dginit = self.init_grad.dot(&self.search_direction);
// compute search direction in 1D
if self.dginit >= 0.0 {
return Err(ArgminError::ConditionViolated {
text: "MoreThuenteLineSearch: Search direction must be a descent direction."
.to_string(),
}
.into());
}
self.stage1 = true;
self.brackt = false;
self.dgtest = self.ftol * self.dginit;
self.width = self.stpmax - self.stpmin;
self.width1 = 2.0 * self.width;
self.f = self.finit;
self.stp = Step::new(self.alpha, std::f64::NAN, std::f64::NAN);
self.stx = Step::new(0.0, self.finit, self.dginit);
self.sty = Step::new(0.0, self.finit, self.dginit);
Ok(None)
}
fn next_iter(
&mut self,
op: &mut OpWrapper<O>,
_state: &IterState<O>,
) -> Result<ArgminIterData<O>, Error> {
// set the minimum and maximum steps to correspond to the present interval of uncertainty
let mut info = 0;
let (stmin, stmax) = if self.brackt {
(self.stx.x.min(self.sty.x), self.stx.x.max(self.sty.x))
} else {
(
self.stx.x,
self.stp.x + self.xtrapf * (self.stp.x - self.stx.x),
)
};
// alpha needs to be within bounds
self.stp.x = self.stp.x.max(self.stpmin);
self.stp.x = self.stp.x.min(self.stpmax);
// If an unusual termination is to occur then let alpha be the lowest point obtained so
// far.
if (self.brackt && (self.stp.x <= stmin || self.stp.x >= stmax))
|| (self.brackt && (stmax - stmin) <= self.xtol * stmax)
|| self.infoc == 0
{
self.stp.x = self.stx.x;
}
// Evaluate the function and gradient at new stp.x and compute the directional derivative
let new_param = self
.init_param
.scaled_add(&self.stp.x, &self.search_direction);
self.f = op.apply(&new_param)?;
let new_grad = op.gradient(&new_param)?;
let cur_cost = self.f;
let cur_param = new_param;
let cur_grad = new_grad.clone();
// self.stx.fx = new_cost;
let dg = self.search_direction.dot(&new_grad);
let ftest1 = self.finit + self.stp.x * self.dgtest;
// self.stp.fx = new_cost;
// self.stp.gx = dg;
if (self.brackt && (self.stp.x <= stmin || self.stp.x >= stmax)) || self.infoc == 0 {
info = 6;
}
if (self.stp.x - self.stpmax).abs() < std::f64::EPSILON
&& self.f <= ftest1
&& dg <= self.dgtest
{
info = 5;
}
if (self.stp.x - self.stpmin).abs() < std::f64::EPSILON
&& (self.f > ftest1 || dg >= self.dgtest)
{
info = 4;
}
if self.brackt && stmax - stmin <= self.xtol * stmax {
info = 2;
}
if self.f <= ftest1 && dg.abs() <= self.gtol * (-self.dginit) {
info = 1;
}
if info != 0 {
return Ok(ArgminIterData::new()
.param(cur_param)
.cost(cur_cost)
.grad(cur_grad)
.termination_reason(TerminationReason::LineSearchConditionMet));
}
if self.stage1 && self.f <= ftest1 && dg >= self.ftol.min(self.gtol) * self.dginit {
self.stage1 = false;
}
if self.stage1 && self.f <= self.stp.fx && self.f > ftest1 {
let fm = self.f - self.stp.x * self.dgtest;
let fxm = self.stx.fx - self.stx.x * self.dgtest;
let fym = self.sty.fx - self.sty.x * self.dgtest;
let dgm = dg - self.dgtest;
let dgxm = self.stx.gx - self.dgtest;
let dgym = self.sty.gx - self.dgtest;
let (stx1, sty1, stp1, brackt1, _stmin, _stmax, infoc) = cstep(
Step::new(self.stx.x, fxm, dgxm),
Step::new(self.sty.x, fym, dgym),
Step::new(self.stp.x, fm, dgm),
self.brackt,
stmin,
stmax,
);
self.stx.x = stx1.x;
self.sty.x = sty1.x;
self.stp.x = stp1.x;
self.stx.fx += stx1.x * self.dgtest;
self.sty.fx += sty1.x * self.dgtest;
self.stx.gx += self.dgtest;
self.sty.gx += self.dgtest;
self.brackt = brackt1;
self.stp = stp1;
self.infoc = infoc;
} else {
let (stx1, sty1, stp1, brackt1, _stmin, _stmax, infoc) = cstep(
self.stx.clone(),
self.sty.clone(),
Step::new(self.stp.x, self.f, dg),
self.brackt,
stmin,
stmax,
);
self.stx = stx1;
self.sty = sty1;
self.stp = stp1;
self.f = self.stp.fx;
// dg = self.stp.gx;
self.brackt = brackt1;
self.infoc = infoc;
}
if self.brackt {
if (self.sty.x - self.stx.x).abs() >= 0.66 * self.width1 {
self.stp.x = self.stx.x + 0.5 * (self.sty.x - self.stx.x);
}
self.width1 = self.width;
self.width = (self.sty.x - self.stx.x).abs();
}
// let new_param = self
// .init_param
// .scaled_add(&self.stp.x, &self.search_direction);
// Ok(ArgminIterData::new().param(new_param))
Ok(ArgminIterData::new())
}
}
fn cstep(
stx: Step,
sty: Step,
stp: Step,
brackt: bool,
stpmin: f64,
stpmax: f64,
) -> (Step, Step, Step, bool, f64, f64, usize) {
let mut info: usize = 0;
let bound: bool;
let mut stpf: f64;
let stpc: f64;
let stpq: f64;
let mut brackt = brackt;
// check inputs
if (brackt && (stp.x <= stx.x.min(sty.x) || stp.x >= stx.x.max(sty.x)))
|| stx.gx * (stp.x - stx.x) >= 0.0
|| stpmax < stpmin
{
return (stx, sty, stp, brackt, stpmin, stpmax, info);
}
// determine if the derivatives have opposite sign
let sgnd = stp.gx * (stx.gx / stx.gx.abs());
if stp.fx > stx.fx {
// First case. A higher function value. The minimum is bracketed. If the cubic step is closer to
// stx.x than the quadratic step, the cubic step is taken, else the average of the cubic and
// the quadratic steps is taken.
info = 1;
bound = true;
let theta = 3.0 * (stx.fx - stp.fx) / (stp.x - stx.x) + stx.gx + stp.gx;
let tmp = vec![theta, stx.gx, stp.gx];
let s = tmp.iter().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
let mut gamma = s * ((theta / s).powi(2) - (stx.gx / s) * (stp.gx / s)).sqrt();
if stp.x < stx.x {
gamma = -gamma;
}
let p = (gamma - stx.gx) + theta;
let q = ((gamma - stx.gx) + gamma) + stp.gx;
let r = p / q;
stpc = stx.x + r * (stp.x - stx.x);
stpq = stx.x
+ ((stx.gx / ((stx.fx - stp.fx) / (stp.x - stx.x) + stx.gx)) / 2.0) * (stp.x - stx.x);
if (stpc - stx.x).abs() < (stpq - stx.x).abs() {
stpf = stpc;
} else {
stpf = stpc + (stpq - stpc) / 2.0;
}
brackt = true;
} else if sgnd < 0.0 {
// Second case. A lower function value and derivatives of opposite sign. The minimum is
        // bracketed. If the cubic step is closer to stx.x than the quadratic (secant) step, the
// cubic step is taken, else the quadratic step is taken.
info = 2;
bound = false;
let theta = 3.0 * (stx.fx - stp.fx) / (stp.x - stx.x) + stx.gx + stp.gx;
let tmp = vec![theta, stx.gx, stp.gx];
let s = tmp.iter().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
let mut gamma = s * ((theta / s).powi(2) - (stx.gx / s) * (stp.gx / s)).sqrt();
if stp.x > stx.x {
gamma = -gamma;
}
let p = (gamma - stp.gx) + theta;
let q = ((gamma - stp.gx) + gamma) + stx.gx;
let r = p / q;
stpc = stp.x + r * (stx.x - stp.x);
stpq = stp.x + (stp.gx / (stp.gx - stx.gx)) * (stx.x - stp.x);
if (stpc - stp.x).abs() > (stpq - stp.x).abs() {
stpf = stpc;
} else {
stpf = stpq;
}
brackt = true;
} else if stp.gx.abs() < stx.gx.abs() {
// Third case. A lower function value, derivatives of the same sign, and the magnitude of
// the derivative decreases. The cubic step is only used if the cubic tends to infinity in
// the direction of the step or if the minimum of the cubic is beyond stp.x. Otherwise the
        // cubic step is defined to be either stpmin or stpmax. The quadratic (secant) step is
// also computed and if the minimum is bracketed then the step closest to stx.x is taken,
// else the step farthest away is taken.
info = 3;
bound = true;
let theta = 3.0 * (stx.fx - stp.fx) / (stp.x - stx.x) + stx.gx + stp.gx;
let tmp = vec![theta, stx.gx, stp.gx];
let s = tmp.iter().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
// the case gamma == 0 only arises if the cubic does not tend to infinity in the direction
// of the step.
let mut gamma = s * 0.0f64
.max((theta / s).powi(2) - (stx.gx / s) * (stp.gx / s))
.sqrt();
if stp.x > stx.x {
gamma = -gamma;
}
let p = (gamma - stp.gx) + theta;
let q = (gamma + (stx.gx - stp.gx)) + gamma;
let r = p / q;
if r < 0.0 && gamma != 0.0 {
stpc = stp.x + r * (stx.x - stp.x);
} else if stp.x > stx.x {
stpc = stpmax;
} else {
stpc = stpmin;
}
stpq = stp.x + (stp.gx / (stp.gx - stx.gx)) * (stx.x - stp.x);
if brackt {
if (stp.x - stpc).abs() < (stp.x - stpq).abs() {
stpf = stpc;
} else {
stpf = stpq;
}
} else if (stp.x - stpc).abs() > (stp.x - stpq).abs() {
stpf = stpc;
} else {
stpf = stpq;
}
} else {
        // Fourth case. A lower function value, derivatives of the same sign, and the magnitude of
// the derivative does not decrease. If the minimum is not bracketed, the step is either
// stpmin or stpmax, else the cubic step is taken.
info = 4;
bound = false;
if brackt {
let theta = 3.0 * (stp.fx - sty.fx) / (sty.x - stp.x) + sty.gx + stp.gx;
let tmp = vec![theta, sty.gx, stp.gx];
            // s = max(|theta|, |dy|, |dp|), as in MINPACK's `dcstep`; taking the
            // absolute values keeps the scaling factor positive.
            let s = tmp
                .iter()
                .map(|v| v.abs())
                .max_by(|a, b| a.partial_cmp(b).unwrap())
                .unwrap();
let mut gamma = s * ((theta / s).powi(2) - (sty.gx / s) * (stp.gx / s)).sqrt();
if stp.x > sty.x {
gamma = -gamma;
}
let p = (gamma - stp.gx) + theta;
let q = ((gamma - stp.gx) + gamma) + sty.gx;
let r = p / q;
stpc = stp.x + r * (sty.x - stp.x);
stpf = stpc;
} else if stp.x > stx.x {
stpf = stpmax;
} else {
stpf = stpmin;
}
}
// Update the interval of uncertainty. This update does not depend on the new step or the case
// analysis above.
let mut stx_o = stx.clone();
let mut sty_o = sty.clone();
let mut stp_o = stp.clone();
if stp_o.fx > stx_o.fx {
sty_o = Step::new(stp_o.x, stp_o.fx, stp_o.gx);
} else {
if sgnd < 0.0 {
sty_o = Step::new(stx_o.x, stx_o.fx, stx_o.gx);
}
stx_o = Step::new(stp_o.x, stp_o.fx, stp_o.gx);
}
// compute the new step and safeguard it.
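    // When the minimum is bracketed and the step was forced towards an endpoint
    // (`bound`), the classic More-Thuente 0.66 factor below keeps the trial
    // step strictly inside the bracketing interval.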
stpf = stpmax.min(stpf);
stpf = stpmin.max(stpf);
stp_o.x = stpf;
if brackt && bound {
if sty_o.x > stx_o.x {
stp_o.x = stp_o.x.min(stx_o.x + 0.66 * (sty_o.x - stx_o.x));
} else {
stp_o.x = stp_o.x.max(stx_o.x + 0.66 * (sty_o.x - stx_o.x));
}
}
(stx_o, sty_o, stp_o, brackt, stpmin, stpmax, info)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_trait_impl;
use crate::MinimalNoOperator;
test_trait_impl!(morethuente, MoreThuenteLineSearch<MinimalNoOperator>);
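
    // Hedged sketch (not part of the original suite): drive `cstep` through its
    // first case. `Step::new(x, fx, gx)` mirrors the constructor usage inside
    // `cstep` itself; the numeric expectations are hand-checked for f(x) = x^2.
    #[test]
    fn cstep_brackets_on_higher_function_value() {
        // stx sits at x = 0 with a descent derivative; the trial point stp at
        // x = 1 has a higher function value, so case 1 applies: the minimum
        // becomes bracketed and the new trial step stays strictly inside (0, 1).
        let stx = Step::new(0.0, 0.0, -1.0);
        let sty = Step::new(0.0, 0.0, -1.0);
        let stp = Step::new(1.0, 1.0, 2.0);
        let (_, _, stp_new, brackt, _, _, info) = cstep(stx, sty, stp, false, 1e-10, 10.0);
        assert_eq!(info, 1);
        assert!(brackt);
        assert!(stp_new.x > 0.0 && stp_new.x < 1.0);
    }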
}
| 32.438655 | 104 | 0.517434 |
2287bbbf7459600b3e40915f33fb9e1cd0611a7b | 8,718 | //! Functionality for generating SVG representations of barcodes.
//!
//! An SVG can be constructed via the standard constructor pattern
//! or via a constructor method if you want default values.
//!
//! For example:
//!
//! ```rust
//! use barcoders::generators::svg::*;
//!
//! // Specify your own struct fields.
//! let svg = SVG{height: 80,
//! xdim: 1,
//! background: Color{rgba: [255, 0, 0, 255]},
//! foreground: Color::black()};
//!
//! // Or use the constructor for defaults (you must specify the height).
//! let svg = SVG::new(100);
//! ```
use error::Result;
trait ToHex {
fn to_hex(&self) -> String;
fn format_hex(n: u8) -> String {
format!("{}{}",
Self::to_hex_digit(n / 16),
Self::to_hex_digit(n % 16))
}
fn to_hex_digit(n: u8) -> char {
match n {
d if d < 10 => (d + 48) as char,
d if d < 16 => (d + 87) as char,
_ => '0',
}
}
}
/// Represents a RGBA color for the barcode foreground and background.
#[derive(Copy, Clone, Debug)]
pub struct Color {
    /// Red, Green, Blue, Alpha values.
pub rgba: [u8; 4],
}
impl Color {
/// Constructor.
pub fn new(rgba: [u8; 4]) -> Color {
        Color { rgba }
}
/// Constructor for black (#000000).
pub fn black() -> Color {
Color::new([0, 0, 0, 255])
}
/// Constructor for white (#FFFFFF).
pub fn white() -> Color {
Color::new([255, 255, 255, 255])
}
fn to_opacity(&self) -> String {
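        // Alpha is scaled to 0..=1 and printed with two decimals, so fully
        // opaque (255) renders as "1.00", which `SVG::rect` then omits entirely.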
format!("{:.*}", 2, (self.rgba[3] as f64 / 255.0))
}
}
impl ToHex for Color {
fn to_hex(&self) -> String {
self.rgba
.iter()
.take(3)
.map(|&c| Self::format_hex(c))
.collect()
}
}
/// The SVG barcode generator type.
#[derive(Copy, Clone, Debug)]
pub struct SVG {
/// The height of the barcode (```self.height``` pixels high for SVG).
pub height: u32,
/// The X dimension. Specifies the width of the "narrow" bars.
/// For SVG, each will be ```self.xdim``` pixels wide.
pub xdim: u32,
/// The RGBA color for the foreground.
pub foreground: Color,
    /// The RGBA color for the background.
pub background: Color,
}
impl SVG {
/// Returns a new SVG with default values.
pub fn new(height: u32) -> SVG {
SVG {
height: height,
xdim: 1,
foreground: Color{rgba: [0, 0, 0, 255]},
background: Color{rgba: [255, 255, 255, 255]},
}
}
fn rect(&self, style: u8, offset: u32, width: u32) -> String {
let fill = match style {
1 => self.foreground,
_ => self.background,
};
let opacity = match &fill.to_opacity()[..] {
"1.00" | "1" => "".to_string(),
o => format!(" fill-opacity=\"{}\" ", o),
};
format!("<rect x=\"{}\" y=\"0\" width=\"{}\" height=\"{}\" fill=\"#{}\"{}/>",
offset, width, self.height, fill.to_hex(), opacity)
}
/// Generates the given barcode. Returns a `Result<String, Error>` of the SVG data or an
/// error message.
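    ///
    /// A minimal sketch (the 0/1 byte pattern below is illustrative; real input
    /// normally comes from a symbology's `encode()`):
    ///
    /// ```rust
    /// use barcoders::generators::svg::*;
    ///
    /// let svg = SVG::new(40);
    /// let xml = svg.generate(&[1, 0, 1, 1, 0, 1][..]).unwrap();
    /// assert!(xml.starts_with("<svg"));
    /// ```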
pub fn generate<T: AsRef<[u8]>>(&self, barcode: T) -> Result<String> {
let barcode = barcode.as_ref();
let width = (barcode.len() as u32) * self.xdim;
let rects: String = barcode.iter()
.enumerate()
.filter(|&(_, &n)| n == 1)
            .map(|(i, &n)| self.rect(n, i as u32 * self.xdim, self.xdim))
.collect();
Ok(format!("<svg version=\"1.1\" viewBox=\"0 0 {w} {h}\">{s}{r}</svg>",
w=width, h=self.height, s=self.rect(0, 0, width), r=rects))
}
}
#[cfg(test)]
mod tests {
use ::sym::ean13::*;
use ::sym::ean8::*;
use ::sym::code39::*;
use ::sym::code93::*;
use ::sym::code11::*;
use ::sym::code128::*;
use ::sym::ean_supp::*;
use ::sym::tf::*;
use ::sym::codabar::*;
use ::generators::svg::*;
use std::io::prelude::*;
use std::io::BufWriter;
use std::fs::File;
use std::path::Path;
const TEST_DATA_BASE: &str = "./target/debug";
const WRITE_TO_FILE: bool = true;
fn write_file(data: &str, file: &'static str) {
let path = open_file(file);
let mut writer = BufWriter::new(path);
        // `write_all` (unlike `write`) guarantees the whole buffer is written.
        writer.write_all(data.as_bytes()).unwrap();
}
fn open_file(name: &'static str) -> File {
File::create(&Path::new(&format!("{}/{}", TEST_DATA_BASE, name)[..])).unwrap()
}
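
    // Hedged sketch (not in the original suite): the crate-private `ToHex`
    // helper renders the RGB bytes as lowercase hex and drops the alpha byte.
    #[test]
    fn color_to_hex_drops_alpha() {
        use super::ToHex;
        assert_eq!(Color::new([255, 0, 128, 64]).to_hex(), "ff0080");
    }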
#[test]
fn ean_13_as_svg() {
let ean13 = EAN13::new("750103131130").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&ean13.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "ean13.svg"); }
assert_eq!(generated.len(), 2890);
}
#[test]
fn colored_ean_13_as_svg() {
let ean13 = EAN13::new("750103131130").unwrap();
let svg = SVG{height: 80,
xdim: 1,
background: Color{rgba: [255, 0, 0, 255]},
foreground: Color{rgba: [0, 0, 255, 255]}};
let generated = svg.generate(&ean13.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "ean13_colored.svg"); }
assert_eq!(generated.len(), 2890);
}
#[test]
fn colored_semi_transparent_ean_13_as_svg() {
let ean13 = EAN13::new("750103131130").unwrap();
let svg = SVG{height: 70,
xdim: 1,
background: Color{rgba: [255, 0, 0, 128]},
foreground: Color{rgba: [0, 0, 255, 128]}};
let generated = svg.generate(&ean13.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "ean13_colored_semi_transparent.svg"); }
assert_eq!(generated.len(), 3940);
}
#[test]
fn ean_8_as_svg() {
let ean8 = EAN8::new("9998823").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&ean8.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "ean8.svg"); }
assert_eq!(generated.len(), 1921);
}
#[test]
fn code39_as_svg() {
let code39 = Code39::new("IGOT99PROBLEMS").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&code39.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "code39.svg"); }
assert_eq!(generated.len(), 6539);
}
#[test]
fn code93_as_svg() {
let code93 = Code93::new("IGOT99PROBLEMS").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&code93.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "code93.svg"); }
assert_eq!(generated.len(), 4458);
}
#[test]
fn codabar_as_svg() {
let codabar = Codabar::new("A12----34A").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&codabar.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "codabar.svg"); }
assert_eq!(generated.len(), 2950);
}
#[test]
fn code128_as_svg() {
let code128 = Code128::new("ÀHIĆ345678").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&code128.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "code128.svg"); }
assert_eq!(generated.len(), 2723);
}
#[test]
fn ean_2_as_svg() {
let ean2 = EANSUPP::new("78").unwrap();
let svg = SVG::new(80);
let generated = svg.generate(&ean2.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "ean2.svg"); }
assert_eq!(generated.len(), 725);
}
#[test]
fn itf_as_svg() {
let itf = TF::interleaved("1234123488993344556677118").unwrap();
let svg = SVG{height: 80,
xdim: 1,
background: Color::black(),
foreground: Color::white()};
let generated = svg.generate(&itf.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "itf.svg"); }
assert_eq!(generated.len(), 7123);
}
#[test]
fn code11_as_svg() {
let code11 = Code11::new("9988-45643201").unwrap();
let svg = SVG{height: 80,
xdim: 1,
background: Color::black(),
foreground: Color::white()};
let generated = svg.generate(&code11.encode()[..]).unwrap();
if WRITE_TO_FILE { write_file(&generated[..], "code11.svg"); }
assert_eq!(generated.len(), 4219);
}
}
| 29.06 | 94 | 0.523859 |
38216ab71fbd5d00e97b92adaf6802bead1c5542 | 2,428 | /*
*
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
* Generated by: https://openapi-generator.tech
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LolCosmeticsCosmeticsTftDamageSkin {
#[serde(rename = "contentId", skip_serializing_if = "Option::is_none")]
pub content_id: Option<String>,
#[serde(rename = "description", skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(rename = "groupId", skip_serializing_if = "Option::is_none")]
pub group_id: Option<i32>,
#[serde(rename = "groupName", skip_serializing_if = "Option::is_none")]
pub group_name: Option<String>,
#[serde(rename = "itemId", skip_serializing_if = "Option::is_none")]
pub item_id: Option<i32>,
#[serde(rename = "level", skip_serializing_if = "Option::is_none")]
pub level: Option<i32>,
#[serde(rename = "loadoutsIcon", skip_serializing_if = "Option::is_none")]
pub loadouts_icon: Option<String>,
#[serde(rename = "loyalty", skip_serializing_if = "Option::is_none")]
pub loyalty: Option<bool>,
#[serde(rename = "name", skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "owned", skip_serializing_if = "Option::is_none")]
pub owned: Option<bool>,
#[serde(rename = "purchaseDate", skip_serializing_if = "Option::is_none")]
pub purchase_date: Option<String>,
#[serde(rename = "rarityValue", skip_serializing_if = "Option::is_none")]
pub rarity_value: Option<i32>,
#[serde(rename = "selected", skip_serializing_if = "Option::is_none")]
pub selected: Option<bool>,
#[serde(rename = "upgrades", skip_serializing_if = "Option::is_none")]
pub upgrades: Option<Vec<String>>,
}
impl LolCosmeticsCosmeticsTftDamageSkin {
pub fn new() -> LolCosmeticsCosmeticsTftDamageSkin {
LolCosmeticsCosmeticsTftDamageSkin {
content_id: None,
description: None,
group_id: None,
group_name: None,
item_id: None,
level: None,
loadouts_icon: None,
loyalty: None,
name: None,
owned: None,
purchase_date: None,
rarity_value: None,
selected: None,
upgrades: None,
}
}
}
| 35.705882 | 109 | 0.646623 |
acc4342342002411f28613baf2f0b8d7451e54d7 | 2,000 | mod agents;
mod post;
mod text_input;
use agents::posts::{PostId, PostRequest, PostStore};
use gloo_console as console;
use post::Post;
use text_input::TextInput;
use yew::prelude::*;
use yew_agent::utils::store::{Bridgeable, ReadOnly, StoreWrapper};
use yew_agent::Bridge;
pub enum Msg {
CreatePost(String),
PostStoreMsg(ReadOnly<PostStore>),
}
pub struct Model {
post_ids: Vec<PostId>,
post_store: Box<dyn Bridge<StoreWrapper<PostStore>>>,
}
impl Component for Model {
type Message = Msg;
type Properties = ();
fn create(ctx: &Context<Self>) -> Self {
let callback = ctx.link().callback(Msg::PostStoreMsg);
Self {
post_ids: Vec::new(),
post_store: PostStore::bridge(callback),
}
}
fn update(&mut self, _ctx: &Context<Self>, msg: Self::Message) -> bool {
match msg {
Msg::CreatePost(text) => {
self.post_store.send(PostRequest::Create(text));
false
}
Msg::PostStoreMsg(state) => {
// We can see this is logged once before we click any button.
// The state of the store is sent when we open a bridge.
console::log!("Received update");
let state = state.borrow();
if state.posts.len() != self.post_ids.len() {
self.post_ids = state.posts.keys().copied().collect();
self.post_ids.sort_unstable();
true
} else {
false
}
}
}
}
fn view(&self, ctx: &Context<Self>) -> Html {
html! {
<>
<TextInput value="New post" onsubmit={ctx.link().callback(Msg::CreatePost)} />
<div>
{ for self.post_ids.iter().map(|&id| html!{ <Post key={id} {id} /> }) }
</div>
</>
}
}
}
fn main() {
yew::start_app::<Model>();
}
| 27.39726 | 94 | 0.518 |
1694e2cd6619012626ed26df382aea23ba2bcdc2 | 692 | /*
* Asana
*
* This is the interface for interacting with the [Asana Platform](https://developers.asana.com). Our API reference is generated from our [OpenAPI spec] (https://raw.githubusercontent.com/Asana/developer-docs/master/defs/asana_oas.yaml).
*
* The version of the OpenAPI document: 1.0
*
* Generated by: https://openapi-generator.tech
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InlineObject56 {
#[serde(rename = "data", skip_serializing_if = "Option::is_none")]
pub data: Option<Box<crate::models::WorkspaceRemoveUserRequest>>,
}
impl InlineObject56 {
pub fn new() -> InlineObject56 {
InlineObject56 { data: None }
}
}
| 31.454545 | 237 | 0.709538 |
8f43f3e589dbca09ebda574a7f9e9b9760ae2119 | 930 | use conjure_object::serde::{de, ser};
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)]
pub struct BinaryAliasExample(pub conjure_object::ByteBuf);
impl std::ops::Deref for BinaryAliasExample {
type Target = conjure_object::ByteBuf;
#[inline]
fn deref(&self) -> &conjure_object::ByteBuf {
&self.0
}
}
impl std::ops::DerefMut for BinaryAliasExample {
#[inline]
fn deref_mut(&mut self) -> &mut conjure_object::ByteBuf {
&mut self.0
}
}
impl ser::Serialize for BinaryAliasExample {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
self.0.serialize(s)
}
}
impl<'de> de::Deserialize<'de> for BinaryAliasExample {
fn deserialize<D>(d: D) -> Result<BinaryAliasExample, D::Error>
where
D: de::Deserializer<'de>,
{
de::Deserialize::deserialize(d).map(BinaryAliasExample)
}
}
| 28.181818 | 70 | 0.63871 |
8a75692ddc2c13ead80cef5f0627253e53f20110 | 4,421 | #[doc = "Register `CHNL_SW_REQUEST` writer"]
pub struct W(crate::W<CHNL_SW_REQUEST_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<CHNL_SW_REQUEST_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<CHNL_SW_REQUEST_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<CHNL_SW_REQUEST_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `CH3` writer - Channel SW request"]
pub struct CH3_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Field `CH2` writer - Channel SW request"]
pub struct CH2_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Field `CH1` writer - Channel SW request"]
pub struct CH1_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Field `CH0` writer - Channel SW request"]
pub struct CH0_W<'a> {
w: &'a mut W,
}
impl<'a> CH0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
impl W {
#[doc = "Bit 3 - Channel SW request"]
#[inline(always)]
pub fn ch3(&mut self) -> CH3_W {
CH3_W { w: self }
}
#[doc = "Bit 2 - Channel SW request"]
#[inline(always)]
pub fn ch2(&mut self) -> CH2_W {
CH2_W { w: self }
}
#[doc = "Bit 1 - Channel SW request"]
#[inline(always)]
pub fn ch1(&mut self) -> CH1_W {
CH1_W { w: self }
}
#[doc = "Bit 0 - Channel SW request"]
#[inline(always)]
pub fn ch0(&mut self) -> CH0_W {
CH0_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "DMA channel software request\n\nThis register you can [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [chnl_sw_request](index.html) module"]
pub struct CHNL_SW_REQUEST_SPEC;
impl crate::RegisterSpec for CHNL_SW_REQUEST_SPEC {
type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [chnl_sw_request::W](W) writer structure"]
impl crate::Writable for CHNL_SW_REQUEST_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets CHNL_SW_REQUEST to value 0"]
impl crate::Resettable for CHNL_SW_REQUEST_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 28.707792 | 346 | 0.557114 |
1c91d72a916039433d0f2dfb6d0cdbd8a823d6fd | 2,364 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{create_and_start_server, gen_block_id, gen_ledger_info_with_sigs};
use config_builder::util::get_test_config;
use crypto::{
hash::GENESIS_BLOCK_ID,
signing::{generate_keypair, KeyPair},
};
use execution_client::ExecutionClient;
use execution_proto::ExecuteBlockRequest;
use futures01::future::Future;
use grpcio::EnvBuilder;
use std::sync::Arc;
use types::{
account_address::AccountAddress,
account_config,
transaction::{RawTransaction, SignedTransaction},
};
use vm_genesis::encode_mint_program;
fn encode_mint_transaction(seqnum: u64, sender_keypair: &KeyPair) -> SignedTransaction {
let (_privkey, pubkey) = generate_keypair();
let sender = account_config::association_address();
let receiver = AccountAddress::from(pubkey);
let program = encode_mint_program(&receiver, 100);
let raw_txn = RawTransaction::new(
sender,
seqnum,
program,
/* max_gas_amount = */ 100_000,
/* gas_unit_price = */ 1,
std::time::Duration::from_secs(u64::max_value()),
);
raw_txn
.sign(&sender_keypair.private_key(), sender_keypair.public_key())
.expect("Signing should work.")
.into_inner()
}
#[test]
fn test_execution_service_basic() {
let (config, faucet_keypair) = get_test_config();
let (_storage_server_handle, mut execution_server) = create_and_start_server(&config);
let execution_client = ExecutionClient::new(
Arc::new(EnvBuilder::new().build()),
&config.execution.address,
config.execution.port,
);
let parent_block_id = *GENESIS_BLOCK_ID;
let block_id = gen_block_id(1);
let version = 100;
let txns = (0..version)
.map(|i| encode_mint_transaction(i, &faucet_keypair))
.collect();
let execute_block_request = ExecuteBlockRequest::new(txns, parent_block_id, block_id);
let execute_block_response = execution_client
.execute_block(execute_block_request)
.unwrap();
let ledger_info_with_sigs = gen_ledger_info_with_sigs(
u64::from(version),
execute_block_response.root_hash(),
block_id,
);
execution_client
.commit_block(ledger_info_with_sigs)
.unwrap();
execution_server.shutdown().wait().unwrap();
}
| 31.52 | 90 | 0.692893 |
72bb940de19db62e6c79297db7e26471cb763364 | 6,255 | #[macro_use]
extern crate clap;
extern crate shelper;
use clap::{App, Arg};
use shelper::api;
use shelper::jobs;
use shelper::tunnels;
use shelper::users;
mod input_stripper;
fn main() {
let cmds = App::new("shelper")
.version(env!("CARGO_PKG_VERSION"))
.author(crate_authors!())
.about(r#"Get details about jobs and tunnels. For convenience, shelper will look for the SAUCE_USERNAME and SAUCE_ACCESS_KEY environment variables. You can overwrite this with the --owner and --key flag.
If you want details about a job & you have your Sauce credentials saved as environment variables you would run
shelper -j <job-id>
If you want info about a tunnel you would run
shelper -t <tunnel-id> -o <owner-of-the-tunnel>
If you owned the tunnel and have your Sauce credentials saved as environment variables you could omit the -o and -k flags."#)
.version(crate_version!())
.arg(
Arg::with_name("version")
.long("version")
.help("Print the current version of Shelper")
.takes_value(false),
)
.arg(
Arg::with_name("job")
.long("jobinfo")
.short("j")
.help("Get job details. Takes a URL link to a session or a Job ID string")
.value_name("one or more job")
.multiple(true)
.takes_value(true)
)
.arg(
Arg::with_name("owner")
.help("Sauce account that owns a sauce resource (tunnel, job, asset)")
.long("owner")
.short("o")
.value_name("sauce_username")
.takes_value(true)
.multiple(false),
)
.arg(
Arg::with_name("access_key")
.help("Sauce Access Key")
.short("k")
.long("key")
.value_name("key")
.takes_value(true)
.multiple(false)
)
.arg(
Arg::with_name("region")
.help("Region/datacenter to search.")
.short("r")
.long("region")
.takes_value(true)
.value_name("region")
.possible_value("EU")
.possible_value("US"),
)
.arg(
Arg::with_name("tunnel")
.help(r#"Get information about a tunnel. REQUIRES:
- the sauce username that created the tunnel, either in the Owner flag(-o/--owner) OR as an env. variable
- the access key used to authenticate (env variable or as a flag)
- the tunnel id, provided at tunnel runtime"#)
.short("t")
.long("tunnelinfo")
.value_name("tunnel_id")
.multiple(true)
.takes_value(true)
)
.get_matches();
if cmds.is_present("version") {
println!("shelper version {}", env!("CARGO_PKG_VERSION"))
}
// if the user doesn't specify a region default to US
let region = match cmds.is_present("region") {
true => value_t!(cmds, "region", users::Region).unwrap_or_else(|e| e.exit()),
false => users::Region::US,
};
// Build out a user w/ key + username + region
let owner: users::User;
if cmds.is_present("owner") && cmds.is_present("access_key") {
let key_arg = cmds.value_of("access_key").unwrap().to_string();
let owner_arg = cmds.value_of("owner").unwrap().to_string();
match region {
users::Region::US => owner = users::User::new(Some(owner_arg), Some(key_arg), None),
users::Region::EU => {
owner = users::User::new(Some(owner_arg), Some(key_arg), Some(users::Region::EU))
}
}
} else if cmds.is_present("owner") {
owner = users::User::new(
Some(cmds.value_of("owner").unwrap().to_string()),
None,
Some(region),
);
// println!("new owner {:?}", owner);
} else {
match region {
users::Region::US => owner = users::User::new(None, None, None),
users::Region::EU => owner = users::User::new(None, None, Some(users::Region::EU)),
}
}
if let Some(jobs) = cmds.values_of("job") {
let sanitized_jobs = input_stripper::get_job_id(jobs.collect());
let job_count = sanitized_jobs.len();
for (i, job) in sanitized_jobs.iter().enumerate() {
let deets: shelper::jobs::JobDetails;
if !cmds.is_present("access_key") {
let admin = users::User::new(None, None, None);
deets = match jobs::JobDetails::new(job, &owner, Some(&admin)) {
Ok(deets) => deets,
Err(e) => {
eprintln!("{}", e);
continue;
}
}
} else {
deets = match jobs::JobDetails::new(job, &owner, Some(&owner)) {
Ok(deets) => deets,
Err(e) => {
eprintln!("{}", e);
continue;
}
}
}
println!("{}/{}", i + 1, job_count);
deets.pretty_print();
println!("");
}
}
if let Some(t) = cmds.values_of("tunnel") {
// println!("Splitting: {:?}", t);
let tunnel_list: Vec<&str> = t.collect();
let tunnel_count = tunnel_list.len();
// println!("{:?}", tunnels)
for (i, tunnel) in tunnel_list.iter().enumerate() {
if !cmds.is_present("access_key") {
let admin = users::User::new(None, None, None);
let info: tunnels::TunnelMetadata =
match api::tunnel_raw(&owner, &tunnel, Some(&admin)) {
Ok(resp) => serde_json::from_str(&resp).unwrap(),
Err(e) => {
eprintln!("{}", e);
continue;
}
};
println!("{}/{}", i + 1, tunnel_count);
info.pretty_print();
}
}
}
}
| 37.232143 | 213 | 0.495763 |
bb687dd4e08094cb7d69fdf004e0601dfc8b3b64 | 16,719 | /*
* Copyright 2019 Boyd Johnson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use nom::{
complete, digit, do_parse, many0, map_res, named, opt, rest, tag, take_till1, take_while,
types::CompleteStr, whitespace::sp,
};
use serde_json::Value;
use std::{
cmp::{Ordering, PartialOrd},
num::{ParseFloatError, ParseIntError},
str::{FromStr, ParseBoolError},
};
pub use yajlish::ndjson_handler::Selector;
pub type Null = Option<NonNullJsonValue>;
#[derive(Debug, PartialEq)]
pub enum NonNullJsonValue {
String(String),
Bool(bool),
I64(i64),
Float(f64),
}
impl PartialOrd for NonNullJsonValue {
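    // Values of different JSON types are deliberately incomparable (`None`);
    // a derived `PartialOrd` would instead rank them by variant order.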
fn partial_cmp(&self, other: &NonNullJsonValue) -> Option<Ordering> {
use NonNullJsonValue::{Bool, Float, String, I64};
match (self, other) {
(String(s), String(o)) => s.partial_cmp(o),
(Bool(b), Bool(o)) => b.partial_cmp(o),
(I64(i), I64(o)) => i.partial_cmp(o),
(Float(f), Float(o)) => f.partial_cmp(o),
_ => None,
}
}
}
#[derive(Debug, PartialEq)]
pub struct ParseNullError;
#[derive(Debug, PartialEq)]
pub enum Comparator {
LT,
LE,
GT,
GE,
EQ,
NE,
}
#[derive(Debug, PartialEq)]
pub struct Compare<T> {
comparator: Comparator,
value: T,
}
impl<T> Compare<T>
where
T: ParseValue,
{
pub fn compare(&self, other: Value) -> bool {
match self.comparator {
Comparator::LT => T::parse_value(other)
.map(|o| o < self.value)
.unwrap_or(false),
Comparator::LE => T::parse_value(other)
.map(|o| o <= self.value)
.unwrap_or(false),
Comparator::GT => T::parse_value(other)
.map(|o| o > self.value)
.unwrap_or(false),
Comparator::GE => T::parse_value(other)
.map(|o| o >= self.value)
.unwrap_or(false),
Comparator::EQ => T::parse_value(other)
.map(|o| o == self.value)
.unwrap_or(false),
Comparator::NE => T::parse_value(other)
.map(|o| o != self.value)
.unwrap_or(false),
}
}
}
pub trait ParseValue: Sized + PartialOrd {
fn parse_value(val: Value) -> Option<Self>;
}
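// Coercion note (added for clarity, derived from the impls below): `String`
// stringifies numbers and bools, `u64` additionally parses numeric strings,
// while `i64` and `f64` only accept genuine JSON numbers.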
impl ParseValue for String {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(v) => Some(v),
Value::Null => None,
Value::Number(num) => Some(num.to_string()),
Value::Bool(b) => Some(b.to_string()),
_ => None,
}
}
}
impl ParseValue for u64 {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(v) => u64::from_str(&v).ok(),
Value::Number(num) => num.as_u64(),
Value::Null => None,
Value::Bool(_) => None,
_ => None,
}
}
}
impl ParseValue for i64 {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(_) => None,
Value::Number(num) => num.as_i64(),
Value::Null => None,
Value::Bool(_) => None,
_ => None,
}
}
}
impl ParseValue for f64 {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(_) => None,
Value::Number(num) => num.as_f64(),
Value::Null => None,
Value::Bool(_) => None,
_ => None,
}
}
}
impl ParseValue for bool {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(_) => None,
Value::Number(_) => None,
Value::Null => None,
Value::Bool(b) => Some(b),
_ => None,
}
}
}
impl ParseValue for Null {
fn parse_value(val: Value) -> Option<Self> {
match val {
Value::String(s) => Some(Some(NonNullJsonValue::String(s))),
Value::Number(s) => {
if let Some(n) = s.as_i64() {
Some(Some(NonNullJsonValue::I64(n)))
} else if let Some(f) = s.as_f64() {
Some(Some(NonNullJsonValue::Float(f)))
} else {
Some(None)
}
}
Value::Null => Some(None),
Value::Bool(b) => Some(Some(NonNullJsonValue::Bool(b))),
_ => None,
}
}
}
fn parse_u64(s: CompleteStr) -> Result<u64, ParseIntError> {
s.parse::<u64>()
}
fn parse_i64(s: CompleteStr) -> Result<i64, ParseIntError> {
s.parse()
}
fn parse_null(s: CompleteStr) -> Result<Null, ParseNullError> {
if let Ok(v) = s.to_string().parse::<Value>() {
if v == Value::Null {
return Ok(None);
}
}
Err(ParseNullError)
}
fn parse_bool(s: CompleteStr) -> Result<bool, ParseBoolError> {
s.to_string().parse()
}
fn parse_usize(s: CompleteStr) -> Result<usize, ParseIntError> {
s.parse()
}
fn parse_f64(s: CompleteStr) -> Result<f64, ParseFloatError> {
s.parse::<f64>()
}
fn parse_string(s: CompleteStr) -> Result<String, std::convert::Infallible> {
s.parse()
}
named!(
parse_self_signifier<CompleteStr, Option<Selector>>,
do_parse!(
tag!("d") >>
index: opt!(complete!(parse_index)) >>
(index.map(Selector::Index))
)
);
named!(
parse_index<CompleteStr, usize>,
do_parse!(
tag!("[") >>
index: map_res!(digit, parse_usize) >>
tag!("]") >>
(index)
)
);
named!(
parse_dot_plus_identifier<CompleteStr, (Selector, Option<Selector>)>,
do_parse!(
tag!(".") >>
identifier: take_while!(is_not_dot_or_array_bracket_or_comparator) >>
index: opt!(parse_index) >>
(Selector::Identifier(format!("\"{}\"", identifier)), index.map(Selector::Index))
)
);
fn is_not_dot_or_array_bracket_or_comparator(c: char) -> bool {
!is_dot(c) && !is_array_bracket(c) && !is_comparator(c) && c != ' '
}
fn is_dot(c: char) -> bool {
c == '.'
}
fn is_array_bracket(c: char) -> bool {
c == '['
}
fn combine_identifiers(
first: Option<Selector>,
next: Vec<(Selector, Option<Selector>)>,
) -> Vec<Selector> {
let mut items = vec![];
if let Some(f) = first {
items.push(f);
}
for (ident, optional_second) in next {
items.push(ident);
if let Some(s) = optional_second {
items.push(s);
}
}
items
}
named!(
parse_many_identifiers<CompleteStr, Vec<(Selector, Option<Selector>)>>,
many0!(complete!(parse_dot_plus_identifier))
);
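// Worked example (mirrors the tests below): the selector `d[5].manager.pay`
// parses to `[Index(5), Identifier("\"manager\""), Identifier("\"pay\"")]` —
// the leading `d` denotes the datum itself, and identifiers are re-quoted for
// later lookup.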
named!(
pub parse_json_selector<CompleteStr, Vec<Selector>>,
do_parse!(
first_array_selection: parse_self_signifier >>
identifiers: parse_many_identifiers >>
(combine_identifiers(first_array_selection, identifiers))
)
);
fn is_comparator(c: char) -> bool {
c == '<' || c == '=' || c == '!' || c == '>'
}
fn comparator(c: CompleteStr) -> Result<Comparator, ()> {
match c.0 {
"<" => Ok(Comparator::LT),
"<=" => Ok(Comparator::LE),
"==" => Ok(Comparator::EQ),
"!=" => Ok(Comparator::NE),
">" => Ok(Comparator::GT),
">=" => Ok(Comparator::GE),
_ => Err(()),
}
}
named!(
parse_comparator<CompleteStr, Comparator>,
map_res!(take_till1!(is_digit_or_space), comparator)
);
fn is_digit(c: char) -> bool {
c.is_digit(10)
}
fn is_digit_or_space(c: char) -> bool {
is_digit(c) || c == ' '
}
named!(
parse_value_f64<CompleteStr, f64>,
map_res!(rest, parse_f64)
);
named!(
parse_value_u64<CompleteStr, u64>,
map_res!(rest, parse_u64)
);
named!(
parse_value_string<CompleteStr, String>,
map_res!(rest, parse_string)
);
named!(
parse_compare_null<CompleteStr, Compare<Null>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_null) >>
(Compare { comparator, value })
)
);
named!(
parse_compare_bool<CompleteStr, Compare<bool>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_bool) >>
(Compare { comparator, value })
)
);
named!(
parse_compare_i64<CompleteStr, Compare<i64>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_i64) >>
(Compare { comparator, value })
)
);
named!(
parse_compare_u64<CompleteStr, Compare<u64>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_u64) >>
(Compare { comparator, value })
)
);
named!(
parse_compare_f64<CompleteStr, Compare<f64>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_f64) >>
(Compare { comparator, value })
)
);
named!(
parse_compare_string<CompleteStr, Compare<String>>,
do_parse!(
comparator: parse_comparator >>
opt!(sp) >>
value: map_res!(rest, parse_string) >>
(Compare { comparator, value })
)
);
named!(
pub parse_selector_i64<CompleteStr, (Compare<i64>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_i64 >>
(compare, identifiers)
)
);
named!(
pub parse_selector_null<CompleteStr, (Compare<Null>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_null >>
(compare, identifiers)
)
);
named!(
pub parse_selector_bool<CompleteStr, (Compare<bool>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_bool >>
(compare, identifiers)
)
);
named!(
pub parse_selector_u64<CompleteStr, (Compare<u64>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_u64 >>
(compare, identifiers)
)
);
named!(
pub parse_selector_f64<CompleteStr, (Compare<f64>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_f64 >>
(compare, identifiers)
)
);
named!(
pub parse_selector_string<CompleteStr, (Compare<String>, Vec<Selector>)>,
do_parse!(
identifiers: parse_json_selector >>
opt!(sp) >>
compare: parse_compare_string >>
(compare, identifiers)
)
);
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_self_signifier_success() {
assert_eq!(parse_self_signifier("d".into()), Ok(("".into(), None)));
assert_eq!(
parse_self_signifier("d[0]".into()),
Ok(("".into(), Some(Selector::Index(0))))
);
assert_eq!(
parse_self_signifier("d[24]".into()),
Ok(("".into(), Some(Selector::Index(24))))
);
assert_eq!(
parse_self_signifier("d.properties.AREA".into()),
Ok((".properties.AREA".into(), None))
);
}
#[test]
fn test_parse_self_signifier_failure() {
assert!(parse_self_signifier("b".into()).is_err());
assert!(parse_self_signifier("e[]".into()).is_err());
}
#[test]
fn test_dot_plus_identifier_success() {
assert_eq!(
parse_dot_plus_identifier(".properties.AREA".into()),
Ok((
".AREA".into(),
(Selector::Identifier("\"properties\"".to_string()), None)
))
);
assert_eq!(
parse_dot_plus_identifier(".properties.contains[5]".into()),
Ok((
".contains[5]".into(),
(Selector::Identifier("\"properties\"".to_string()), None)
))
);
assert_eq!(
parse_dot_plus_identifier(".contains[5]".into()),
Ok((
"".into(),
(
Selector::Identifier("\"contains\"".to_string()),
Some(Selector::Index(5))
)
))
);
}
#[test]
fn test_dot_plus_identifier_failure() {
assert!(parse_dot_plus_identifier("simply.considered".into()).is_err())
}
#[test]
fn test_many_identifiers() {
assert_eq!(
parse_many_identifiers(".properties.AREA>".into()),
Ok((
">".into(),
vec![
(Selector::Identifier("\"properties\"".to_string()), None),
(Selector::Identifier("\"AREA\"".to_string()), None)
]
))
);
}
#[test]
fn test_json_selector_success() {
assert_eq!(
parse_json_selector("d.properties.AREA".into()),
Ok((
"".into(),
vec![
Selector::Identifier("\"properties\"".to_string()),
Selector::Identifier("\"AREA\"".to_string())
]
))
)
}
#[test]
fn test_parse_comparator_success() {
assert_eq!(
parse_comparator(">=5.5".into()),
Ok(("5.5".into(), Comparator::GE))
);
assert_eq!(
parse_comparator("== 7.4".into()),
Ok((" 7.4".into(), Comparator::EQ))
);
}
#[test]
fn test_parse_value_success() {
assert_eq!(parse_value_f64("5.5".into()), Ok(("".into(), 5.5)));
assert_eq!(parse_value_u64("6555".into()), Ok(("".into(), 6555)));
}
#[test]
fn test_parse_compare_success() {
assert_eq!(
parse_compare_f64(">= 5.5".into()),
Ok((
"".into(),
Compare {
comparator: Comparator::GE,
value: 5.5
}
))
);
assert_eq!(
parse_compare_u64("== 5".into()),
Ok((
"".into(),
Compare {
comparator: Comparator::EQ,
value: 5
}
))
);
assert_eq!(
parse_compare_f64("<= 7.4".into()),
Ok((
"".into(),
Compare {
comparator: Comparator::LE,
value: 7.4
}
))
);
assert_eq!(
parse_compare_u64("==568473".into()),
Ok((
"".into(),
Compare {
comparator: Comparator::EQ,
value: 568473
}
))
);
}
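
    // Hedged sketch (not in the original suite): `Compare::compare` coerces the
    // incoming `serde_json::Value` through `ParseValue` before comparing, and
    // uncoercible values simply fail the comparison.
    #[test]
    fn test_compare_coerces_json_values() {
        let cmp = Compare {
            comparator: Comparator::GT,
            value: 5.5f64,
        };
        assert!(cmp.compare(serde_json::json!(7.25)));
        assert!(!cmp.compare(serde_json::json!("not a number")));
    }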
#[test]
fn test_full_selector_success() {
assert_eq!(
parse_selector_f64("d.properties.AREA >= 5.5".into()),
Ok((
"".into(),
(
Compare {
comparator: Comparator::GE,
value: 5.5
},
vec![
Selector::Identifier("\"properties\"".to_string()),
Selector::Identifier("\"AREA\"".to_string())
]
)
))
);
assert_eq!(
parse_selector_u64("d[5].manager.pay >= 40000".into()),
Ok((
"".into(),
(
Compare {
comparator: Comparator::GE,
value: 40000
},
vec![
Selector::Index(5),
Selector::Identifier("\"manager\"".to_string()),
Selector::Identifier("\"pay\"".to_string())
]
)
))
);
}
#[test]
fn test_parse_full_selector_failure() {
assert!(parse_selector_u64("d[5].manager_pay >= 60.456".into()).is_err());
assert!(parse_selector_f64("d[55]. manager. pay".into()).is_err());
}
}
| 25.408815 | 93 | 0.50966 |
76d70457823ace23b88fefb4b5adf2d3f3f7dbac | 3,526 | use nu_test_support::fs::Stub::FileWithContentToBeTrimmed;
use nu_test_support::playground::Playground;
use nu_test_support::{nu, pipeline};
#[test]
fn from_ssv_text_to_table() {
Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| {
sandbox.with_files(vec![FileWithContentToBeTrimmed(
"oc_get_svc.txt",
r#"
NAME LABELS SELECTOR IP PORT(S)
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
"#,
)]);
let actual = nu!(
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
| from ssv
| nth 0
| get IP
"#
));
assert_eq!(actual.out, "172.30.78.158");
})
}
#[test]
fn from_ssv_text_to_table_with_separator_specified() {
Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| {
sandbox.with_files(vec![FileWithContentToBeTrimmed(
"oc_get_svc.txt",
r#"
NAME LABELS SELECTOR IP PORT(S)
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
"#,
)]);
let actual = nu!(
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
| from ssv --minimum-spaces 3
| nth 0
| get IP
"#
));
assert_eq!(actual.out, "172.30.78.158");
})
}
#[test]
fn from_ssv_text_treating_first_line_as_data_with_flag() {
Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| {
sandbox.with_files(vec![FileWithContentToBeTrimmed(
"oc_get_svc.txt",
r#"
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
"#,
)]);
let aligned_columns = nu!(
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
| from ssv --noheaders -a
| first
| get Column1
"#
));
let separator_based = nu!(
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
| from ssv --noheaders
| first
| get Column1
"#
));
assert_eq!(aligned_columns.out, separator_based.out);
assert_eq!(separator_based.out, "docker-registry");
})
}
| 36.729167 | 126 | 0.479013 |
2fbb355cc76f3b525b62708e7d86aff04a0c676b | 11,684 | // WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "android-renderscript-ScriptIntrinsicConvolve3x3"))]
__jni_bindgen! {
/// public final class [ScriptIntrinsicConvolve3x3](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html)
///
/// Required feature: android-renderscript-ScriptIntrinsicConvolve3x3
public final class ScriptIntrinsicConvolve3x3 ("android/renderscript/ScriptIntrinsicConvolve3x3") extends crate::android::renderscript::ScriptIntrinsic {
// // Not emitting: Non-public method
// /// [ScriptIntrinsicConvolve3x3](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#ScriptIntrinsicConvolve3x3(long,%20android.renderscript.RenderScript))
// ///
// /// Required features: "android-renderscript-RenderScript"
// #[cfg(any(feature = "all", all(feature = "android-renderscript-RenderScript")))]
// fn new<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i64, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::RenderScript>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::renderscript::ScriptIntrinsicConvolve3x3>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// // class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == (empty), .name == "<init>", .descriptor == "(JLandroid/renderscript/RenderScript;)V"
// unsafe {
// let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())];
// let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "<init>\0", "(JLandroid/renderscript/RenderScript;)V\0");
// __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
// }
// }
/// [create](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#create(android.renderscript.RenderScript,%20android.renderscript.Element))
///
/// Required features: "android-renderscript-Element", "android-renderscript-RenderScript", "android-renderscript-ScriptIntrinsicConvolve3x3"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Element", feature = "android-renderscript-RenderScript", feature = "android-renderscript-ScriptIntrinsicConvolve3x3")))]
pub fn create<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::RenderScript>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::Element>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::renderscript::ScriptIntrinsicConvolve3x3>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC | STATIC, .name == "create", .descriptor == "(Landroid/renderscript/RenderScript;Landroid/renderscript/Element;)Landroid/renderscript/ScriptIntrinsicConvolve3x3;"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())];
let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "create\0", "(Landroid/renderscript/RenderScript;Landroid/renderscript/Element;)Landroid/renderscript/ScriptIntrinsicConvolve3x3;\0");
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
/// [setInput](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#setInput(android.renderscript.Allocation))
///
/// Required features: "android-renderscript-Allocation"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Allocation")))]
pub fn setInput<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::Allocation>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "setInput", .descriptor == "(Landroid/renderscript/Allocation;)V"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "setInput\0", "(Landroid/renderscript/Allocation;)V\0");
__jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [setCoefficients](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#setCoefficients(float%5B%5D))
pub fn setCoefficients<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env __jni_bindgen::FloatArray>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "setCoefficients", .descriptor == "([F)V"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "setCoefficients\0", "([F)V\0");
__jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [forEach](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#forEach(android.renderscript.Allocation))
///
/// Required features: "android-renderscript-Allocation"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Allocation")))]
pub fn forEach_Allocation<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::Allocation>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "forEach", .descriptor == "(Landroid/renderscript/Allocation;)V"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "forEach\0", "(Landroid/renderscript/Allocation;)V\0");
__jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [forEach](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#forEach(android.renderscript.Allocation,%20android.renderscript.Script.LaunchOptions))
///
/// Required features: "android-renderscript-Allocation", "android-renderscript-Script_LaunchOptions"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Allocation", feature = "android-renderscript-Script_LaunchOptions")))]
pub fn forEach_Allocation_LaunchOptions<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::Allocation>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::renderscript::Script_LaunchOptions>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "forEach", .descriptor == "(Landroid/renderscript/Allocation;Landroid/renderscript/Script$LaunchOptions;)V"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "forEach\0", "(Landroid/renderscript/Allocation;Landroid/renderscript/Script$LaunchOptions;)V\0");
__jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getKernelID](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#getKernelID())
///
/// Required features: "android-renderscript-Script_KernelID"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Script_KernelID")))]
pub fn getKernelID<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::renderscript::Script_KernelID>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "getKernelID", .descriptor == "()Landroid/renderscript/Script$KernelID;"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "getKernelID\0", "()Landroid/renderscript/Script$KernelID;\0");
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getFieldID_Input](https://developer.android.com/reference/android/renderscript/ScriptIntrinsicConvolve3x3.html#getFieldID_Input())
///
/// Required features: "android-renderscript-Script_FieldID"
#[cfg(any(feature = "all", all(feature = "android-renderscript-Script_FieldID")))]
pub fn getFieldID_Input<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::renderscript::Script_FieldID>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/renderscript/ScriptIntrinsicConvolve3x3", java.flags == PUBLIC, .name == "getFieldID_Input", .descriptor == "()Landroid/renderscript/Script$FieldID;"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/renderscript/ScriptIntrinsicConvolve3x3\0", "getFieldID_Input\0", "()Landroid/renderscript/Script$FieldID;\0");
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
}
}
| 97.366667 | 543 | 0.697364 |
26b6f92bf3c17c3fa684f8011b8bb3a08def2f56 | 2,122 | use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
use ff::Field;
use paired::bls12_381::{Bls12, Fr};
use rand::thread_rng;
use storage_proofs::drgraph::new_seed;
use storage_proofs::fr32::fr_into_bytes;
use storage_proofs::hasher::sha256::Sha256Hasher;
use storage_proofs::hasher::{Domain, Hasher};
use storage_proofs::porep::stacked::{create_label, create_label_exp, StackedBucketGraph};
struct Pregenerated<H: 'static + Hasher> {
data: Vec<u8>,
replica_id: H::Domain,
graph: StackedBucketGraph<H>,
}
fn pregenerate_data<H: Hasher>(degree: usize) -> Pregenerated<H> {
assert_eq!(degree, 6 + 8);
let mut rng = thread_rng();
let size = degree * 4 * 1024 * 1024;
let data: Vec<u8> = (0..size)
.flat_map(|_| fr_into_bytes::<Bls12>(&Fr::random(&mut rng)))
.collect();
let replica_id: H::Domain = H::Domain::random(&mut rng);
let graph = StackedBucketGraph::<H>::new_stacked(size, 6, 8, new_seed()).unwrap();
Pregenerated {
data,
replica_id,
graph,
}
}
fn kdf_benchmark(c: &mut Criterion) {
let degree = 14;
let Pregenerated {
data,
replica_id,
graph,
} = pregenerate_data::<Sha256Hasher>(degree);
let mut group = c.benchmark_group("kdf");
group.sample_size(10);
group.throughput(Throughput::Bytes(
/* replica id + 37 parents + node id */ 39 * 32,
));
group.bench_function("exp", |b| {
let mut raw_data = data.clone();
raw_data.extend_from_slice(&data);
let (data, exp_data) = raw_data.split_at_mut(data.len());
let graph = &graph;
let replica_id = replica_id.clone();
b.iter(|| black_box(create_label_exp(graph, &replica_id, &*exp_data, data, 1)))
});
group.bench_function("non-exp", |b| {
let mut data = data.clone();
let graph = &graph;
let replica_id = replica_id.clone();
b.iter(|| black_box(create_label(graph, &replica_id, &mut data, 1)))
});
group.finish();
}
criterion_group!(benches, kdf_benchmark);
criterion_main!(benches);
| 29.068493 | 89 | 0.636192 |
c10e19a8f4df04d77e741f0d4cafcb59d921a373 | 15,758 | //! Rust implementation of JMESPath, a query language for JSON.
//!
//! # Compiling JMESPath expressions
//!
//! Use the `jmespath::compile` function to compile JMESPath expressions
//! into reusable `Expression` structs. The `Expression` struct can be
//! used multiple times on different values without having to recompile
//! the expression.
//!
//! ```
//! use jmespath;
//!
//! let expr = jmespath::compile("foo.bar | baz").unwrap();
//!
//! // Parse some JSON data into a JMESPath variable
//! let json_str = "{\"foo\":{\"bar\":{\"baz\":true}}}";
//! let data = jmespath::Variable::from_json(json_str).unwrap();
//!
//! // Search the data with the compiled expression
//! let result = expr.search(data).unwrap();
//! assert_eq!(true, result.as_boolean().unwrap());
//! ```
//!
//! You can get the original expression as a string and the parsed expression
//! AST from the `Expression` struct:
//!
//! ```
//! use jmespath;
//! use jmespath::ast::Ast;
//!
//! let expr = jmespath::compile("foo").unwrap();
//! assert_eq!("foo", expr.as_str());
//! assert_eq!(&Ast::Field {name: "foo".to_string(), offset: 0}, expr.as_ast());
//! ```
//!
//! ## JMESPath variables
//!
//! In order to evaluate expressions against a known data type, the
//! `jmespath::Variable` enum is used as both the input and output type.
//! More specifically, `Rcvar` (or `jmespath::Rcvar`) is used to allow
//! shared, reference counted data to be used by the JMESPath interpreter at
//! runtime.
//!
//! By default, `Rcvar` is an `std::rc::Rc<Variable>`. However, by specifying
//! the `sync` feature, you can utilize an `std::sync::Arc<Variable>` to
//! share `Expression` structs across threads.
//!
//! Any type that implements `jmespath::ToJmespath` can be used in a JMESPath
//! Expression. Various types have default `ToJmespath` implementations,
//! including `serde::ser::Serialize`. Because `jmespath::Variable` implements
//! `serde::ser::Serialize`, many existing types can be searched without needing
//! an explicit coercion, and any type that needs coercion can be implemented
//! using serde's macros or code generation capabilities. This includes a
//! number of common types, including serde's `serde_json::Value` enum.
//!
//! The return value of searching data with JMESPath is also an `Rcvar`.
//! `Variable` has a number of helper methods that make it a data type that
//! can be used directly, or you can convert `Variable` to any serde value
//! implementing `serde::de::Deserialize`.
//!
//! # Custom Functions
//!
//! You can register custom functions with a JMESPath expression by using
//! a custom `Runtime`. When you call `jmespath::compile`, you are using a
//! shared `Runtime` instance that is created lazily using `lazy_static`.
//! This shared `Runtime` utilizes all of the builtin JMESPath functions
//! by default. However, custom functions may be utilized by creating a custom
//! `Runtime` and compiling expressions directly from the `Runtime`.
//!
//! ```
//! use jmespath::{Runtime, Context, Rcvar};
//! use jmespath::functions::{CustomFunction, Signature, ArgumentType};
//!
//! // Create a new Runtime and register the builtin JMESPath functions.
//! let mut runtime = Runtime::new();
//! runtime.register_builtin_functions();
//!
//! // Create an identity string function that returns string values as-is.
//! runtime.register_function("str_identity", Box::new(CustomFunction::new(
//! Signature::new(vec![ArgumentType::String], None),
//! Box::new(|args: &[Rcvar], _: &mut Context| Ok(args[0].clone()))
//! )));
//!
//! // You can also use normal closures as functions.
//! runtime.register_function("identity",
//! Box::new(|args: &[Rcvar], _: &mut Context| Ok(args[0].clone())));
//!
//! let expr = runtime.compile("str_identity('foo')").unwrap();
//! assert_eq!("foo", expr.search(()).unwrap().as_string().unwrap());
//!
//! let expr = runtime.compile("identity('bar')").unwrap();
//! assert_eq!("bar", expr.search(()).unwrap().as_string().unwrap());
//! ```
#![cfg_attr(feature = "specialized", feature(specialization))]
pub use crate::errors::{ErrorReason, JmespathError, RuntimeError};
pub use crate::parser::{parse, ParseResult};
pub use crate::runtime::Runtime;
pub use crate::variable::Variable;
pub mod ast;
pub mod functions;
use serde::ser;
#[cfg(feature = "specialized")]
use serde_json::Value;
#[cfg(feature = "specialized")]
use std::convert::TryInto;
use std::fmt;
use lazy_static::*;
use crate::ast::Ast;
use crate::interpreter::{interpret, SearchResult};
mod errors;
mod interpreter;
mod lexer;
mod parser;
mod runtime;
mod variable;
lazy_static! {
pub static ref DEFAULT_RUNTIME: Runtime = {
let mut runtime = Runtime::new();
runtime.register_builtin_functions();
runtime
};
}
/// `Rc` reference counted JMESPath `Variable`.
#[cfg(not(feature = "sync"))]
pub type Rcvar = std::rc::Rc<Variable>;
/// `Arc` reference counted JMESPath `Variable`.
#[cfg(feature = "sync")]
pub type Rcvar = std::sync::Arc<Variable>;
/// Compiles a JMESPath expression using the default Runtime.
///
/// The default Runtime is created lazily the first time it is dereferenced
/// by using the `lazy_static` macro.
///
/// The provided expression is expected to adhere to the JMESPath
/// grammar: http://jmespath.org/specification.html
#[inline]
pub fn compile(expression: &str) -> Result<Expression<'static>, JmespathError> {
DEFAULT_RUNTIME.compile(expression)
}
/// Converts a value into a reference-counted JMESPath Variable.
///
#[cfg_attr(
feature = "specialized",
doc = "\
There is a generic serde Serialize implementation, and since this
documentation was compiled with the `specialized` feature turned
**on**, there are also a number of specialized implementations for
`ToJmespath` built into the library that should work for most
cases."
)]
#[cfg_attr(
not(feature = "specialized"),
doc = "\
There is a generic serde Serialize implementation. Since this
documentation was compiled with the `specialized` feature turned
**off**, this is the only implementation available.
    (If the `specialized` feature were turned on, there would be
a number of additional specialized implementations for `ToJmespath`
built into the library that should work for most cases.)"
)]
pub trait ToJmespath {
fn to_jmespath(self) -> Result<Rcvar, JmespathError>;
}
/// Create searchable values from Serde serializable values.
impl<'a, T: ser::Serialize> ToJmespath for T {
#[cfg(not(feature = "specialized"))]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Variable::from_serializable(self).map(Rcvar::new)?)
}
#[cfg(feature = "specialized")]
default fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
        Ok(Variable::from_serializable(self).map(Rcvar::new)?)
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for Value {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
self.try_into().map(|var: Variable| Rcvar::new(var))
}
}
#[cfg(feature = "specialized")]
impl<'a> ToJmespath for &'a Value {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
self.try_into().map(|var: Variable| Rcvar::new(var))
}
}
#[cfg(feature = "specialized")]
/// Identity coercion.
impl ToJmespath for Rcvar {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(self)
}
}
#[cfg(feature = "specialized")]
impl<'a> ToJmespath for &'a Rcvar {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(self.clone())
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for Variable {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(self))
}
}
#[cfg(feature = "specialized")]
impl<'a> ToJmespath for &'a Variable {
#[inline]
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(self.clone()))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for String {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::String(self)))
}
}
#[cfg(feature = "specialized")]
impl<'a> ToJmespath for &'a str {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::String(self.to_owned())))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for i8 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for i16 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for i32 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for i64 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for u8 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for u16 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for u32 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for u64 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for isize {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for usize {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(serde_json::Number::from(self))))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for f32 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
(self as f64).to_jmespath()
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for f64 {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Number(
serde_json::Number::from_f64(self).ok_or_else(|| {
JmespathError::new(
"",
0,
ErrorReason::Parse(format!("Cannot parse {} into a Number", self)),
)
})?,
)))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for () {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Null))
}
}
#[cfg(feature = "specialized")]
impl ToJmespath for bool {
fn to_jmespath(self) -> Result<Rcvar, JmespathError> {
Ok(Rcvar::new(Variable::Bool(self)))
}
}
/// A compiled JMESPath expression.
///
/// The compiled expression can be used multiple times without incurring
/// the cost of re-parsing the expression each time. The expression may
/// be shared between threads if JMESPath is compiled with the `sync`
/// feature, which forces the use of an `Arc` instead of an `Rc` for
/// runtime variables.
#[derive(Clone)]
pub struct Expression<'a> {
ast: Ast,
expression: String,
runtime: &'a Runtime,
}
impl<'a> Expression<'a> {
/// Creates a new JMESPath expression.
///
    /// Normally you will create expressions using either `jmespath::compile()`
    /// or a `jmespath::Runtime`.
#[inline]
pub fn new<S>(expression: S, ast: Ast, runtime: &'a Runtime) -> Expression<'a>
where
S: Into<String>,
{
Expression {
expression: expression.into(),
ast,
runtime,
}
}
/// Returns the result of searching data with the compiled expression.
///
/// The SearchResult contains a JMESPath Rcvar, or a reference counted
/// Variable. This value can be used directly like a JSON object.
    /// Alternatively, `Variable` implements Serde serialization and
/// deserialization, so it can easily be marshalled to another type.
pub fn search<T: ToJmespath>(&self, data: T) -> SearchResult {
let mut ctx = Context::new(&self.expression, self.runtime);
interpret(&data.to_jmespath()?, &self.ast, &mut ctx)
}
/// Returns the JMESPath expression from which the Expression was compiled.
///
/// Note that this is the same value that is returned by calling
/// `to_string`.
pub fn as_str(&self) -> &str {
&self.expression
}
/// Returns the AST of the parsed JMESPath expression.
///
/// This can be useful for debugging purposes, caching, etc.
pub fn as_ast(&self) -> &Ast {
&self.ast
}
}
impl<'a> fmt::Display for Expression<'a> {
/// Shows the jmespath expression as a string.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_str())
}
}
impl<'a> fmt::Debug for Expression<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl<'a> PartialEq for Expression<'a> {
fn eq(&self, other: &Expression<'_>) -> bool {
self.as_str() == other.as_str()
}
}
/// Context object used for error reporting.
///
/// The Context struct is mostly used for interactions between the
/// interpreter and function implementations. Unless you're writing custom
/// JMESPath functions, this struct is an implementation detail.
pub struct Context<'a> {
/// Expression string that is being interpreted.
pub expression: &'a str,
/// JMESPath runtime used to compile the expression and call functions.
pub runtime: &'a Runtime,
/// Ast offset that is currently being evaluated.
pub offset: usize,
}
impl<'a> Context<'a> {
/// Create a new context struct.
#[inline]
pub fn new(expression: &'a str, runtime: &'a Runtime) -> Context<'a> {
Context {
expression,
runtime,
offset: 0,
}
}
}
#[cfg(test)]
mod test {
use super::ast::Ast;
use super::*;
#[test]
fn formats_expression_as_string_or_debug() {
let expr = compile("foo | baz").unwrap();
assert_eq!("foo | baz/foo | baz", format!("{}/{:?}", expr, expr));
}
#[test]
fn implements_partial_eq() {
let a = compile("@").unwrap();
let b = compile("@").unwrap();
assert!(a == b);
}
#[test]
fn can_evaluate_jmespath_expression() {
let expr = compile("foo.bar").unwrap();
let var = Variable::from_json("{\"foo\":{\"bar\":true}}").unwrap();
assert_eq!(Rcvar::new(Variable::Bool(true)), expr.search(var).unwrap());
}
#[test]
fn can_get_expression_ast() {
let expr = compile("foo").unwrap();
assert_eq!(
&Ast::Field {
offset: 0,
name: "foo".to_string(),
},
expr.as_ast()
);
}
#[test]
fn test_creates_rcvar_from_tuple_serialization() {
use super::ToJmespath;
let t = (true, false);
assert_eq!("[true,false]", t.to_jmespath().unwrap().to_string());
}
#[test]
fn expression_clone() {
let expr = compile("foo").unwrap();
let _ = expr.clone();
}
}
| 30.777344 | 87 | 0.644308 |
c1fbd46f641b51edf7830857ebf7f438d82aad23 | 906 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
mod cluster;
mod match_seq;
mod user_defined_function;
mod user_grant;
mod user_info;
mod user_privilege;
mod user_quota;
mod user_stage;
#[test]
fn test_bin_commit_version() -> anyhow::Result<()> {
let v = &common_meta_types::config::DATABEND_COMMIT_VERSION;
    assert!(!v.is_empty());
Ok(())
}
| 30.2 | 76 | 0.727373 |
1e425a9821793b6ac20f0ea9cfbf8f53989e8883 | 3,171 | //! Variable header in MQTT
use std::convert::From;
use std::error::Error;
use std::fmt;
use std::io;
use std::string::FromUtf8Error;
use crate::encodable::StringEncodeError;
use crate::topic_name::TopicNameError;
pub use self::connect_ack_flags::ConnackFlags;
pub use self::connect_flags::ConnectFlags;
pub use self::connect_ret_code::ConnectReturnCode;
pub use self::keep_alive::KeepAlive;
pub use self::packet_identifier::PacketIdentifier;
pub use self::protocol_level::ProtocolLevel;
pub use self::protocol_name::ProtocolName;
pub use self::topic_name::TopicNameHeader;
mod packet_identifier;
mod protocol_name;
pub mod protocol_level;
mod connect_flags;
mod keep_alive;
mod connect_ack_flags;
mod connect_ret_code;
mod topic_name;
/// Errors while decoding variable header
#[derive(Debug)]
pub enum VariableHeaderError {
IoError(io::Error),
StringEncodeError(StringEncodeError),
InvalidReservedFlag,
FromUtf8Error(FromUtf8Error),
TopicNameError(TopicNameError),
}
impl From<io::Error> for VariableHeaderError {
fn from(err: io::Error) -> VariableHeaderError {
VariableHeaderError::IoError(err)
}
}
impl From<FromUtf8Error> for VariableHeaderError {
fn from(err: FromUtf8Error) -> VariableHeaderError {
VariableHeaderError::FromUtf8Error(err)
}
}
impl From<StringEncodeError> for VariableHeaderError {
fn from(err: StringEncodeError) -> VariableHeaderError {
VariableHeaderError::StringEncodeError(err)
}
}
impl From<TopicNameError> for VariableHeaderError {
fn from(err: TopicNameError) -> VariableHeaderError {
VariableHeaderError::TopicNameError(err)
}
}
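// Illustrative use of the conversions above (comment-only sketch; `read_one`
// is a hypothetical helper, not part of this module): the `From` impls let
// parsing code bubble errors up with the `?` operator.
//
//     fn read_one(rdr: &mut impl std::io::Read) -> Result<u8, VariableHeaderError> {
//         let mut buf = [0u8; 1];
//         rdr.read_exact(&mut buf)?; // io::Error converts via From
//         Ok(buf[0])
//     }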
impl fmt::Display for VariableHeaderError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            VariableHeaderError::IoError(err) => write!(f, "{}", err),
            VariableHeaderError::StringEncodeError(err) => write!(f, "{}", err),
            VariableHeaderError::InvalidReservedFlag => write!(f, "Invalid reserved flags"),
            VariableHeaderError::FromUtf8Error(err) => write!(f, "{}", err),
            VariableHeaderError::TopicNameError(err) => write!(f, "{}", err),
        }
    }
}
impl Error for VariableHeaderError {
    fn description(&self) -> &str {
        match self {
            VariableHeaderError::IoError(err) => err.description(),
            VariableHeaderError::StringEncodeError(err) => err.description(),
            VariableHeaderError::InvalidReservedFlag => "Invalid reserved flags",
            VariableHeaderError::FromUtf8Error(err) => err.description(),
            VariableHeaderError::TopicNameError(err) => err.description(),
        }
    }
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        match self {
            VariableHeaderError::IoError(err) => Some(err),
            VariableHeaderError::StringEncodeError(err) => Some(err),
            VariableHeaderError::InvalidReservedFlag => None,
            VariableHeaderError::FromUtf8Error(err) => Some(err),
            VariableHeaderError::TopicNameError(err) => Some(err),
        }
    }
}
| 32.690722 | 93 | 0.685904 |
dded41ded4c5a32e3a0bb1f4b15b6291f59e9f89 | 11,017 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_arrow::arrow::array::ArrayRef;
use common_arrow::arrow::bitmap::Bitmap;
use common_arrow::arrow::bitmap::MutableBitmap;
use common_arrow::arrow::compute::cast;
use common_arrow::arrow::compute::cast::CastOptions as ArrowOption;
use common_datavalues::prelude::*;
use common_datavalues::with_match_primitive_type_id;
use common_exception::ErrorCode;
use common_exception::Result;
use common_io::prelude::FormatSettings;
use super::cast_from_datetimes::cast_from_date;
use super::cast_from_string::cast_from_string;
use super::cast_from_variant::cast_from_variant;
use crate::scalars::expressions::cast_from_datetimes::cast_from_timestamp;
use crate::scalars::FunctionContext;
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct CastOptions {
pub exception_mode: ExceptionMode,
pub parsing_mode: ParsingMode,
}
pub const DEFAULT_CAST_OPTIONS: CastOptions = CastOptions {
exception_mode: ExceptionMode::Throw,
parsing_mode: ParsingMode::Strict,
};
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum ExceptionMode {
/// Throw exception if value cannot be parsed.
Throw,
/// Fill with zero or default if value cannot be parsed.
Zero,
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum ParsingMode {
Strict,
Partial,
}
impl CastOptions {
fn as_arrow(&self) -> ArrowOption {
ArrowOption {
wrapped: true,
partial: self.parsing_mode == ParsingMode::Partial,
}
}
}
pub fn cast_column_field(
column_with_field: &ColumnWithField,
from_type: &DataTypeImpl,
target_type: &DataTypeImpl,
func_ctx: &FunctionContext,
) -> Result<ColumnRef> {
cast_with_type(
column_with_field.column(),
from_type,
target_type,
&DEFAULT_CAST_OPTIONS,
func_ctx,
)
}
// No logical type is specified, so use the default cast options.
pub fn default_column_cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {
let func_ctx = FunctionContext::default();
cast_with_type(
column,
&column.data_type(),
data_type,
&DEFAULT_CAST_OPTIONS,
&func_ctx,
)
}
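// Illustrative call shape (comment-only sketch; `col` and `target_type` are
// assumed to be an existing ColumnRef and DataTypeImpl): casting with default
// options is a single call, with failures surfacing as ErrorCode values.
//
//     let casted: ColumnRef = default_column_cast(&col, &target_type)?;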
pub fn cast_with_type(
column: &ColumnRef,
from_type: &DataTypeImpl,
target_type: &DataTypeImpl,
cast_options: &CastOptions,
func_ctx: &FunctionContext,
) -> Result<ColumnRef> {
    // they are physically the same type
if &column.data_type() == target_type {
return Ok(column.clone());
}
if target_type.data_type_id() == TypeID::Null {
return Ok(Arc::new(NullColumn::new(column.len())));
}
if from_type.data_type_id() == TypeID::Null {
        // all is null
if target_type.is_nullable() {
return target_type.create_constant_column(&DataValue::Null, column.len());
}
return Err(ErrorCode::BadDataValueType(
"Can't cast column from null into non-nullable type".to_string(),
));
}
if column.is_const() {
let col: &ConstColumn = Series::check_get(column)?;
let inner = col.inner();
let res = cast_with_type(inner, from_type, target_type, cast_options, func_ctx)?;
return Ok(ConstColumn::new(res, column.len()).arc());
}
let nonull_from_type = remove_nullable(from_type);
let nonull_data_type = remove_nullable(target_type);
let (result, valids) = match nonull_from_type.data_type_id() {
TypeID::String => cast_from_string(
column,
&nonull_from_type,
&nonull_data_type,
cast_options,
func_ctx,
),
TypeID::Date => cast_from_date(
column,
&nonull_from_type,
&nonull_data_type,
cast_options,
func_ctx,
),
TypeID::Timestamp => cast_from_timestamp(
column,
&nonull_from_type,
&nonull_data_type,
cast_options,
func_ctx,
),
TypeID::Variant | TypeID::VariantArray | TypeID::VariantObject => {
cast_from_variant(column, &nonull_data_type, func_ctx)
}
_ => arrow_cast_compute(
column,
&nonull_from_type,
&nonull_data_type,
cast_options,
func_ctx,
),
}?;
// check date/timestamp bound
if nonull_data_type.data_type_id() == TypeID::Date {
let viewer = i32::try_create_viewer(&result)?;
for x in viewer {
check_date(x)?;
}
} else if nonull_data_type.data_type_id() == TypeID::Timestamp {
let viewer = i64::try_create_viewer(&result)?;
for x in viewer {
check_timestamp(x)?;
}
} else if nonull_data_type.data_type_id() == TypeID::Array {
return Err(ErrorCode::BadDataValueType(format!(
"Cast error happens in casting from {} to {}",
from_type.name(),
target_type.name()
)));
}
let (all_nulls, source_valids) = column.validity();
let bitmap = combine_validities_2(source_valids.cloned(), valids);
if target_type.is_nullable() {
return Ok(NullableColumn::wrap_inner(result, bitmap));
}
if let Some(bitmap) = bitmap {
let null_cnt = bitmap.null_count();
let source_null_cnt = match (all_nulls, source_valids) {
(true, _) => column.len(),
(false, None) => 0,
(false, Some(b)) => b.null_count(),
};
        if cast_options.exception_mode == ExceptionMode::Throw
            && ((from_type.is_nullable() && null_cnt > source_null_cnt)
                || (!from_type.is_nullable() && null_cnt > 0))
{
// TODO get the data to error msg
return Err(ErrorCode::BadDataValueType(format!(
"Cast error happens in casting from {} to {}",
from_type.name(),
target_type.name()
)));
}
}
Ok(result)
}
pub fn cast_to_variant(
column: &ColumnRef,
from_type: &DataTypeImpl,
data_type: &DataTypeImpl,
_func_ctx: &FunctionContext,
) -> Result<(ColumnRef, Option<Bitmap>)> {
let column = Series::remove_nullable(column);
let size = column.len();
if data_type.data_type_id() == TypeID::VariantArray {
return Err(ErrorCode::BadDataValueType(format!(
"Expression type does not match column data type, expecting ARRAY but got {}",
from_type.data_type_id()
)));
} else if data_type.data_type_id() == TypeID::VariantObject {
return Err(ErrorCode::BadDataValueType(format!(
"Expression type does not match column data type, expecting OBJECT but got {}",
from_type.data_type_id()
)));
}
let mut builder = ColumnBuilder::<VariantValue>::with_capacity(size);
if from_type.data_type_id().is_numeric() || from_type.data_type_id() == TypeID::Boolean {
let serializer = from_type.create_serializer();
let format = FormatSettings::default();
match serializer.serialize_json_object(&column, None, &format) {
Ok(values) => {
for v in values {
builder.append(&VariantValue::from(v));
}
}
Err(e) => return Err(e),
}
return Ok((builder.build(size), None));
}
    // other data types can't be automatically cast to variant
    Err(ErrorCode::BadDataValueType(format!(
        "Expression type does not match column data type, expecting VARIANT but got {}",
        from_type.data_type_id()
    )))
}
pub fn cast_to_timestamp(
column: &ColumnRef,
from_type: &DataTypeImpl,
) -> Result<(ColumnRef, Option<Bitmap>)> {
let column = Series::remove_nullable(column);
let size = column.len();
let mut builder = ColumnBuilder::<i64>::with_capacity(size);
with_match_primitive_type_id!(from_type.data_type_id(), |$T| {
let col: &PrimitiveColumn<$T> = Series::check_get(&column)?;
for v in col.iter() {
            // The value is treated as a number of seconds, milliseconds or microseconds,
            // depending on its magnitude.
// If the value is less than 31536000000, it is treated as a number of seconds,
// If the value is greater than or equal to 31536000000 and less than 31536000000000,
// it is treated as milliseconds.
// If the value is greater than or equal to 31536000000000, it is treated as microseconds.
let val = *v as i64;
if val < 31536000000 {
builder.append(val * 1000000);
} else if val < 31536000000000 {
builder.append(val * 1000);
} else {
builder.append(val);
}
}
}, {
if from_type.data_type_id() == TypeID::Boolean {
let col: &BooleanColumn = Series::check_get(&column)?;
for v in col.iter() {
if v {
builder.append(1i64);
} else {
builder.append(0i64);
}
}
} else {
return Err(ErrorCode::BadDataValueType(format!(
"Cast error happens in casting from {} to Timestamp.",
from_type.data_type_id()
)));
}
});
    Ok((builder.build(size), None))
}
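// Comment-only illustration of the magnitude heuristic in `cast_to_timestamp`
// (example values are arbitrary):
//
//     1_600_000_000         -> 1_600_000_000_000_000  // seconds -> microseconds
//     1_600_000_000_000     -> 1_600_000_000_000_000  // milliseconds -> microseconds
//     1_600_000_000_000_000 -> 1_600_000_000_000_000  // already microseconds, kept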
// cast using arrow's cast compute
pub fn arrow_cast_compute(
column: &ColumnRef,
from_type: &DataTypeImpl,
data_type: &DataTypeImpl,
cast_options: &CastOptions,
func_ctx: &FunctionContext,
) -> Result<(ColumnRef, Option<Bitmap>)> {
if data_type.data_type_id().is_variant() {
return cast_to_variant(column, from_type, data_type, func_ctx);
} else if data_type.data_type_id() == TypeID::Timestamp {
return cast_to_timestamp(column, from_type);
}
let arrow_array = column.as_arrow_array();
let arrow_options = cast_options.as_arrow();
let result = cast::cast(arrow_array.as_ref(), &data_type.arrow_type(), arrow_options)?;
let result: ArrayRef = Arc::from(result);
let bitmap = result.validity().cloned();
Ok((result.into_column(), bitmap))
}
pub fn new_mutable_bitmap(size: usize, valid: bool) -> MutableBitmap {
let mut bitmap = MutableBitmap::with_capacity(size);
bitmap.extend_constant(size, valid);
bitmap
}
| 33.384848 | 102 | 0.620768 |
1d5d2d1815ac023b01f842d552476e45ba797282 | 191 | //! Types that pin data to its location in memory.
//!
//! For more documentation see [`std::pin`](https://doc.rust-lang.org/std/pin/index.html).
#[doc(inline)]
pub use std::pin::Pin;
| 27.285714 | 91 | 0.649215 |
5be827f20ba0e7ca8dab4e1a2ad1bcbb358a59c1 | 3,394 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::{anyhow, format_err, Context, Error},
ffx_core::ffx_plugin,
ffx_test_args::TestCommand,
fidl::endpoints::create_proxy,
fidl_fuchsia_test::CaseIteratorMarker,
fidl_fuchsia_test_manager as ftest_manager,
run_test_suite_lib::diagnostics,
std::io::{stdout, Write},
};
#[ffx_plugin(ftest_manager::HarnessProxy = "core/appmgr:out:fuchsia.test.manager.Harness")]
pub async fn test(
harness_proxy: ftest_manager::HarnessProxy,
cmd: TestCommand,
) -> Result<(), Error> {
let writer = Box::new(stdout());
let count = cmd.count.unwrap_or(1);
let count = std::num::NonZeroU16::new(count)
.ok_or_else(|| anyhow!("--count should be greater than zero."))?;
if cmd.list {
get_tests(harness_proxy, writer, &cmd.test_url).await
} else {
match run_test_suite_lib::run_tests_and_get_outcome(
run_test_suite_lib::TestParams {
test_url: cmd.test_url,
timeout: cmd.timeout.and_then(std::num::NonZeroU32::new),
test_filter: cmd.test_filter,
also_run_disabled_tests: cmd.run_disabled,
parallel: cmd.parallel,
test_args: vec![],
harness: harness_proxy,
},
diagnostics::LogCollectionOptions {
min_severity: cmd.min_severity_logs,
max_severity: cmd.max_severity_logs,
},
count,
cmd.filter_ansi,
)
.await
{
run_test_suite_lib::Outcome::Passed => Ok(()),
run_test_suite_lib::Outcome::Timedout => Err(anyhow!("Tests timed out")),
run_test_suite_lib::Outcome::Failed
| run_test_suite_lib::Outcome::Inconclusive
| run_test_suite_lib::Outcome::Error => {
Err(anyhow!("There was an error running tests"))
}
}
}
}
async fn get_tests<W: Write>(
harness_proxy: ftest_manager::HarnessProxy,
mut write: W,
    suite_url: &str,
) -> Result<(), Error> {
let writer = &mut write;
let (suite_proxy, suite_server_end) = create_proxy().unwrap();
let (_controller_proxy, controller_server_end) = create_proxy().unwrap();
log::info!("launching test suite {}", suite_url);
let _result = harness_proxy
.launch_suite(
&suite_url,
ftest_manager::LaunchOptions::EMPTY,
suite_server_end,
controller_server_end,
)
.await
.context("launch_suite call failed")?
.map_err(|e| format_err!("error launching test: {:?}", e))?;
let (case_iterator, test_server_end) = create_proxy::<CaseIteratorMarker>()?;
suite_proxy
.get_tests(test_server_end)
.map_err(|e| format_err!("Error getting test steps: {}", e))?;
loop {
let cases = case_iterator.get_next().await?;
if cases.is_empty() {
return Ok(());
}
writeln!(writer, "Tests in suite {}:\n", suite_url)?;
for case in cases {
match case.name {
Some(n) => writeln!(writer, "{}", n)?,
None => writeln!(writer, "<No name>")?,
};
}
}
}
| 33.94 | 91 | 0.591927 |
ffedd76d01087f984d27f8ce8e14be43dd5c4966 | 16,041 | use std::collections::VecDeque;
use crate::{
api::inject_endpoints,
db::{DatabaseCidr, DatabasePeer},
util::{form_body, json_response, status_response},
Context, ServerError, Session,
};
use hyper::{Body, Method, Request, Response, StatusCode};
use shared::{EndpointContents, PeerContents, RedeemContents, State, REDEEM_TRANSITION_WAIT};
use wgctrl::DeviceUpdate;
pub async fn routes(
req: Request<Body>,
mut components: VecDeque<String>,
session: Session,
) -> Result<Response<Body>, ServerError> {
match (req.method(), components.pop_front().as_deref()) {
(&Method::GET, Some("state")) => {
if !session.user_capable() {
return Err(ServerError::Unauthorized);
}
handlers::state(session).await
},
(&Method::POST, Some("redeem")) => {
if !session.redeemable() {
return Err(ServerError::Unauthorized);
}
let form = form_body(req).await?;
handlers::redeem(form, session).await
},
(&Method::PUT, Some("endpoint")) => {
if !session.user_capable() {
return Err(ServerError::Unauthorized);
}
let form = form_body(req).await?;
handlers::endpoint(form, session).await
},
(&Method::PUT, Some("candidates")) => {
if !session.user_capable() {
return Err(ServerError::Unauthorized);
}
let form = form_body(req).await?;
handlers::candidates(form, session).await
},
_ => Err(ServerError::NotFound),
}
}
mod handlers {
use shared::Endpoint;
use super::*;
/// Get the current state of the network, in the eyes of the current peer.
///
/// This endpoint returns the visible CIDRs and Peers, providing all the necessary
/// information for the peer to create connections to all of them.
pub async fn state(session: Session) -> Result<Response<Body>, ServerError> {
let conn = session.context.db.lock();
let selected_peer = DatabasePeer::get(&conn, session.peer.id)?;
let cidrs: Vec<_> = DatabaseCidr::list(&conn)?;
let mut peers: Vec<_> = selected_peer
.get_all_allowed_peers(&conn)?
.into_iter()
.map(|p| p.inner)
.collect();
inject_endpoints(&session, &mut peers);
json_response(State { peers, cidrs })
}
/// Redeems an invitation. An invitation includes a WireGuard keypair generated by either the server
/// or a peer with admin rights.
///
/// Redemption is the process of an invitee generating their own keypair and exchanging their temporary
/// key with their permanent one.
///
/// Until this API endpoint is called, the invited peer will not show up to other peers, and once
/// it is called and succeeds, it cannot be called again.
pub async fn redeem(
form: RedeemContents,
session: Session,
) -> Result<Response<Body>, ServerError> {
let conn = session.context.db.lock();
let mut selected_peer = DatabasePeer::get(&conn, session.peer.id)?;
let old_public_key = wgctrl::Key::from_base64(&selected_peer.public_key)
.map_err(|_| ServerError::WireGuard)?;
selected_peer.redeem(&conn, &form.public_key)?;
if cfg!(not(test)) {
let Context {
interface, backend, ..
} = session.context;
// If we were to modify the WireGuard interface immediately, the HTTP response wouldn't
// get through. Instead, we need to wait a reasonable amount for the HTTP response to
// flush, then update the interface.
//
// The client is also expected to wait the same amount of time after receiving a success
// response from /redeem.
//
// This might be avoidable if we were able to run code after we were certain the response
// had flushed over the TCP socket, but that isn't easily accessible from this high-level
// web framework.
tokio::task::spawn(async move {
tokio::time::sleep(*REDEEM_TRANSITION_WAIT).await;
log::info!(
"WireGuard: adding new peer {}, removing old pubkey {}",
&*selected_peer,
old_public_key.to_base64()
);
DeviceUpdate::new()
.remove_peer_by_key(&old_public_key)
.add_peer((&*selected_peer).into())
.apply(&interface, backend)
.map_err(|e| log::error!("{:?}", e))
.ok();
});
}
status_response(StatusCode::NO_CONTENT)
}
/// Report any other endpoint candidates that can be tried by peers to connect.
/// Currently limited to 10 candidates max.
pub async fn candidates(
contents: Vec<Endpoint>,
session: Session,
) -> Result<Response<Body>, ServerError> {
if contents.len() > 10 {
return status_response(StatusCode::PAYLOAD_TOO_LARGE);
}
let conn = session.context.db.lock();
let mut selected_peer = DatabasePeer::get(&conn, session.peer.id)?;
selected_peer.update(
&conn,
PeerContents {
candidates: contents,
..selected_peer.contents.clone()
},
)?;
status_response(StatusCode::NO_CONTENT)
}
/// Force a specific endpoint to be reported by the server.
pub async fn endpoint(
contents: EndpointContents,
session: Session,
) -> Result<Response<Body>, ServerError> {
let conn = session.context.db.lock();
let mut selected_peer = DatabasePeer::get(&conn, session.peer.id)?;
selected_peer.update(
&conn,
PeerContents {
endpoint: contents.into(),
..selected_peer.contents.clone()
},
)?;
status_response(StatusCode::NO_CONTENT)
}
}
#[cfg(test)]
mod tests {
use std::time::{Duration, SystemTime};
use super::*;
use crate::{db::DatabaseAssociation, test};
use bytes::Buf;
use shared::{AssociationContents, CidrContents, Endpoint, EndpointContents, Error};
#[tokio::test]
async fn test_get_state_from_developer1() -> Result<(), Error> {
let server = test::Server::new()?;
let res = server
.request(test::DEVELOPER1_PEER_IP, "GET", "/v1/user/state")
.await;
assert_eq!(res.status(), StatusCode::OK);
let whole_body = hyper::body::aggregate(res).await?;
let State { peers, .. } = serde_json::from_reader(whole_body.reader())?;
let mut peer_names = peers.iter().map(|p| &*p.contents.name).collect::<Vec<_>>();
peer_names.sort_unstable();
// Developers should see only peers in infra CIDR and developer CIDR.
assert_eq!(
&["developer1", "developer2", "innernet-server"],
&peer_names[..]
);
Ok(())
}
#[tokio::test]
async fn test_override_endpoint() -> Result<(), Error> {
let server = test::Server::new()?;
assert_eq!(
server
.form_request(
test::DEVELOPER1_PEER_IP,
"PUT",
"/v1/user/endpoint",
&EndpointContents::Set("1.1.1.1:51820".parse().unwrap())
)
.await
.status(),
StatusCode::NO_CONTENT
);
println!("{}", serde_json::to_string(&EndpointContents::Unset)?);
assert_eq!(
server
.form_request(
test::DEVELOPER1_PEER_IP,
"PUT",
"/v1/user/endpoint",
&EndpointContents::Unset,
)
.await
.status(),
StatusCode::NO_CONTENT
);
assert_eq!(
server
.form_request(
test::DEVELOPER1_PEER_IP,
"PUT",
"/v1/user/endpoint",
"endpoint=blah",
)
.await
.status(),
StatusCode::BAD_REQUEST
);
Ok(())
}
#[tokio::test]
async fn test_list_peers_from_unknown_ip() -> Result<(), Error> {
let server = test::Server::new()?;
// Request comes from an unknown IP.
let res = server.request("10.80.80.80", "GET", "/v1/user/state").await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
Ok(())
}
#[tokio::test]
async fn test_list_peers_for_developer_subcidr() -> Result<(), Error> {
let server = test::Server::new()?;
{
let db = server.db.lock();
let cidr = DatabaseCidr::create(
&db,
CidrContents {
name: "experiment cidr".to_string(),
cidr: test::EXPERIMENTAL_CIDR.parse()?,
parent: Some(test::ROOT_CIDR_ID),
},
)?;
let subcidr = DatabaseCidr::create(
&db,
CidrContents {
name: "experiment subcidr".to_string(),
cidr: test::EXPERIMENTAL_SUBCIDR.parse()?,
parent: Some(cidr.id),
},
)?;
DatabasePeer::create(
&db,
test::peer_contents(
"experiment-peer",
test::EXPERIMENT_SUBCIDR_PEER_IP,
subcidr.id,
false,
)?,
)?;
// Add a peering between the developer's CIDR and the experimental *parent* cidr.
DatabaseAssociation::create(
&db,
AssociationContents {
cidr_id_1: test::DEVELOPER_CIDR_ID,
cidr_id_2: cidr.id,
},
)?;
DatabaseAssociation::create(
&db,
AssociationContents {
cidr_id_1: test::INFRA_CIDR_ID,
cidr_id_2: cidr.id,
},
)?;
}
for ip in &[test::DEVELOPER1_PEER_IP, test::EXPERIMENT_SUBCIDR_PEER_IP] {
let res = server.request(ip, "GET", "/v1/user/state").await;
assert_eq!(res.status(), StatusCode::OK);
let whole_body = hyper::body::aggregate(res).await?;
let State { peers, .. } = serde_json::from_reader(whole_body.reader())?;
let mut peer_names = peers.iter().map(|p| &*p.contents.name).collect::<Vec<_>>();
peer_names.sort_unstable();
// Developers should see only peers in infra CIDR and developer CIDR.
assert_eq!(
&[
"developer1",
"developer2",
"experiment-peer",
"innernet-server"
],
&peer_names[..]
);
}
Ok(())
}
#[tokio::test]
async fn test_redeem() -> Result<(), Error> {
let server = test::Server::new()?;
let experimental_cidr = DatabaseCidr::create(
&server.db().lock(),
CidrContents {
name: "experimental".to_string(),
cidr: test::EXPERIMENTAL_CIDR.parse()?,
parent: Some(test::ROOT_CIDR_ID),
},
)?;
let mut peer_contents = test::peer_contents(
"experiment-peer",
test::EXPERIMENT_SUBCIDR_PEER_IP,
experimental_cidr.id,
false,
)?;
peer_contents.is_redeemed = false;
peer_contents.invite_expires = Some(SystemTime::now() + Duration::from_secs(100));
let _experiment_peer = DatabasePeer::create(&server.db().lock(), peer_contents)?;
// Step 1: Ensure that before redeeming, other endpoints aren't yet accessible.
let res = server
.request(test::EXPERIMENT_SUBCIDR_PEER_IP, "GET", "/v1/user/state")
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
// Step 2: Ensure that redemption works.
let body = RedeemContents {
public_key: "YBVIgpfLbi/knrMCTEb0L6eVy0daiZnJJQkxBK9s+2I=".into(),
};
let res = server
.form_request(
test::EXPERIMENT_SUBCIDR_PEER_IP,
"POST",
"/v1/user/redeem",
&body,
)
.await;
assert!(res.status().is_success());
// Step 3: Ensure that a second attempt at redemption DOESN'T work.
let res = server
.form_request(
test::EXPERIMENT_SUBCIDR_PEER_IP,
"POST",
"/v1/user/redeem",
&body,
)
.await;
assert!(res.status().is_client_error());
        // Step 4: Ensure that after redemption, fetching state works.
let res = server
.request(test::EXPERIMENT_SUBCIDR_PEER_IP, "GET", "/v1/user/state")
.await;
assert_eq!(res.status(), StatusCode::OK);
Ok(())
}
#[tokio::test]
async fn test_redeem_expired() -> Result<(), Error> {
let server = test::Server::new()?;
let experimental_cidr = DatabaseCidr::create(
&server.db().lock(),
CidrContents {
name: "experimental".to_string(),
cidr: test::EXPERIMENTAL_CIDR.parse()?,
parent: Some(test::ROOT_CIDR_ID),
},
)?;
let mut peer_contents = test::peer_contents(
"experiment-peer",
test::EXPERIMENT_SUBCIDR_PEER_IP,
experimental_cidr.id,
false,
)?;
peer_contents.is_redeemed = false;
peer_contents.invite_expires = Some(SystemTime::now() - Duration::from_secs(1));
let _experiment_peer = DatabasePeer::create(&server.db().lock(), peer_contents)?;
// Step 1: Ensure that before redeeming, other endpoints aren't yet accessible.
let res = server
.request(test::EXPERIMENT_SUBCIDR_PEER_IP, "GET", "/v1/user/state")
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
        // Step 2: Ensure that redeeming an expired invitation fails.
let body = RedeemContents {
public_key: "YBVIgpfLbi/knrMCTEb0L6eVy0daiZnJJQkxBK9s+2I=".into(),
};
let res = server
.form_request(
test::EXPERIMENT_SUBCIDR_PEER_IP,
"POST",
"/v1/user/redeem",
&body,
)
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
Ok(())
}
#[tokio::test]
async fn test_candidates() -> Result<(), Error> {
let server = test::Server::new()?;
let peer = DatabasePeer::get(&server.db().lock(), test::DEVELOPER1_PEER_ID)?;
assert_eq!(peer.candidates, vec![]);
let candidates = vec!["1.1.1.1:51820".parse::<Endpoint>().unwrap()];
assert_eq!(
server
.form_request(
test::DEVELOPER1_PEER_IP,
"PUT",
"/v1/user/candidates",
&candidates
)
.await
.status(),
StatusCode::NO_CONTENT
);
let res = server
.request(test::DEVELOPER1_PEER_IP, "GET", "/v1/user/state")
.await;
assert_eq!(res.status(), StatusCode::OK);
let peer = DatabasePeer::get(&server.db().lock(), test::DEVELOPER1_PEER_ID)?;
assert_eq!(peer.candidates, candidates);
Ok(())
}
}
| 34.422747 | 107 | 0.526214 |
62b46d3146009068343c70c7afe8b9fbb66e0ca9 | 503 | use serde::Deserialize;
use crate::util::config::ConfigElement;
use crate::util::parsing::deserialize_base_url;
#[derive(Debug, Deserialize)]
pub struct User {
pub anonymous_access: bool,
pub user_registration: bool,
}
impl ConfigElement for User {
const KEY: &'static str = "user";
}
#[derive(Debug, Deserialize)]
pub struct Odm {
#[serde(deserialize_with = "deserialize_base_url")]
pub endpoint: url::Url,
}
impl ConfigElement for Odm {
const KEY: &'static str = "odm";
}
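// Illustrative config fragment matching the structs above (a sketch; values
// and the endpoint URL are made up for illustration):
//
//     [user]
//     anonymous_access = false
//     user_registration = true
//
//     [odm]
//     endpoint = "http://localhost:3000/"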
| 20.12 | 55 | 0.697813 |
3311bd8b9d4e295e89d4d2d6d07e4f5e5c7e6be1 | 328 | #[doc = "Reader of register US_LONBL"]
pub type R = crate::R<u32, super::US_LONBL>;
#[doc = "Reader of field `LONBL`"]
pub type LONBL_R = crate::R<u8, u8>;
impl R {
#[doc = "Bits 0:5 - LON Node Backlog Value"]
#[inline(always)]
pub fn lonbl(&self) -> LONBL_R {
LONBL_R::new((self.bits & 0x3f) as u8)
}
}
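// Illustrative field read using the usual svd2rust accessors (comment-only
// sketch; `usart` is a hypothetical peripheral handle, not defined here):
//
//     let backlog = usart.us_lonbl.read().lonbl().bits();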
| 27.333333 | 48 | 0.591463 |
0e8b2e2383285efcba4df3cff8c8496ab569e482 | 1,927 | use std::process::Command;
use super::error::ConvertError;
use super::Convert;
use std::path::{Path, PathBuf};
pub struct MscsbConverter;
impl super::Converter for MscsbConverter {
fn get_conversion(&self, file_extension: &str, _: &Path) -> Convert {
match file_extension {
"mscsb" => Convert::From,
"c" => Convert::To,
_ => Convert::None
}
}
fn convert_from(&self, path: &Path, _: Option<&str>) -> Result<PathBuf, ConvertError> {
        let mut outpath = path.to_path_buf();
outpath.set_extension("c");
let out = Command::new("python3")
.arg("mscdec/mscdec.py")
.arg("-x")
.arg("mscdec/mscinfo.xml")
.arg("-c")
.arg(path)
.arg("-o")
.arg(&outpath)
.output()?;
if !out.status.success() {
Err(ConvertError::msc(
&(String::from(
std::str::from_utf8(&out.stdout[..])?) + "\n" +
std::str::from_utf8(&out.stderr[..])?
)))
}
else {
            Ok(outpath)
}
}
fn convert_to(&self, path: &Path, _: Option<&str>) -> Result<PathBuf, ConvertError> {
        let mut outpath = path.to_path_buf();
outpath.set_extension("mscsb");
let out = Command::new("python3")
.arg("msclang/msclang.py")
.arg("-x")
.arg("msclang/mscinfo.xml")
.arg(path)
.arg("-o")
.arg(&outpath)
.output()?;
if !out.status.success() {
Err(ConvertError::msc(
&(String::from(
std::str::from_utf8(&out.stdout[..])?) + "\n" +
std::str::from_utf8(&out.stderr[..])?
)))
}
else {
            Ok(outpath)
}
}
}
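// Illustrative round trip through the converter (comment-only sketch; assumes
// the `Converter` trait from the parent module is in scope and the bundled
// Python scripts are available on disk):
//
//     let conv = MscsbConverter;
//     let c_file = conv.convert_from(Path::new("script.mscsb"), None)?;
//     let rebuilt = conv.convert_to(&c_file, None)?;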
| 30.109375 | 91 | 0.468085 |
efb1820aeb02a0d185171aa574b66fb97c645cb9 | 1,092 | pub trait Tree<T: Ord> {
/// Return the number of nodes.
fn get_size(&self) -> usize;
/// Return true if the tree contains 0 nodes.
fn is_empty(&self) -> bool;
/// Return true if the tree contains the given value.
fn search(&self, value: &T) -> bool;
/// Insert the given value as a node in the tree.
fn insert(&mut self, value: T);
/// Return the height of the tree or None if it's empty.
/// The height is the number of node between the root and the furthest leaf node.
fn get_height(&self) -> Option<isize>;
/// Return the maximum value of the tree or None if it's empty.
fn get_max(&self) -> Option<&T>;
/// Return the minimum value of the tree or None if it's empty.
fn get_min(&self) -> Option<&T>;
    /// Delete and return the maximum value of the tree or None if it's empty.
    fn delete_max(&mut self) -> Option<T>;
    /// Delete and return the minimum value of the tree or None if it's empty.
    fn delete_min(&mut self) -> Option<T>;
/// Delete the given value.
fn delete(&mut self, value: &T);
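    // Illustrative usage against any implementor (comment-only sketch; `Bst`
    // is a hypothetical type implementing `Tree<i32>`, not defined here):
    //
    //     let mut t = Bst::new();
    //     t.insert(3);
    //     assert!(t.search(&3));
    //     assert_eq!(t.delete_min(), Some(3));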
} | 34.125 | 85 | 0.630952 |
2f23dd703d384be2e8266d8b2640ca362b8633dc | 21,515 | //! JSON Web Encryption (JWE) represents encrypted content using JSON-based data structures.
//!
//! See [RFC7516](https://tools.ietf.org/html/rfc7516).
use crate::{
jose::jwk::Jwk,
key::{PrivateKey, PublicKey},
};
use aes_gcm::{aead::generic_array::typenum::Unsigned, AeadInPlace, Aes128Gcm, Aes256Gcm, NewAead};
use base64::DecodeError;
use digest::generic_array::GenericArray;
use rand::RngCore;
use rsa::{PaddingScheme, PublicKey as RsaPublicKeyInterface, RSAPrivateKey, RSAPublicKey};
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, convert::TryFrom};
use thiserror::Error;
type Aes192Gcm = aes_gcm::AesGcm<aes_gcm::aes::Aes192, aes_gcm::aead::generic_array::typenum::U12>;
// === error type === //
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum JweError {
/// RSA error
#[error("RSA error: {context}")]
Rsa { context: String },
/// AES-GCM error (opaque)
#[error("AES-GCM error (opaque)")]
AesGcm,
/// Json error
#[error("JSON error: {source}")]
Json { source: serde_json::Error },
/// Key error
#[error("Key error: {source}")]
Key { source: crate::key::KeyError },
/// Invalid token encoding
#[error("input isn't a valid token string: {input}")]
InvalidEncoding { input: String },
/// Couldn't decode base64
#[error("couldn't decode base64: {source}")]
Base64Decoding { source: DecodeError },
/// Input isn't valid utf8
#[error("input isn't valid utf8: {source}, input: {input:?}")]
InvalidUtf8 {
source: std::string::FromUtf8Error,
input: Vec<u8>,
},
/// Unsupported algorithm
#[error("unsupported algorithm: {algorithm}")]
UnsupportedAlgorithm { algorithm: String },
/// Invalid size
#[error("invalid size for {ty}: expected {expected}, got {got}")]
InvalidSize {
ty: &'static str,
expected: usize,
got: usize,
},
}
impl From<rsa::errors::Error> for JweError {
fn from(e: rsa::errors::Error) -> Self {
Self::Rsa { context: e.to_string() }
}
}
impl From<aes_gcm::Error> for JweError {
fn from(_: aes_gcm::Error) -> Self {
Self::AesGcm
}
}
impl From<serde_json::Error> for JweError {
fn from(e: serde_json::Error) -> Self {
Self::Json { source: e }
}
}
impl From<crate::key::KeyError> for JweError {
fn from(e: crate::key::KeyError) -> Self {
Self::Key { source: e }
}
}
impl From<DecodeError> for JweError {
fn from(e: DecodeError) -> Self {
Self::Base64Decoding { source: e }
}
}
// === JWE algorithms === //
/// `alg` header parameter values for JWE used to determine the Content Encryption Key (CEK)
///
/// [JSON Web Algorithms (JWA) draft-ietf-jose-json-web-algorithms-40 #4](https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40#section-4.1)
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum JweAlg {
/// RSAES-PKCS1-V1_5
///
/// Recommended- by RFC
#[serde(rename = "RSA1_5")]
RsaPkcs1v15,
/// RSAES OAEP using default parameters
///
/// Recommended+ by RFC
#[serde(rename = "RSA-OAEP")]
RsaOaep,
/// RSAES OAEP using SHA-256 and MGF1 with SHA-256
#[serde(rename = "RSA-OAEP-256")]
RsaOaep256,
/// AES Key Wrap with default initial value using 128 bit key (unsupported)
///
/// Recommended by RFC
#[serde(rename = "A128KW")]
AesKeyWrap128,
/// AES Key Wrap with default initial value using 192 bit key (unsupported)
#[serde(rename = "A192KW")]
AesKeyWrap192,
/// AES Key Wrap with default initial value using 256 bit key (unsupported)
///
/// Recommended by RFC
#[serde(rename = "A256KW")]
AesKeyWrap256,
/// Direct use of a shared symmetric key as the CEK
#[serde(rename = "dir")]
Direct,
/// Elliptic Curve Diffie-Hellman Ephemeral Static key agreement using Concat KDF (unsupported)
///
/// Recommended+ by RFC
#[serde(rename = "ECDH-ES")]
EcdhEs,
/// ECDH-ES using Concat KDF and CEK wrapped with "A128KW" (unsupported)
///
/// Recommended by RFC
///
/// Additional header used: "epk", "apu", "apv"
#[serde(rename = "ECDH-ES+A128KW")]
EcdhEsAesKeyWrap128,
/// ECDH-ES using Concat KDF and CEK wrapped with "A192KW" (unsupported)
///
/// Additional header used: "epk", "apu", "apv"
#[serde(rename = "ECDH-ES+A192KW")]
EcdhEsAesKeyWrap192,
/// ECDH-ES using Concat KDF and CEK wrapped with "A256KW" (unsupported)
///
/// Recommended by RFC
///
/// Additional header used: "epk", "apu", "apv"
#[serde(rename = "ECDH-ES+A256KW")]
EcdhEsAesKeyWrap256,
}
// === JWE header === //
/// `enc` header parameter values for JWE to encrypt content
///
/// [JSON Web Algorithms (JWA) draft-ietf-jose-json-web-algorithms-40 #5](https://www.rfc-editor.org/rfc/rfc7518.html#section-5.1)
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum JweEnc {
/// AES_128_CBC_HMAC_SHA_256 authenticated encryption algorithm. (unsupported)
///
/// Required by RFC
#[serde(rename = "A128CBC-HS256")]
Aes128CbcHmacSha256,
/// AES_192_CBC_HMAC_SHA_384 authenticated encryption algorithm. (unsupported)
#[serde(rename = "A192CBC-HS384")]
Aes192CbcHmacSha384,
/// AES_256_CBC_HMAC_SHA_512 authenticated encryption algorithm. (unsupported)
///
/// Required by RFC
#[serde(rename = "A256CBC-HS512")]
Aes256CbcHmacSha512,
/// AES GCM using 128-bit key.
///
/// Recommended by RFC
#[serde(rename = "A128GCM")]
Aes128Gcm,
/// AES GCM using 192-bit key.
#[serde(rename = "A192GCM")]
Aes192Gcm,
/// AES GCM using 256-bit key.
///
/// Recommended by RFC
#[serde(rename = "A256GCM")]
Aes256Gcm,
}
impl JweEnc {
pub fn key_size(self) -> usize {
match self {
Self::Aes128CbcHmacSha256 | Self::Aes128Gcm => <Aes128Gcm as NewAead>::KeySize::to_usize(),
Self::Aes192CbcHmacSha384 | Self::Aes192Gcm => <Aes192Gcm as NewAead>::KeySize::to_usize(),
Self::Aes256CbcHmacSha512 | Self::Aes256Gcm => <Aes256Gcm as NewAead>::KeySize::to_usize(),
}
}
pub fn nonce_size(self) -> usize {
match self {
Self::Aes128CbcHmacSha256 | Self::Aes128Gcm => <Aes128Gcm as AeadInPlace>::NonceSize::to_usize(),
Self::Aes192CbcHmacSha384 | Self::Aes192Gcm => <Aes192Gcm as AeadInPlace>::NonceSize::to_usize(),
Self::Aes256CbcHmacSha512 | Self::Aes256Gcm => <Aes256Gcm as AeadInPlace>::NonceSize::to_usize(),
}
}
pub fn tag_size(self) -> usize {
match self {
Self::Aes128CbcHmacSha256 | Self::Aes128Gcm => <Aes128Gcm as AeadInPlace>::TagSize::to_usize(),
Self::Aes192CbcHmacSha384 | Self::Aes192Gcm => <Aes192Gcm as AeadInPlace>::TagSize::to_usize(),
Self::Aes256CbcHmacSha512 | Self::Aes256Gcm => <Aes256Gcm as AeadInPlace>::TagSize::to_usize(),
}
}
}
// === JWE header === //
/// JWE specific part of JOSE header
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct JweHeader {
// -- specific to JWE -- //
/// Algorithm used to encrypt or determine the Content Encryption Key (CEK) (key wrapping...)
pub alg: JweAlg,
/// Content encryption algorithm to use
///
/// This must be a *symmetric* Authenticated Encryption with Associated Data (AEAD) algorithm.
pub enc: JweEnc,
// -- common with JWS -- //
/// JWK Set URL
///
/// URI that refers to a resource for a set of JSON-encoded public keys,
/// one of which corresponds to the key used to digitally sign the JWK.
#[serde(skip_serializing_if = "Option::is_none")]
pub jku: Option<String>,
/// JSON Web Key
///
/// The public key that corresponds to the key used to digitally sign the JWS.
/// This key is represented as a JSON Web Key (JWK).
#[serde(skip_serializing_if = "Option::is_none")]
pub jwk: Option<Jwk>,
/// Type header
///
/// Used by JWE applications to declare the media type [IANA.MediaTypes] of this complete JWE.
#[serde(skip_serializing_if = "Option::is_none")]
pub typ: Option<String>,
/// Content Type header
///
/// Used by JWE applications to declare the media type [IANA.MediaTypes] of the secured content (the payload).
#[serde(skip_serializing_if = "Option::is_none")]
pub cty: Option<String>,
// -- common with all -- //
/// Key ID Header
///
/// A hint indicating which key was used.
#[serde(skip_serializing_if = "Option::is_none")]
pub kid: Option<String>,
/// X.509 URL Header
///
/// URI that refers to a resource for an X.509 public key certificate or certificate chain.
#[serde(skip_serializing_if = "Option::is_none")]
pub x5u: Option<String>,
/// X.509 Certificate Chain
///
/// Chain of one or more PKIX certificates.
#[serde(skip_serializing_if = "Option::is_none")]
pub x5c: Option<Vec<String>>,
/// X.509 Certificate SHA-1 Thumbprint
///
/// base64url-encoded SHA-1 thumbprint (a.k.a. digest) of the DER encoding of an X.509 certificate.
#[serde(skip_serializing_if = "Option::is_none")]
pub x5t: Option<String>,
/// X.509 Certificate SHA-256 Thumbprint
///
/// base64url-encoded SHA-256 thumbprint (a.k.a. digest) of the DER encoding of an X.509 certificate.
#[serde(rename = "x5t#S256", alias = "x5t#s256", skip_serializing_if = "Option::is_none")]
pub x5t_s256: Option<String>,
}
impl JweHeader {
pub fn new(alg: JweAlg, enc: JweEnc) -> Self {
Self {
alg,
enc,
jku: None,
jwk: None,
typ: None,
cty: None,
kid: None,
x5u: None,
x5c: None,
x5t: None,
x5t_s256: None,
}
}
}
// === json web encryption === //
/// Provides an API to encrypt any kind of data (binary). JSON claims are part of `Jwt` only.
#[derive(Debug, Clone)]
pub struct Jwe {
pub header: JweHeader,
pub payload: Vec<u8>,
}
impl Jwe {
pub fn new(alg: JweAlg, enc: JweEnc, payload: Vec<u8>) -> Self {
Self {
header: JweHeader::new(alg, enc),
payload,
}
}
/// Encode with CEK encrypted and included in the token using asymmetric cryptography.
pub fn encode(self, asymmetric_key: &PublicKey) -> Result<String, JweError> {
encode_impl(self, EncoderMode::Normal(asymmetric_key))
}
/// Encode with provided CEK (a symmetric key). This will ignore `alg` value and override it with "dir".
pub fn encode_direct(self, cek: &[u8]) -> Result<String, JweError> {
encode_impl(self, EncoderMode::Direct(cek))
}
    /// Decode a token, recovering the encrypted CEK with the given asymmetric private key.
pub fn decode(encoded_token: &str, key: &PrivateKey) -> Result<Jwe, JweError> {
decode_impl(encoded_token, DecoderMode::Normal(key))
}
/// Decode with provided CEK (a symmetric key).
pub fn decode_direct(encoded_token: &str, cek: &[u8]) -> Result<Jwe, JweError> {
decode_impl(encoded_token, DecoderMode::Direct(cek))
}
}
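// Illustrative direct-key round trip (comment-only sketch; the 16-byte key
// below is arbitrary and matches JweEnc::Aes128Gcm's key size):
//
//     let token = Jwe::new(JweAlg::Direct, JweEnc::Aes128Gcm, payload)
//         .encode_direct(b"0123456789abcdef")?;
//     let decoded = Jwe::decode_direct(&token, b"0123456789abcdef")?;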
// encoder
#[derive(Debug, Clone)]
enum EncoderMode<'a> {
Normal(&'a PublicKey),
Direct(&'a [u8]),
}
fn encode_impl(jwe: Jwe, mode: EncoderMode) -> Result<String, JweError> {
let mut header = jwe.header;
let (encrypted_key_base64, jwe_cek) = match mode {
EncoderMode::Direct(symmetric_key) => {
if symmetric_key.len() != header.enc.key_size() {
return Err(JweError::InvalidSize {
ty: "symmetric key",
expected: header.enc.key_size(),
got: symmetric_key.len(),
});
}
// Override `alg` header with "dir"
header.alg = JweAlg::Direct;
(
base64::encode_config(&[], base64::URL_SAFE_NO_PAD),
Cow::Borrowed(symmetric_key),
)
}
EncoderMode::Normal(public_key) => {
// Currently, only rsa is supported
let rsa_public_key = RSAPublicKey::try_from(public_key)?;
let mut rng = rand::rngs::OsRng;
let mut symmetric_key = vec![0u8; header.enc.key_size()];
rng.fill_bytes(&mut symmetric_key);
let padding = match header.alg {
JweAlg::RsaPkcs1v15 => PaddingScheme::new_pkcs1v15_encrypt(),
JweAlg::RsaOaep => PaddingScheme::new_oaep::<sha1::Sha1>(),
JweAlg::RsaOaep256 => PaddingScheme::new_oaep::<sha2::Sha256>(),
unsupported => {
return Err(JweError::UnsupportedAlgorithm {
algorithm: format!("{:?}", unsupported),
})
}
};
let encrypted_key = rsa_public_key.encrypt(&mut rng, padding, &symmetric_key)?;
(
base64::encode_config(&encrypted_key, base64::URL_SAFE_NO_PAD),
Cow::Owned(symmetric_key),
)
}
};
let mut buffer = jwe.payload;
    let nonce = <aes_gcm::aead::Nonce<_> as From<[u8; 12]>>::from(rand::random()); // 96-bit nonce for AES-GCM
let aad = b""; // The Additional Authenticated Data value used is the empty octet string for AES-GCM.
let authentication_tag = match header.enc {
JweEnc::Aes128Gcm => {
Aes128Gcm::new(GenericArray::from_slice(&jwe_cek)).encrypt_in_place_detached(&nonce, aad, &mut buffer)?
}
JweEnc::Aes192Gcm => {
Aes192Gcm::new(GenericArray::from_slice(&jwe_cek)).encrypt_in_place_detached(&nonce, aad, &mut buffer)?
}
JweEnc::Aes256Gcm => {
Aes256Gcm::new(GenericArray::from_slice(&jwe_cek)).encrypt_in_place_detached(&nonce, aad, &mut buffer)?
}
unsupported => {
return Err(JweError::UnsupportedAlgorithm {
algorithm: format!("{:?}", unsupported),
})
}
};
let protected_header_base64 = base64::encode_config(&serde_json::to_vec(&header)?, base64::URL_SAFE_NO_PAD);
let initialization_vector_base64 = base64::encode_config(nonce.as_slice(), base64::URL_SAFE_NO_PAD);
let ciphertext_base64 = base64::encode_config(&buffer, base64::URL_SAFE_NO_PAD);
let authentication_tag_base64 = base64::encode_config(&authentication_tag, base64::URL_SAFE_NO_PAD);
Ok([
protected_header_base64,
encrypted_key_base64,
initialization_vector_base64,
ciphertext_base64,
authentication_tag_base64,
]
.join("."))
}
// decoder
#[derive(Debug, Clone)]
enum DecoderMode<'a> {
Normal(&'a PrivateKey),
Direct(&'a [u8]),
}
struct Parts {
protected_header: Vec<u8>,
encrypted_key: Vec<u8>,
initialization_vector: Vec<u8>,
ciphertext: Vec<u8>,
authentication_tag: Vec<u8>,
}
impl Parts {
fn break_down(encoded_token: &str) -> Option<Self> {
let mut split = encoded_token.splitn(5, '.');
Some(Parts {
protected_header: base64::decode_config(split.next()?, base64::URL_SAFE_NO_PAD).ok()?,
encrypted_key: base64::decode_config(split.next()?, base64::URL_SAFE_NO_PAD).ok()?,
initialization_vector: base64::decode_config(split.next()?, base64::URL_SAFE_NO_PAD).ok()?,
ciphertext: base64::decode_config(split.next()?, base64::URL_SAFE_NO_PAD).ok()?,
authentication_tag: base64::decode_config(split.next()?, base64::URL_SAFE_NO_PAD).ok()?,
})
}
}
fn decode_impl<'a>(encoded_token: &str, mode: DecoderMode<'a>) -> Result<Jwe, JweError> {
let parts = Parts::break_down(encoded_token).ok_or_else(|| JweError::InvalidEncoding {
input: encoded_token.to_owned(),
})?;
let header = serde_json::from_slice::<JweHeader>(&parts.protected_header)?;
let jwe_cek = match mode {
DecoderMode::Direct(symmetric_key) => Cow::Borrowed(symmetric_key),
DecoderMode::Normal(private_key) => {
let rsa_private_key = RSAPrivateKey::try_from(private_key)?;
let padding = match header.alg {
JweAlg::RsaPkcs1v15 => PaddingScheme::new_pkcs1v15_encrypt(),
JweAlg::RsaOaep => PaddingScheme::new_oaep::<sha1::Sha1>(),
JweAlg::RsaOaep256 => PaddingScheme::new_oaep::<sha2::Sha256>(),
unsupported => {
return Err(JweError::UnsupportedAlgorithm {
algorithm: format!("{:?}", unsupported),
})
}
};
let decrypted_key = rsa_private_key.decrypt(padding, &parts.encrypted_key)?;
Cow::Owned(decrypted_key)
}
};
if jwe_cek.len() != header.enc.key_size() {
return Err(JweError::InvalidSize {
ty: "symmetric key",
expected: header.enc.key_size(),
got: jwe_cek.len(),
});
}
if parts.initialization_vector.len() != header.enc.nonce_size() {
return Err(JweError::InvalidSize {
ty: "initialization vector (nonce)",
expected: header.enc.nonce_size(),
got: parts.initialization_vector.len(),
});
}
if parts.authentication_tag.len() != header.enc.tag_size() {
return Err(JweError::InvalidSize {
ty: "authentication tag",
expected: header.enc.tag_size(),
got: parts.authentication_tag.len(),
});
}
let mut buffer = parts.ciphertext;
let nonce = GenericArray::from_slice(&parts.initialization_vector);
let aad = b""; // The Additional Authenticated Data value used is the empty octet string for AES-GCM.
match header.enc {
JweEnc::Aes128Gcm => Aes128Gcm::new(GenericArray::from_slice(&jwe_cek)).decrypt_in_place_detached(
&nonce,
aad,
&mut buffer,
GenericArray::from_slice(&parts.authentication_tag),
)?,
JweEnc::Aes192Gcm => Aes192Gcm::new(GenericArray::from_slice(&jwe_cek)).decrypt_in_place_detached(
&nonce,
aad,
&mut buffer,
GenericArray::from_slice(&parts.authentication_tag),
)?,
JweEnc::Aes256Gcm => Aes256Gcm::new(GenericArray::from_slice(&jwe_cek)).decrypt_in_place_detached(
&nonce,
aad,
&mut buffer,
GenericArray::from_slice(&parts.authentication_tag),
)?,
unsupported => {
return Err(JweError::UnsupportedAlgorithm {
algorithm: format!("{:?}", unsupported),
})
}
};
Ok(Jwe {
header,
payload: buffer,
})
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{key::PrivateKey, pem::Pem};
fn get_private_key_1() -> PrivateKey {
let pk_pem = crate::test_files::RSA_2048_PK_1.parse::<Pem>().unwrap();
PrivateKey::from_pem(&pk_pem).unwrap()
}
fn get_private_key_2() -> PrivateKey {
let pk_pem = crate::test_files::RSA_2048_PK_7
.parse::<Pem>()
.expect("private key pem");
PrivateKey::from_pem(&pk_pem).expect("private_key")
}
#[test]
fn rsa_oaep_aes_128_gcm() {
let payload = "何だと?……無駄な努力だ?……百も承知だ!だがな、勝つ望みがある時ばかり、戦うのとは訳が違うぞ!"
.as_bytes()
.to_vec();
let private_key = get_private_key_1();
let public_key = private_key.to_public_key();
let jwe = Jwe::new(JweAlg::RsaOaep, JweEnc::Aes128Gcm, payload);
let encoded = jwe.clone().encode(&public_key).unwrap();
let decoded = Jwe::decode(&encoded, &private_key).unwrap();
assert_eq!(jwe.payload, decoded.payload);
assert_eq!(jwe.header, decoded.header);
}
#[test]
fn rsa_pkcs1v15_aes_128_gcm_bad_key() {
let payload = "そうとも! 負けると知って戦うのが、遙かに美しいのだ!"
.as_bytes()
.to_vec();
let private_key = get_private_key_1();
let public_key = get_private_key_2().to_public_key();
let jwe = Jwe::new(JweAlg::RsaPkcs1v15, JweEnc::Aes128Gcm, payload);
let encoded = jwe.clone().encode(&public_key).unwrap();
let err = Jwe::decode(&encoded, &private_key).err().unwrap();
assert_eq!(err.to_string(), "RSA error: decryption error");
}
#[test]
fn direct_aes_256_gcm() {
let payload = "さあ、取れ、取るがいい!だがな、貴様たちがいくら騒いでも、あの世へ、俺が持って行くものが一つある!それはな…".as_bytes().to_vec();
let key = "わたしの……心意気だ!!";
let jwe = Jwe::new(JweAlg::Direct, JweEnc::Aes256Gcm, payload);
let encoded = jwe.clone().encode_direct(key.as_bytes()).unwrap();
let decoded = Jwe::decode_direct(&encoded, key.as_bytes()).unwrap();
assert_eq!(jwe.payload, decoded.payload);
assert_eq!(jwe.header, decoded.header);
}
#[test]
fn direct_aes_192_gcm_bad_key() {
let payload = "和解をしよう? 俺が? 真っ平だ! 真っ平御免だ!".as_bytes().to_vec();
let jwe = Jwe::new(JweAlg::Direct, JweEnc::Aes192Gcm, payload);
let encoded = jwe.clone().encode_direct(b"abcdefghabcdefghabcdefgh").unwrap();
let err = Jwe::decode_direct(&encoded, b"zzzzzzzzabcdefghzzzzzzzz").err().unwrap();
assert_eq!(err.to_string(), "AES-GCM error (opaque)");
}
}
| 32.947933 | 153 | 0.608878 |
69f45c421f0a1bfe4c9ce0f83a6db743377ac2b7 | 774 | use crate::support::{basic_lib_manifest, project};
#[test]
fn edition_works_for_build_script() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = 'foo'
version = '0.1.0'
edition = '2018'
[build-dependencies]
a = { path = 'a' }
"#,
)
.file("src/lib.rs", "")
.file(
"build.rs",
r#"
fn main() {
a::foo();
}
"#,
)
.file("a/Cargo.toml", &basic_lib_manifest("a"))
.file("a/src/lib.rs", "pub fn foo() {}")
.build();
p.cargo("build -v").masquerade_as_nightly_cargo().run();
}
| 23.454545 | 60 | 0.379845 |
e87fa3e0f78581756b77655038f00a7ef91776a1 | 14,737 | // Copyright 2022 pyke.io
// 2019-2021 Tauri Programme within The Commons Conservancy
// [https://tauri.studio/]
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{
fs::File,
io::Read,
path::{Path, PathBuf},
process::Command,
str::FromStr
};
use anyhow::Context;
#[cfg(target_os = "linux")]
use heck::ToKebabCase;
use millennium_bundler::{AppCategory, BundleBinary, BundleSettings, DebianSettings, MacOsSettings, PackageSettings, UpdaterSettings, WindowsSettings};
use serde::Deserialize;
use crate::{
helpers::{
app_paths::millennium_dir,
config::{wix_settings, Config},
manifest::Manifest,
Logger
},
CommandExt
};
/// The `workspace` section of the app configuration (read from Cargo.toml).
#[derive(Clone, Debug, Deserialize)]
struct WorkspaceSettings {
/// the workspace members.
members: Option<Vec<String>>
}
#[derive(Clone, Debug, Deserialize)]
struct BinarySettings {
name: String,
path: Option<String>
}
/// The package settings.
#[derive(Debug, Clone, Deserialize)]
pub struct CargoPackageSettings {
/// the package's name.
pub name: Option<String>,
/// the package's version.
pub version: Option<String>,
/// the package's description.
pub description: Option<String>,
/// the package's homepage.
pub homepage: Option<String>,
/// the package's authors.
pub authors: Option<Vec<String>>,
/// the default binary to run.
pub default_run: Option<String>
}
/// The Cargo settings (Cargo.toml root descriptor).
#[derive(Clone, Debug, Deserialize)]
struct CargoSettings {
/// the package settings.
///
/// it's optional because ancestor workspace Cargo.toml files may not have package info.
package: Option<CargoPackageSettings>,
/// the workspace settings.
///
/// it's present if the read Cargo.toml belongs to a workspace root.
workspace: Option<WorkspaceSettings>,
/// the binary targets configuration.
bin: Option<Vec<BinarySettings>>
}
impl CargoSettings {
/// Try to load a set of CargoSettings from a "Cargo.toml" file in the specified directory.
fn load(dir: &Path) -> crate::Result<Self> {
let toml_path = dir.join("Cargo.toml");
let mut toml_str = String::new();
let mut toml_file = File::open(toml_path).with_context(|| "failed to open Cargo.toml")?;
toml_file.read_to_string(&mut toml_str).with_context(|| "failed to read Cargo.toml")?;
toml::from_str(&toml_str)
.with_context(|| "failed to parse Cargo.toml")
.map_err(Into::into)
}
}
#[derive(Deserialize)]
struct CargoBuildConfig {
#[serde(rename = "target-dir")]
target_dir: Option<String>
}
#[derive(Deserialize)]
struct CargoConfig {
build: Option<CargoBuildConfig>
}
pub fn build_project(runner: String, args: Vec<String>) -> crate::Result<()> {
let mut command = Command::new(&runner);
command.args(&["build", "--features=custom-protocol"]).args(args);
command.pipe()?;
let status = command.status().with_context(|| format!("failed to run {}", runner))?;
if !status.success() {
return Err(anyhow::anyhow!(format!("Result of `{} build` operation was unsuccessful: {}", runner, status)));
}
Ok(())
}
pub struct AppSettings {
cargo_settings: CargoSettings,
cargo_package_settings: CargoPackageSettings,
package_settings: PackageSettings
}
impl AppSettings {
pub fn new(config: &Config) -> crate::Result<Self> {
let cargo_settings = CargoSettings::load(&millennium_dir()).with_context(|| "failed to load cargo settings")?;
let cargo_package_settings = match &cargo_settings.package {
Some(package_info) => package_info.clone(),
			None => return Err(anyhow::anyhow!("No package info in the config file"))
};
let package_settings = PackageSettings {
product_name: config.package.product_name.clone().unwrap_or_else(|| {
cargo_package_settings
.name
.clone()
.expect("Cargo manifest must have the `package.name` field")
}),
version: config.package.version.clone().unwrap_or_else(|| {
cargo_package_settings
.version
.clone()
.expect("Cargo manifest must have the `package.version` field")
}),
description: cargo_package_settings.description.clone().unwrap_or_default(),
homepage: cargo_package_settings.homepage.clone(),
authors: cargo_package_settings.authors.clone(),
default_run: cargo_package_settings.default_run.clone()
};
Ok(Self {
cargo_settings,
cargo_package_settings,
package_settings
})
}
pub fn cargo_package_settings(&self) -> &CargoPackageSettings {
&self.cargo_package_settings
}
pub fn get_bundle_settings(&self, config: &Config, manifest: &Manifest) -> crate::Result<BundleSettings> {
millennium_config_to_bundle_settings(
manifest,
config.millennium.bundle.clone(),
config.millennium.system_tray.clone(),
config.millennium.updater.clone()
)
}
pub fn get_out_dir(&self, target: Option<String>, debug: bool) -> crate::Result<PathBuf> {
let millennium_dir = millennium_dir();
let workspace_dir = get_workspace_dir(&millennium_dir);
get_target_dir(&workspace_dir, target, !debug)
}
pub fn get_package_settings(&self) -> PackageSettings {
self.package_settings.clone()
}
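	/// Collects the bundle binaries: explicit `[[bin]]` targets from Cargo.toml,
	/// binaries under `src/bin`, and the `default-run` target, marking which one
	/// is the main binary.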
pub fn get_binaries(&self, config: &Config) -> crate::Result<Vec<BundleBinary>> {
let mut binaries: Vec<BundleBinary> = vec![];
if let Some(bin) = &self.cargo_settings.bin {
let default_run = self.package_settings.default_run.clone().unwrap_or_else(|| "".to_string());
for binary in bin {
binaries.push(
if Some(&binary.name) == self.cargo_package_settings.name.as_ref() || binary.name.as_str() == default_run {
BundleBinary::new(config.package.binary_name().unwrap_or_else(|| binary.name.clone()), true)
} else {
BundleBinary::new(binary.name.clone(), false)
}
.set_src_path(binary.path.clone())
)
}
}
let mut bins_path = millennium_dir();
bins_path.push("src/bin");
if let Ok(fs_bins) = std::fs::read_dir(bins_path) {
for entry in fs_bins {
let path = entry?.path();
if let Some(name) = path.file_stem() {
let bin_exists = binaries
.iter()
.any(|bin| bin.name() == name || path.ends_with(bin.src_path().unwrap_or(&"".to_string())));
if !bin_exists {
binaries.push(BundleBinary::new(name.to_string_lossy().to_string(), false))
}
}
}
}
if let Some(default_run) = self.package_settings.default_run.as_ref() {
match binaries.iter_mut().find(|bin| bin.name() == default_run) {
Some(bin) => {
if let Some(bin_name) = config.package.binary_name() {
bin.set_name(bin_name);
}
}
None => {
binaries.push(BundleBinary::new(config.package.binary_name().unwrap_or_else(|| default_run.to_string()), true));
}
}
}
match binaries.len() {
0 => binaries.push(BundleBinary::new(
#[cfg(target_os = "linux")]
self.package_settings.product_name.to_kebab_case(),
#[cfg(not(target_os = "linux"))]
self.package_settings.product_name.clone(),
true
)),
1 => binaries.get_mut(0).unwrap().set_main(true),
_ => {}
}
Ok(binaries)
}
}
/// This function determines where 'target' dir is and suffixes it with 'release' or 'debug'
/// to determine where the compiled binary will be located.
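/// For example, with `CARGO_TARGET_DIR` unset and no `.cargo/config` override,
/// a release build targeting `x86_64-pc-windows-msvc` resolves to
/// `<project root>/target/x86_64-pc-windows-msvc/release`.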
fn get_target_dir(project_root_dir: &Path, target: Option<String>, is_release: bool) -> crate::Result<PathBuf> {
let mut path: PathBuf = match std::env::var_os("CARGO_TARGET_DIR") {
Some(target_dir) => target_dir.into(),
None => {
let mut root_dir = project_root_dir.to_path_buf();
let target_path: Option<PathBuf> = loop {
// cargo reads configs under .cargo/config.toml or .cargo/config
let mut cargo_config_path = root_dir.join(".cargo/config");
if !cargo_config_path.exists() {
cargo_config_path = root_dir.join(".cargo/config.toml");
}
// if the path exists, parse it
if cargo_config_path.exists() {
let mut config_str = String::new();
let mut config_file = File::open(&cargo_config_path).with_context(|| format!("failed to open {:?}", cargo_config_path))?;
config_file
.read_to_string(&mut config_str)
.with_context(|| "failed to read cargo config file")?;
let config: CargoConfig = toml::from_str(&config_str).with_context(|| "failed to parse cargo config file")?;
if let Some(build) = config.build {
if let Some(target_dir) = build.target_dir {
break Some(target_dir.into());
}
}
}
if !root_dir.pop() {
break None;
}
};
target_path.unwrap_or_else(|| project_root_dir.join("target"))
}
};
if let Some(ref triple) = target {
path.push(triple);
}
path.push(if is_release { "release" } else { "debug" });
Ok(path)
}
/// Walks up the file system looking for a Cargo.toml file.
/// If one is found before reaching the root, the current_dir's package belongs to that parent
/// workspace, provided it is listed in [workspace.members].
///
/// If this package is part of a workspace, returns the path to the workspace directory;
/// otherwise returns the current directory.
pub fn get_workspace_dir(current_dir: &Path) -> PathBuf {
let mut dir = current_dir.to_path_buf();
let project_path = dir.clone();
let logger = Logger::new("millennium:rust");
while dir.pop() {
if dir.join("Cargo.toml").exists() {
match CargoSettings::load(&dir) {
Ok(cargo_settings) => {
if let Some(workspace_settings) = cargo_settings.workspace {
if let Some(members) = workspace_settings.members {
if members.iter().any(|member| {
glob::glob(&dir.join(member).to_string_lossy())
.unwrap()
.any(|p| p.unwrap() == project_path)
}) {
return dir;
}
}
}
}
Err(e) => {
logger.warn(format!(
"Found `{}`, which may define a parent workspace, but \
failed to parse it. If this is indeed a parent workspace, undefined behavior may occur: \
\n {:#}",
dir.display(),
e
));
}
}
}
}
// Nothing found walking up the file system, return the starting directory
current_dir.to_path_buf()
}
#[allow(unused_variables)]
fn millennium_config_to_bundle_settings(
manifest: &Manifest,
config: crate::helpers::config::BundleConfig,
system_tray_config: Option<crate::helpers::config::SystemTrayConfig>,
updater_config: crate::helpers::config::UpdaterConfig
) -> crate::Result<BundleSettings> {
#[cfg(windows)]
let windows_icon_path = PathBuf::from(
config
.icon
.iter()
.find(|i| i.ends_with(".ico"))
.cloned()
.expect("the bundle config must have a `.ico` icon")
);
#[cfg(not(windows))]
let windows_icon_path = PathBuf::from("");
#[allow(unused_mut)]
let mut resources = config.resources.unwrap_or_default();
#[allow(unused_mut)]
let mut depends = config.deb.depends.unwrap_or_default();
#[cfg(target_os = "linux")]
{
if let Some(system_tray_config) = &system_tray_config {
let mut icon_path = system_tray_config.icon_path.clone();
icon_path.set_extension("png");
resources.push(icon_path.display().to_string());
depends.push("libappindicator3-1".to_string());
}
// provides `libwebkit2gtk-4.0.so.37` and all `4.0` versions have the -37 package name
depends.push("libwebkit2gtk-4.0-37".to_string());
depends.push("libgtk-3-0".to_string());
}
#[cfg(windows)]
{
if let Some(webview_fixed_runtime_path) = &config.windows.webview_fixed_runtime_path {
resources.push(webview_fixed_runtime_path.display().to_string());
}
}
let signing_identity = match std::env::var_os("APPLE_SIGNING_IDENTITY") {
Some(signing_identity) => Some(
signing_identity
.to_str()
.expect("failed to convert APPLE_SIGNING_IDENTITY to string")
.to_string()
),
None => config.macos.signing_identity
};
let provider_short_name = match std::env::var_os("APPLE_PROVIDER_SHORT_NAME") {
Some(provider_short_name) => Some(
provider_short_name
.to_str()
.expect("failed to convert APPLE_PROVIDER_SHORT_NAME to string")
.to_string()
),
None => config.macos.provider_short_name
};
Ok(BundleSettings {
identifier: Some(config.identifier),
icon: Some(config.icon),
resources: if resources.is_empty() { None } else { Some(resources) },
copyright: config.copyright,
category: match config.category {
Some(category) => Some(AppCategory::from_str(&category).map_err(|e| match e {
Some(e) => anyhow::anyhow!("invalid category, did you mean `{}`?", e),
None => anyhow::anyhow!("invalid category")
})?),
None => None
},
short_description: config.short_description,
long_description: config.long_description,
external_bin: config.external_bin,
deb: DebianSettings {
depends: if depends.is_empty() { None } else { Some(depends) },
use_bootstrapper: Some(config.deb.use_bootstrapper),
files: config.deb.files
},
macos: MacOsSettings {
frameworks: config.macos.frameworks,
minimum_system_version: config.macos.minimum_system_version,
license: config.macos.license,
use_bootstrapper: Some(config.macos.use_bootstrapper),
exception_domain: config.macos.exception_domain,
signing_identity,
provider_short_name,
entitlements: config.macos.entitlements,
info_plist_path: {
let path = millennium_dir().join("Info.plist");
if path.exists() { Some(path) } else { None }
}
},
windows: WindowsSettings {
timestamp_url: config.windows.timestamp_url,
tsp: config.windows.tsp,
digest_algorithm: config.windows.digest_algorithm,
certificate_thumbprint: config.windows.certificate_thumbprint,
wix: config.windows.wix.map(|w| {
let mut wix = wix_settings(w);
wix.license = wix.license.map(|l| millennium_dir().join(l));
wix
}),
icon_path: windows_icon_path,
webview_fixed_runtime_path: config.windows.webview_fixed_runtime_path,
allow_downgrades: config.windows.allow_downgrades
},
updater: Some(UpdaterSettings {
active: updater_config.active,
		// `dialog` defaults to true, so the deserialized config already carries a
		// value and no `unwrap_or` fallback is needed here.
dialog: updater_config.dialog,
pubkey: updater_config.pubkey,
endpoints: updater_config
.endpoints
.map(|endpoints| endpoints.iter().map(|e| e.to_string()).collect())
}),
..Default::default()
})
}
| 32.036957 | 150 | 0.694239 |
75c3b089397dc15065ddc358a89e778cf430a307 | 4,930 | // Copyright 2014 Tyler Neely
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Rust wrapper for RocksDB.
//!
//! # Examples
//!
//! ```
//! use rocksdb::{DB, Options};
//! // NB: db is automatically closed at end of lifetime
//! let path = "_path_for_rocksdb_storage";
//! {
//! let db = DB::open_default(path).unwrap();
//! db.put(b"my key", b"my value").unwrap();
//! match db.get(b"my key") {
//! Ok(Some(value)) => println!("retrieved value {}", String::from_utf8(value).unwrap()),
//! Ok(None) => println!("value not found"),
//! Err(e) => println!("operational problem encountered: {}", e),
//! }
//! db.delete(b"my key").unwrap();
//! }
//! let _ = DB::destroy(&Options::default(), path);
//! ```
//!
//! Opening a database and a single column family with custom options:
//!
//! ```
//! use rocksdb::{DB, ColumnFamilyDescriptor, Options};
//!
//! let path = "_path_for_rocksdb_storage_with_cfs";
//! let mut cf_opts = Options::default();
//! cf_opts.set_max_write_buffer_number(16);
//! let cf = ColumnFamilyDescriptor::new("cf1", cf_opts);
//!
//! let mut db_opts = Options::default();
//! db_opts.create_missing_column_families(true);
//! db_opts.create_if_missing(true);
//! {
//! let db = DB::open_cf_descriptors(&db_opts, path, vec![cf]).unwrap();
//! }
//! let _ = DB::destroy(&db_opts, path);
//! ```
//!
#[macro_use]
mod ffi_util;
pub mod backup;
pub mod checkpoint;
mod column_family;
pub mod compaction_filter;
mod comparator;
mod db;
mod db_iterator;
mod db_options;
mod db_pinnable_slice;
pub mod merge_operator;
mod slice_transform;
mod snapshot;
mod write_batch;
pub use crate::{
column_family::{ColumnFamily, ColumnFamilyDescriptor},
compaction_filter::Decision as CompactionDecision,
db::DB,
db_iterator::{DBIterator, DBRawIterator, DBWALIterator, Direction, IteratorMode},
db_options::{
BlockBasedIndexType, BlockBasedOptions, DBCompactionStyle, DBCompressionType,
DBRecoveryMode, DataBlockIndexType, FlushOptions, MemtableFactory, Options,
PlainTableFactoryOptions, ReadOptions, WriteOptions,
},
db_pinnable_slice::DBPinnableSlice,
merge_operator::MergeOperands,
slice_transform::SliceTransform,
snapshot::Snapshot,
write_batch::{WriteBatch, WriteBatchIterator},
};
use librocksdb_sys as ffi;
use std::error;
use std::fmt;
/// A simple wrapper around a string, used for errors reported from
/// ffi calls.
#[derive(Debug, Clone, PartialEq)]
pub struct Error {
message: String,
}
impl Error {
fn new(message: String) -> Error {
Error { message }
}
pub fn into_string(self) -> String {
self.into()
}
}
impl AsRef<str> for Error {
fn as_ref(&self) -> &str {
&self.message
}
}
impl From<Error> for String {
fn from(e: Error) -> String {
e.message
}
}
impl error::Error for Error {
fn description(&self) -> &str {
&self.message
}
}
impl fmt::Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
self.message.fmt(formatter)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn is_send() {
// test (at compile time) that certain types implement the auto-trait Send, either directly for
// pointer-wrapping types or transitively for types with all Send fields
fn is_send<T: Send>() {
// dummy function just used for its parameterized type bound
}
is_send::<DB>();
is_send::<DBIterator<'_>>();
is_send::<DBRawIterator<'_>>();
is_send::<Snapshot>();
is_send::<Options>();
is_send::<ReadOptions>();
is_send::<WriteOptions>();
is_send::<BlockBasedOptions>();
is_send::<PlainTableFactoryOptions>();
is_send::<ColumnFamilyDescriptor>();
is_send::<ColumnFamily>();
}
#[test]
fn is_sync() {
// test (at compile time) that certain types implement the auto-trait Sync
fn is_sync<T: Sync>() {
// dummy function just used for its parameterized type bound
}
is_sync::<DB>();
is_sync::<Snapshot>();
is_sync::<Options>();
is_sync::<ReadOptions>();
is_sync::<WriteOptions>();
is_sync::<BlockBasedOptions>();
is_sync::<PlainTableFactoryOptions>();
is_sync::<ColumnFamilyDescriptor>();
}
}
| 27.237569 | 103 | 0.639148 |
5b8a1e54a3f1b908dd6abf9a23d0b52793b912b5 | 9,280 | use anyhow::{Context, Result};
use bitcoin::consensus::{deserialize, serialize};
use bitcoin::{Block, BlockHash, OutPoint, Txid};
use crate::{
chain::Chain,
daemon::Daemon,
db::{DBStore, Row, WriteBatch},
metrics::{self, Gauge, Histogram, Metrics},
signals::ExitFlag,
types::{HashPrefixRow, HeaderRow, ScriptHash, ScriptHashRow, SpendingPrefixRow, TxidRow},
};
#[derive(Clone)]
struct Stats {
update_duration: Histogram,
update_size: Histogram,
height: Gauge,
db_properties: Gauge,
}
impl Stats {
fn new(metrics: &Metrics) -> Self {
Self {
update_duration: metrics.histogram_vec(
"index_update_duration",
"Index update duration (in seconds)",
"step",
metrics::default_duration_buckets(),
),
update_size: metrics.histogram_vec(
"index_update_size",
"Index update size (in bytes)",
"step",
metrics::default_size_buckets(),
),
height: metrics.gauge("index_height", "Indexed block height", "type"),
db_properties: metrics.gauge("index_db_properties", "Index DB properties", "name"),
}
}
fn observe_duration<T>(&self, label: &str, f: impl FnOnce() -> T) -> T {
self.update_duration.observe_duration(label, f)
}
fn observe_size(&self, label: &str, rows: &[Row]) {
self.update_size.observe(label, db_rows_size(rows));
}
fn observe_batch(&self, batch: &WriteBatch) {
self.observe_size("write_funding_rows", &batch.funding_rows);
self.observe_size("write_spending_rows", &batch.spending_rows);
self.observe_size("write_txid_rows", &batch.txid_rows);
self.observe_size("write_header_rows", &batch.header_rows);
debug!(
"writing {} funding and {} spending rows from {} transactions, {} blocks",
batch.funding_rows.len(),
batch.spending_rows.len(),
batch.txid_rows.len(),
batch.header_rows.len()
);
}
fn observe_chain(&self, chain: &Chain) {
self.height.set("tip", chain.height() as f64);
}
fn observe_db(&self, store: &DBStore) {
for (cf, name, value) in store.get_properties() {
self.db_properties
.set(&format!("{}:{}", name, cf), value as f64);
}
}
}
struct IndexResult {
header_row: HeaderRow,
funding_rows: Vec<HashPrefixRow>,
spending_rows: Vec<HashPrefixRow>,
txid_rows: Vec<HashPrefixRow>,
}
impl IndexResult {
fn extend(&self, batch: &mut WriteBatch) {
let funding_rows = self.funding_rows.iter().map(HashPrefixRow::to_db_row);
batch.funding_rows.extend(funding_rows);
let spending_rows = self.spending_rows.iter().map(HashPrefixRow::to_db_row);
batch.spending_rows.extend(spending_rows);
let txid_rows = self.txid_rows.iter().map(HashPrefixRow::to_db_row);
batch.txid_rows.extend(txid_rows);
batch.header_rows.push(self.header_row.to_db_row());
batch.tip_row = serialize(&self.header_row.header.block_hash()).into_boxed_slice();
}
}
/// Confirmed transactions' address index
pub struct Index {
store: DBStore,
batch_size: usize,
lookup_limit: Option<usize>,
chain: Chain,
stats: Stats,
}
impl Index {
pub(crate) fn load(
store: DBStore,
mut chain: Chain,
metrics: &Metrics,
batch_size: usize,
lookup_limit: Option<usize>,
reindex_last_blocks: usize,
) -> Result<Self> {
if let Some(row) = store.get_tip() {
let tip = deserialize(&row).expect("invalid tip");
let headers = store
.read_headers()
.into_iter()
.map(|row| HeaderRow::from_db_row(&row).header)
.collect();
chain.load(headers, tip);
chain.drop_last_headers(reindex_last_blocks);
};
let stats = Stats::new(metrics);
stats.observe_chain(&chain);
stats.observe_db(&store);
Ok(Index {
store,
batch_size,
lookup_limit,
chain,
stats,
})
}
pub(crate) fn chain(&self) -> &Chain {
&self.chain
}
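    /// Collects at most `lookup_limit` entries from the iterator, returning an
    /// error if it would yield more than the configured limit.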
pub(crate) fn limit_result<T>(&self, entries: impl Iterator<Item = T>) -> Result<Vec<T>> {
let mut entries = entries.fuse();
let result: Vec<T> = match self.lookup_limit {
Some(lookup_limit) => entries.by_ref().take(lookup_limit).collect(),
None => entries.by_ref().collect(),
};
if entries.next().is_some() {
bail!(">{} index entries, query may take too long", result.len())
}
Ok(result)
}
pub(crate) fn filter_by_txid(&self, txid: Txid) -> impl Iterator<Item = BlockHash> + '_ {
self.store
.iter_txid(TxidRow::scan_prefix(txid))
.map(|row| HashPrefixRow::from_db_row(&row).height())
.filter_map(move |height| self.chain.get_block_hash(height))
}
pub(crate) fn filter_by_funding(
&self,
scripthash: ScriptHash,
) -> impl Iterator<Item = BlockHash> + '_ {
self.store
.iter_funding(ScriptHashRow::scan_prefix(scripthash))
.map(|row| HashPrefixRow::from_db_row(&row).height())
.filter_map(move |height| self.chain.get_block_hash(height))
}
pub(crate) fn filter_by_spending(
&self,
outpoint: OutPoint,
) -> impl Iterator<Item = BlockHash> + '_ {
self.store
.iter_spending(SpendingPrefixRow::scan_prefix(outpoint))
.map(|row| HashPrefixRow::from_db_row(&row).height())
.filter_map(move |height| self.chain.get_block_hash(height))
}
pub(crate) fn sync(&mut self, daemon: &Daemon, exit_flag: &ExitFlag) -> Result<()> {
self.stats.observe_db(&self.store);
loop {
let new_headers = self
.stats
.observe_duration("headers", || daemon.get_new_headers(&self.chain))?;
if new_headers.is_empty() {
break;
}
info!(
"indexing {} blocks: [{}..{}]",
new_headers.len(),
new_headers.first().unwrap().height(),
new_headers.last().unwrap().height()
);
for chunk in new_headers.chunks(self.batch_size) {
exit_flag.poll().with_context(|| {
format!(
"indexing interrupted at height: {}",
chunk.first().unwrap().height()
)
})?;
let blockhashes: Vec<BlockHash> = chunk.iter().map(|h| h.hash()).collect();
let mut heights = chunk.iter().map(|h| h.height());
let mut batch = WriteBatch::default();
daemon.for_blocks(blockhashes, |_blockhash, block| {
let height = heights.next().expect("unexpected block");
self.stats.observe_duration("block", || {
index_single_block(block, height).extend(&mut batch)
});
self.stats.height.set("tip", height as f64);
})?;
let heights: Vec<_> = heights.collect();
assert!(
heights.is_empty(),
"some blocks were not indexed: {:?}",
heights
);
batch.sort();
self.stats.observe_batch(&batch);
self.stats
.observe_duration("write", || self.store.write(&batch));
self.stats.observe_db(&self.store);
}
self.chain.update(new_headers);
self.stats.observe_chain(&self.chain);
}
self.store.flush();
Ok(())
}
}
fn db_rows_size(rows: &[Row]) -> usize {
rows.iter().map(|key| key.len()).sum()
}
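/// Builds the funding, spending and txid index rows for a single block at
/// `height`, skipping provably-unspendable outputs and coinbase inputs.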
fn index_single_block(block: Block, height: usize) -> IndexResult {
let mut funding_rows = Vec::with_capacity(block.txdata.iter().map(|tx| tx.output.len()).sum());
let mut spending_rows = Vec::with_capacity(block.txdata.iter().map(|tx| tx.input.len()).sum());
let mut txid_rows = Vec::with_capacity(block.txdata.len());
for tx in &block.txdata {
txid_rows.push(TxidRow::row(tx.txid(), height));
funding_rows.extend(
tx.output
.iter()
.filter(|txo| !txo.script_pubkey.is_provably_unspendable())
.map(|txo| {
let scripthash = ScriptHash::new(&txo.script_pubkey);
ScriptHashRow::row(scripthash, height)
}),
);
if tx.is_coin_base() {
continue; // coinbase doesn't have inputs
}
spending_rows.extend(
tx.input
.iter()
.map(|txin| SpendingPrefixRow::row(txin.previous_output, height)),
);
}
IndexResult {
funding_rows,
spending_rows,
txid_rows,
header_row: HeaderRow::new(block.header),
}
}
| 33.992674 | 99 | 0.553233 |
d5f6f9a0719bc4a8b26d7499287a977dbb1c87ca | 669 | mod imp;
use glib::Object;
use gtk::glib;
use gtk::prelude::*;
glib::wrapper! {
pub struct IntegerObject(ObjectSubclass<imp::IntegerObject>);
}
impl IntegerObject {
pub fn new(number: i32) -> Self {
Object::new(&[("number", &number)]).expect("Failed to create `IntegerObject`.")
}
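    /// Reads the "number" property and writes it back incremented by one.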
pub fn increase_number(self) {
let old_number = self
.property("number")
.expect("The property needs to exist and be readable.")
.get::<i32>()
.expect("The property needs to be of type `i32`.");
self.set_property("number", old_number + 1)
.expect("Could not set property.");
}
}
| 24.777778 | 87 | 0.587444 |
e2a45008748cea1a62d101ee8a4ac913cc3d7b0c | 561 | #![cfg(unix)]
#![deny(warnings, rust_2018_idioms)]
pub mod support;
use crate::support::*;
use libc;
#[test]
fn twice() {
let mut rt = CurrentThreadRuntime::new().unwrap();
let signal = run_with_timeout(&mut rt, Signal::new(libc::SIGUSR1)).unwrap();
send_signal(libc::SIGUSR1);
let (num, signal) = run_with_timeout(&mut rt, signal.into_future())
.ok()
.unwrap();
assert_eq!(num, Some(libc::SIGUSR1));
send_signal(libc::SIGUSR1);
run_with_timeout(&mut rt, signal.into_future())
.ok()
.unwrap();
}
| 22.44 | 80 | 0.622103 |
e6fcf2061ac4084103f0b617a11a38737606a48b | 13,770 | use std::rc::Rc;
use std::cell::RefCell;
use std::any::Any;
use sprite::{Sprite, SpriteAnimation};
use game::GameState;
use map::BlendMode;
pub struct SpriteWithOffset {
pub sprite: Rc<Sprite>,
pub animation: Rc<SpriteAnimation>,
pub animation_frame: usize,
pub x_offset: isize,
pub y_offset: isize,
pub blend_mode: BlendMode,
pub alpha: u8
}
#[derive(Debug, Clone)]
pub struct BoundingRect {
pub x: isize,
pub y: isize,
pub width: isize,
pub height: isize
}
pub struct ActorInfo {
pub x: isize,
pub y: isize,
pub subpixel_x: u8,
pub subpixel_y: u8,
pub velocity_x: isize,
pub velocity_y: isize,
pub collision_bounds: Option<BoundingRect>,
pub collision_channel: u32,
pub blocking_collision: bool,
pub sprites: Vec<SpriteWithOffset>,
pub destroyed: bool,
pub health: i32,
}
pub type ActorRef = Rc<RefCell<Box<Actor>>>;
pub trait ActorAsAny {
fn as_any(&self) -> &Any;
fn as_any_mut(&mut self) -> &mut Any;
}
impl BoundingRect {
pub fn is_colliding(&self, other: &BoundingRect) -> bool {
(other.x < (self.x + self.width)) && (self.x < (other.x + other.width)) &&
(other.y < (self.y + self.height)) && (self.y < (other.y + other.height))
}
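	/// Sweeps `rect` horizontally toward `final_x` against `self`, returning the
	/// x position at which the two rectangles first touch, or `None` if they
	/// never collide along the way.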
pub fn sweep_collision_x(&self, rect: &BoundingRect, final_x: isize) -> Option<isize> {
if (self.y >= (rect.y + rect.height)) || (rect.y >= (self.y + self.height)) {
// Not colliding on y axis
return None;
}
if (self.x < (rect.x + rect.width)) && (rect.x < (self.x + self.width)) {
// Already colliding at start
return Some(rect.x);
}
if ((rect.x + rect.width) <= self.x) && (final_x > rect.x) {
if (self.x - rect.width) < final_x {
// Found earlier collision moving to the right
return Some(self.x - rect.width);
}
} else if (rect.x >= (self.x + self.width)) && (final_x < rect.x) {
if (self.x + self.width) > final_x {
// Found earlier collision moving to the left
return Some(self.x + self.width);
}
}
None
}
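	/// Vertical counterpart of `sweep_collision_x`: returns the y position at
	/// which `rect` first touches `self` on its way to `final_y`, if any.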
pub fn sweep_collision_y(&self, rect: &BoundingRect, final_y: isize) -> Option<isize> {
if (self.x >= (rect.x + rect.width)) || (rect.x >= (self.x + self.width)) {
// Not colliding on x axis
return None;
}
if (self.y < (rect.y + rect.height)) && (rect.y < (self.y + self.height)) {
// Already colliding at start
return Some(rect.y);
}
if ((rect.y + rect.height) <= self.y) && (final_y > rect.y) {
if (self.y - rect.height) < final_y {
// Found earlier collision moving down
return Some(self.y - rect.height);
}
} else if (rect.y >= (self.y + self.height)) && (final_y < rect.y) {
if (self.y + self.height) > final_y {
// Found earlier collision moving up
return Some(self.y + self.height);
}
}
None
}
}
pub enum MovementCollision {
None,
CollidedWithWorld,
CollidedWithActor(ActorRef)
}
pub trait Actor: ActorAsAny {
fn actor_info(&self) -> &ActorInfo;
fn actor_info_mut(&mut self) -> &mut ActorInfo;
fn init(&mut self, _game_state: &GameState) {}
fn update(&mut self, _game_state: &GameState) {}
fn destroy(&mut self) {
self.actor_info_mut().destroyed = true;
}
fn is_destroyed(&self) -> bool {
self.actor_info().destroyed
}
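	/// Integrates velocity in fixed point (whole pixels plus an 8-bit subpixel
	/// component) and sweeps the actor's bounds against the map and any blocking
	/// actors; velocity is zeroed on an axis that collides.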
fn move_with_collision(&mut self, game_state: &GameState) -> MovementCollision {
let actor_info = self.actor_info_mut();
let mut full_x = (actor_info.x << 8) + actor_info.subpixel_x as isize;
let mut full_y = (actor_info.y << 8) + actor_info.subpixel_y as isize;
full_x += actor_info.velocity_x;
full_y += actor_info.velocity_y;
let mut new_x = full_x >> 8;
let mut new_y = full_y >> 8;
let collision_x_offset;
let collision_y_offset;
let collision_width;
let collision_height;
let mut collision_result = MovementCollision::None;
if let Some(collision_bounds) = &actor_info.collision_bounds {
collision_x_offset = collision_bounds.x;
collision_y_offset = collision_bounds.y;
collision_width = collision_bounds.width;
collision_height = collision_bounds.height;
let mut bounds = BoundingRect {
x: actor_info.x + collision_x_offset,
y: actor_info.y + collision_y_offset,
width: collision_width,
height: collision_height
};
if let Some(map) = &game_state.map {
if let Some(revised_x) = map.sweep_collision_x(&bounds, new_x + collision_x_offset, actor_info.collision_channel) {
new_x = revised_x - collision_x_offset;
full_x = new_x << 8;
actor_info.velocity_x = 0;
collision_result = MovementCollision::CollidedWithWorld;
}
for actor_ref in &game_state.actors {
if let Ok(other_actor) = actor_ref.try_borrow() {
let other_actor_info = other_actor.actor_info();
if !other_actor_info.blocking_collision {
continue;
}
if let Some(other_bounds) = &other_actor_info.collision_bounds {
let other_collision_x_offset = other_bounds.x;
let other_collision_y_offset = other_bounds.y;
let other_collision_width = other_bounds.width;
let other_collision_height = other_bounds.height;
let mut other_bounds = BoundingRect {
x: other_actor_info.x + other_collision_x_offset,
y: other_actor_info.y + other_collision_y_offset,
width: other_collision_width,
height: other_collision_height
};
if let Some(revised_x) = other_bounds.sweep_collision_x(&bounds, new_x + collision_x_offset) {
new_x = revised_x - collision_x_offset;
full_x = new_x << 8;
actor_info.velocity_x = 0;
collision_result = MovementCollision::CollidedWithActor(actor_ref.clone());
}
}
}
}
bounds.x = new_x + collision_x_offset;
if let Some(revised_y) = map.sweep_collision_y(&bounds, new_y + collision_y_offset, actor_info.collision_channel) {
new_y = revised_y - collision_y_offset;
full_y = new_y << 8;
actor_info.velocity_y = 0;
collision_result = MovementCollision::CollidedWithWorld;
}
for actor_ref in &game_state.actors {
if let Ok(other_actor) = actor_ref.try_borrow() {
let other_actor_info = other_actor.actor_info();
if !other_actor_info.blocking_collision {
continue;
}
if let Some(other_bounds) = &other_actor_info.collision_bounds {
let other_collision_x_offset = other_bounds.x;
let other_collision_y_offset = other_bounds.y;
let other_collision_width = other_bounds.width;
let other_collision_height = other_bounds.height;
let mut other_bounds = BoundingRect {
x: other_actor_info.x + other_collision_x_offset,
y: other_actor_info.y + other_collision_y_offset,
width: other_collision_width,
height: other_collision_height
};
if let Some(revised_y) = other_bounds.sweep_collision_y(&bounds, new_y + collision_y_offset) {
new_y = revised_y - collision_y_offset;
full_y = new_y << 8;
actor_info.velocity_y = 0;
collision_result = MovementCollision::CollidedWithActor(actor_ref.clone());
}
}
}
}
}
}
actor_info.x = full_x >> 8;
actor_info.y = full_y >> 8;
actor_info.subpixel_x = (full_x & 0xff) as u8;
actor_info.subpixel_y = (full_y & 0xff) as u8;
collision_result
}
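	/// Returns the actors whose collision bounds currently overlap this actor's
	/// bounds.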
fn check_for_actor_collision(&mut self, game_state: &GameState) -> Vec<ActorRef> {
let actor_info = self.actor_info();
let mut collided_actors: Vec<ActorRef> = Vec::new();
if let Some(collision_bounds) = &actor_info.collision_bounds {
let collision_x_offset = collision_bounds.x;
let collision_y_offset = collision_bounds.y;
let collision_width = collision_bounds.width;
let collision_height = collision_bounds.height;
let mut bounds = BoundingRect {
x: actor_info.x + collision_x_offset,
y: actor_info.y + collision_y_offset,
width: collision_width,
height: collision_height
};
for actor_ref in &game_state.actors {
if let Ok(other_actor) = actor_ref.try_borrow() {
let other_actor_info = other_actor.actor_info();
if let Some(other_bounds) = &other_actor_info.collision_bounds {
let collision_x_offset = other_bounds.x;
let collision_y_offset = other_bounds.y;
let collision_width = other_bounds.width;
let collision_height = other_bounds.height;
let mut other_bounds = BoundingRect {
x: other_actor_info.x + collision_x_offset,
y: other_actor_info.y + collision_y_offset,
width: collision_width,
height: collision_height
};
if bounds.is_colliding(&other_bounds) {
collided_actors.push(actor_ref.clone());
}
}
}
}
}
collided_actors
}
fn before_move(&mut self, _game_state: &GameState) {}
fn after_move(&mut self, _game_state: &GameState) {}
fn apply_move(&mut self, game_state: &GameState) {
self.before_move(game_state);
match self.move_with_collision(game_state) {
MovementCollision::CollidedWithWorld => self.on_collide_with_world(game_state),
MovementCollision::CollidedWithActor(actor) => self.on_collide_with_actor(&actor, game_state),
_ => ()
}
for actor in self.check_for_actor_collision(game_state) {
self.on_collide_with_actor(&actor, game_state);
}
self.after_move(game_state);
}
fn tick(&mut self, game_state: &GameState) {
self.update(game_state);
self.apply_move(game_state);
}
fn add_sprite(&mut self, sprite: Rc<Sprite>, x_offset: isize, y_offset: isize) -> usize {
self.actor_info_mut().add_sprite(sprite, x_offset, y_offset)
}
fn add_sprite_with_blending(&mut self, sprite: Rc<Sprite>, x_offset: isize, y_offset: isize,
blend_mode: BlendMode, alpha: u8) -> usize {
self.actor_info_mut().add_sprite_with_blending(sprite, x_offset, y_offset, blend_mode, alpha)
}
fn set_sprite_alpha(&mut self, sprite_index: usize, alpha: u8) {
self.actor_info_mut().set_sprite_alpha(sprite_index, alpha);
}
fn get_sprite_alpha(&mut self, sprite_index: usize) -> u8 {
self.actor_info_mut().get_sprite_alpha(sprite_index)
}
fn adjust_sprite_alpha(&mut self, sprite_index: usize, change: i8) {
self.actor_info_mut().adjust_sprite_alpha(sprite_index, change);
}
fn start_animation(&mut self, name: &str) {
self.actor_info_mut().start_animation(name);
}
fn set_collision_bounds(&mut self, bounds: BoundingRect) {
self.actor_info_mut().set_collision_bounds(bounds);
}
fn clear_collision_bounds(&mut self) {
self.actor_info_mut().clear_collision_bounds();
}
fn get_camera_focus_offset(&self) -> (isize, isize) { (0, 0) }
fn adjust_health(&mut self, amount: i32, game_state: &GameState) {
if self.actor_info().health <= 0 {
return;
}
let new_health = self.actor_info().health.saturating_add(amount);
self.actor_info_mut().health = new_health;
if new_health <= 0 {
self.on_death(game_state);
}
}
fn damage(&mut self, _damage_type: &str, _amount: i32, _game_state: &GameState) {}
fn knockback(&mut self, _x: isize, _y: isize, _game_state: &GameState) {}
fn on_death(&mut self, _game_state: &GameState) {}
fn on_button_down(&mut self, _name: &str, _game_state: &GameState) {}
fn on_button_up(&mut self, _name: &str, _game_state: &GameState) {}
fn on_axis_changed(&mut self, _name: &str, _value: f32, _game_state: &GameState) {}
fn on_collide_with_world(&mut self, _game_state: &GameState) {}
fn on_collide_with_actor(&mut self, _actor: &ActorRef, _game_state: &GameState) {}
fn on_persistent_actor_removed(&mut self, _game_state: &GameState) {}
}
impl ActorInfo {
pub fn new(x: isize, y: isize) -> ActorInfo {
ActorInfo {
x, y,
subpixel_x: 0,
subpixel_y: 0,
velocity_x: 0,
velocity_y: 0,
collision_bounds: None,
collision_channel: 0,
blocking_collision: false,
sprites: Vec::new(),
destroyed: false,
health: 100
}
}
pub fn add_sprite(&mut self, sprite: Rc<Sprite>, x_offset: isize, y_offset: isize) -> usize {
let animation = sprite.get_default_animation();
let index = self.sprites.len();
self.sprites.push(SpriteWithOffset {
sprite,
animation,
animation_frame: 0,
x_offset, y_offset,
blend_mode: BlendMode::Normal,
alpha: 0
});
index
}
pub fn add_sprite_with_blending(&mut self, sprite: Rc<Sprite>, x_offset: isize, y_offset: isize,
blend_mode: BlendMode, alpha: u8) -> usize {
let animation = sprite.get_default_animation();
let index = self.sprites.len();
self.sprites.push(SpriteWithOffset {
sprite,
animation,
animation_frame: 0,
x_offset, y_offset,
blend_mode, alpha
});
index
}
pub fn set_sprite_alpha(&mut self, sprite_index: usize, alpha: u8) {
if sprite_index < self.sprites.len() {
self.sprites[sprite_index].alpha = alpha;
}
}
pub fn get_sprite_alpha(&mut self, sprite_index: usize) -> u8 {
if sprite_index < self.sprites.len() {
self.sprites[sprite_index].alpha
} else {
0
}
}
pub fn adjust_sprite_alpha(&mut self, sprite_index: usize, change: i8) {
let mut alpha = self.get_sprite_alpha(sprite_index);
if change > 0 {
alpha = alpha.saturating_add(change as u8);
if alpha > 16 {
alpha = 16;
}
} else if change < 0 {
alpha = alpha.saturating_sub((-change) as u8);
}
self.set_sprite_alpha(sprite_index, alpha);
}
pub fn start_animation(&mut self, name: &str) {
for sprite in &mut self.sprites {
if let Some(animation) = sprite.sprite.get_animation_by_name(name) {
if !Rc::ptr_eq(&animation, &sprite.animation) {
sprite.animation = animation;
sprite.animation_frame = 0;
}
}
}
}
pub fn set_collision_bounds(&mut self, bounds: BoundingRect) {
self.collision_bounds = Some(bounds);
}
pub fn clear_collision_bounds(&mut self) {
self.collision_bounds = None;
}
}
impl<T: Actor + 'static> ActorAsAny for T {
fn as_any(&self) -> &Any {
self
}
fn as_any_mut(&mut self) -> &mut Any {
self
}
}
| 29.235669 | 119 | 0.687073 |
2f5436e63a2c611ad08a9e902baea1d6595e79d4 | 1,247 | use std::iter::FromIterator;
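// Merges the sorted runs v[start..mid] and v[mid..end] back into v[start..end].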
fn merge(v: &mut Vec<u32>, start: usize, mid: usize, end: usize){
let first = Vec::from_iter(v[start..mid].iter().cloned());
let second = Vec::from_iter(v[mid..end].iter().cloned());
let mut first_idx = 0;
let mut second_idx = 0;
let mut v_idx = start;
while first_idx < first.len() && second_idx < second.len(){
if first[first_idx] < second[second_idx] {
v[v_idx] = first[first_idx];
first_idx += 1;
} else {
v[v_idx] = second[second_idx];
second_idx += 1;
}
v_idx += 1;
}
while first_idx < first.len() {
v[v_idx] = first[first_idx];
v_idx += 1;
first_idx += 1;
}
while second_idx < second.len() {
v[v_idx] = second[second_idx];
v_idx += 1;
second_idx += 1;
}
}
fn merge_sort_helper(v: &mut Vec<u32>, start: usize, end: usize){
    // Compare with addition instead of `end - 1` so an empty range (end == 0)
    // cannot underflow usize.
    if start + 1 < end {
let mid = (start + end) / 2;
merge_sort_helper(v, start, mid);
merge_sort_helper(v, mid, end);
merge(v, start, mid, end);
}
}
pub fn merge_sort(v: &mut Vec<u32>){
merge_sort_helper(v, 0, v.len());
}
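// Illustrative usage sketch (added for clarity; not part of the original file).
#[cfg(test)]
mod tests {
    use super::merge_sort;

    #[test]
    fn sorts_unordered_input() {
        let mut v = vec![5, 1, 4, 2, 3];
        merge_sort(&mut v);
        assert_eq!(v, vec![1, 2, 3, 4, 5]);

        // An empty vector is a no-op thanks to the underflow-safe guard in
        // merge_sort_helper.
        let mut empty: Vec<u32> = Vec::new();
        merge_sort(&mut empty);
        assert!(empty.is_empty());
    }
}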
| 23.980769 | 65 | 0.52927 |
5d43a3eeeca274db65d88f9f0aa530d117975254 | 3,516 | //! Sends and unloads entities and chunks for a client.
//!
//! The entities and chunks visible to each client are
//! determined based on the player's [`common::view::View`].
use ahash::AHashMap;
use base::{ChunkPosition, Position};
use common::{
events::{ChunkLoadEvent, ViewUpdateEvent},
Game,
};
use ecs::{Entity, SysResult, SystemExecutor};
use crate::{Client, ClientId, Server};
pub fn register(_game: &mut Game, systems: &mut SystemExecutor<Game>) {
systems
.group::<Server>()
.add_system(send_new_chunks)
.add_system(send_loaded_chunks);
}
/// Stores the players waiting on chunks that are currently being loaded.
#[derive(Default)]
pub struct WaitingChunks(AHashMap<ChunkPosition, Vec<Entity>>);
impl WaitingChunks {
pub fn drain_players_waiting_for(&mut self, chunk: ChunkPosition) -> Vec<Entity> {
self.0.remove(&chunk).unwrap_or_default()
}
pub fn insert(&mut self, player: Entity, chunk: ChunkPosition) {
self.0.entry(chunk).or_default().push(player);
}
}
fn send_new_chunks(game: &mut Game, server: &mut Server) -> SysResult {
for (player, (&client_id, event, &position)) in game
.ecs
.query::<(&ClientId, &ViewUpdateEvent, &Position)>()
.iter()
{
        // The ECS only removes a client's entity one tick after the client is
        // removed here, so an entity can still be listed in the ECS after its
        // client is gone; check that the client actually still exists.
if let Some(client) = server.clients.get(client_id) {
client.update_own_chunk(event.new_view.center());
update_chunks(
game,
player,
client,
event,
position,
&mut server.waiting_chunks,
)?;
}
}
Ok(())
}
fn update_chunks(
game: &Game,
player: Entity,
client: &Client,
event: &ViewUpdateEvent,
position: Position,
waiting_chunks: &mut WaitingChunks,
) -> SysResult {
// Send chunks that are in the new view but not the old view.
for &pos in &event.new_chunks {
if let Some(chunk) = game.world.chunk_map().chunk_handle_at(pos) {
client.send_chunk(&chunk);
} else {
waiting_chunks.insert(player, pos);
}
}
// Unsend the chunks that are in the old view but not the new view.
for &pos in &event.old_chunks {
client.unload_chunk(pos);
}
spawn_client_if_needed(client, position);
Ok(())
}
/// Sends newly loaded chunks to players currently
/// waiting for those chunks to load.
fn send_loaded_chunks(game: &mut Game, server: &mut Server) -> SysResult {
for (_, event) in game.ecs.query::<&ChunkLoadEvent>().iter() {
for player in server
.waiting_chunks
.drain_players_waiting_for(event.position)
{
if let Ok(client_id) = game.ecs.get::<ClientId>(player) {
if let Some(client) = server.clients.get(*client_id) {
client.send_chunk(&event.chunk);
spawn_client_if_needed(client, *game.ecs.get::<Position>(player)?);
}
}
}
}
Ok(())
}
fn spawn_client_if_needed(client: &Client, pos: Position) {
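    // Wait until the client knows at least a 9x9 area's worth of chunks (81)
    // before spawning it into the world.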
if !client.knows_own_position() && client.known_chunks() >= 9 * 9 {
log::debug!("Sent all chunks to {}; now spawning", client.username());
client.update_own_position(pos);
}
}
| 31.115044 | 87 | 0.612059 |
f785eaa7cb70d27f21ce39e5e04140588cadc297 | 4,851 | use super::*;
use super::dialogue_helpers::{launch_dialogue, DialogueBuilder};
use components::*;
use resources::*;
pub struct HackCallbackHandlerSystem;
#[derive(SystemData)]
pub struct HackCallbackHandlerSystemData<'a> {
queued_actions: Write<'a, QueuedPlayerActions>,
hackable: WriteStorage<'a, Hackable>,
callbacks: Write<'a, Callbacks>,
}
impl<'a> System<'a> for HackCallbackHandlerSystem {
type SystemData = HackCallbackHandlerSystemData<'a>;
fn run(&mut self, mut data: Self::SystemData) {
let hack_callbacks = data.callbacks.take_some(|cb| match cb {
Callback::Hack(hdc) => TakeDecision::Take(hdc),
x => TakeDecision::Leave(x),
});
for hcb in hack_callbacks {
handle_hack_callback(hcb, &mut data.queued_actions, &mut data.hackable, &mut data.callbacks);
}
}
}
fn handle_hack_callback(
hack_callback: HackCallback,
queued_actions: &mut QueuedPlayerActions,
hackable: &mut WriteStorage<'_, Hackable>,
callbacks: &mut Callbacks,
) {
match hack_callback {
HackCallback::InitiateHack { target, turn_duration } => {
for _ in 0..turn_duration {
queued_actions.action_queue.push_back(QueuedPlayerAction::Wait);
}
queued_actions.action_queue.push_back(QueuedPlayerAction::Hack { target });
}
HackCallback::ChooseHackTarget { entity } => {
let hackable = hackable
.get_mut(entity)
.expect("If we initiated hack on an entity, it better be hackable");
let mut builder = DialogueBuilder::new(&format!("Hacking {}...", hackable.name));
match &hackable.hack_state {
HackState::Uncompromised => {
builder = builder.with_option(
"[Compromise]",
vec![
Callback::Hack(HackCallback::InitiateHack {
target: HackTarget {
entity,
hack_type: HackType::Compromise,
},
turn_duration: 60,
}),
Callback::EndDialogue,
],
);
}
HackState::Compromised => {
builder = builder
.with_option(
"[Lock Shut]",
vec![
Callback::Hack(HackCallback::InitiateHack {
target: HackTarget {
entity,
hack_type: HackType::Door {
new_door_behavior: DoorBehavior::StayClosed,
},
},
turn_duration: 5,
}),
Callback::EndDialogue,
],
)
.with_option(
"[Lock Open]",
vec![
Callback::Hack(HackCallback::InitiateHack {
target: HackTarget {
entity,
hack_type: HackType::Door {
new_door_behavior: DoorBehavior::StayOpen,
},
},
turn_duration: 5,
}),
Callback::EndDialogue,
],
)
.with_option(
"[Set to Automatic]",
vec![
Callback::Hack(HackCallback::InitiateHack {
target: HackTarget {
entity,
hack_type: HackType::Door {
new_door_behavior: DoorBehavior::FullAuto,
},
},
turn_duration: 5,
}),
Callback::EndDialogue,
],
);
}
};
builder = builder.with_option("[Cancel]", vec![Callback::EndDialogue]);
launch_dialogue(builder, callbacks);
}
}
}
| 38.808 | 105 | 0.398681 |
0e381148b1cccc673e529c62a58e4b77b0606788 | 527 | //! Types and support for parsing the component model text format.
mod alias;
mod component;
mod deftype;
mod export;
mod func;
mod import;
mod instance;
mod intertype;
mod item_ref;
mod module;
mod types;
pub use self::alias::*;
pub use self::component::*;
pub use self::deftype::*;
pub use self::export::*;
pub use self::func::*;
pub use self::import::*;
pub use self::instance::*;
pub use self::intertype::*;
pub use self::item_ref::*;
pub use self::module::*;
pub use self::types::*;
mod binary;
mod expand;
mod resolve;
| 18.172414 | 66 | 0.70019 |
8724201dd575415d8e1b8cb7ec961f2266540838 | 15,045 | #[doc = "Reader of register CPUIRQSEL30"]
pub type R = crate::R<u32, super::CPUIRQSEL30>;
#[doc = "Writer for register CPUIRQSEL30"]
pub type W = crate::W<u32, super::CPUIRQSEL30>;
#[doc = "Register CPUIRQSEL30 `reset()`'s with value 0"]
impl crate::ResetValue for super::CPUIRQSEL30 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "6:0\\]
Read/write selection value. Writing any value other than those defined by an ENUM may result in undefined behavior.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum EV_A {
#[doc = "121: Always asserted"]
ALWAYS_ACTIVE = 121,
#[doc = "119: RTC periodic event controlled by AON_RTC:CTL.RTC_UPD_EN"]
AON_RTC_UPD = 119,
#[doc = "114: Loopback of OBSMUX0 through AUX, corresponds to AUX_EVCTL:EVTOMCUFLAGS.MCU_OBSMUX0"]
AUX_OBSMUX0 = 114,
#[doc = "113: AUX ADC FIFO watermark event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_ADC_FIFO_ALMOST_FULL"]
AUX_ADC_FIFO_ALMOST_FULL = 113,
#[doc = "112: AUX ADC done, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_ADC_DONE"]
AUX_ADC_DONE = 112,
#[doc = "111: Autotake event from AUX semaphore, configured by AUX_SMPH:AUTOTAKE"]
AUX_SMPH_AUTOTAKE_DONE = 111,
#[doc = "110: AUX timer 1 event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER1_EV"]
AUX_TIMER1_EV = 110,
#[doc = "109: AUX timer 0 event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER0_EV"]
AUX_TIMER0_EV = 109,
#[doc = "108: AUX TDC measurement done event, corresponds to the flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TDC_DONE and the AUX_TDC status AUX_TDC:STAT.DONE"]
AUX_TDC_DONE = 108,
#[doc = "107: AUX Compare B event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_COMPB"]
AUX_COMPB = 107,
#[doc = "105: AON wakeup event, the corresponding flag is here AUX_EVCTL:EVTOMCUFLAGS.AUX_WU_EV"]
AUX_AON_WU_EV = 105,
#[doc = "94: CRYPTO DMA input done event, the correspondingg flag is CRYPTO:IRQSTAT.DMA_IN_DONE. Controlled by CRYPTO:IRQEN.DMA_IN_DONE"]
CRYPTO_DMA_DONE_IRQ = 94,
#[doc = "60: AUX Timer2 pulse, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_PULSE"]
AUX_TIMER2_PULSE = 60,
#[doc = "59: AUX Timer2 event 3, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV3"]
AUX_TIMER2_EV3 = 59,
#[doc = "58: AUX Timer2 event 2, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV2"]
AUX_TIMER2_EV2 = 58,
#[doc = "57: AUX Timer2 event 1, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV1"]
AUX_TIMER2_EV1 = 57,
#[doc = "56: AUX Timer2 event 0, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV0"]
AUX_TIMER2_EV0 = 56,
#[doc = "22: DMA done for software tiggered UDMA channel 18, see UDMA0:SOFTREQ"]
DMA_CH18_DONE = 22,
#[doc = "20: DMA done for software tiggered UDMA channel 0, see UDMA0:SOFTREQ"]
DMA_CH0_DONE = 20,
#[doc = "10: AUX Software event 0, AUX_EVCTL:SWEVSET.SWEV0"]
AON_AUX_SWEV0 = 10,
#[doc = "8: Interrupt event from I2S"]
I2S_IRQ = 8,
#[doc = "3: AON programmable event 2. Event selected by AON_EVENT MCU event selector, AON_EVENT:EVTOMCUSEL.AON_PROG2_EV"]
AON_PROG2 = 3,
#[doc = "2: AON programmable event 1. Event selected by AON_EVENT MCU event selector, AON_EVENT:EVTOMCUSEL.AON_PROG1_EV"]
AON_PROG1 = 2,
#[doc = "0: Always inactive"]
NONE = 0,
}
impl From<EV_A> for u8 {
#[inline(always)]
fn from(variant: EV_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `EV`"]
pub type EV_R = crate::R<u8, EV_A>;
impl EV_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, EV_A> {
use crate::Variant::*;
match self.bits {
121 => Val(EV_A::ALWAYS_ACTIVE),
119 => Val(EV_A::AON_RTC_UPD),
114 => Val(EV_A::AUX_OBSMUX0),
113 => Val(EV_A::AUX_ADC_FIFO_ALMOST_FULL),
112 => Val(EV_A::AUX_ADC_DONE),
111 => Val(EV_A::AUX_SMPH_AUTOTAKE_DONE),
110 => Val(EV_A::AUX_TIMER1_EV),
109 => Val(EV_A::AUX_TIMER0_EV),
108 => Val(EV_A::AUX_TDC_DONE),
107 => Val(EV_A::AUX_COMPB),
105 => Val(EV_A::AUX_AON_WU_EV),
94 => Val(EV_A::CRYPTO_DMA_DONE_IRQ),
60 => Val(EV_A::AUX_TIMER2_PULSE),
59 => Val(EV_A::AUX_TIMER2_EV3),
58 => Val(EV_A::AUX_TIMER2_EV2),
57 => Val(EV_A::AUX_TIMER2_EV1),
56 => Val(EV_A::AUX_TIMER2_EV0),
22 => Val(EV_A::DMA_CH18_DONE),
20 => Val(EV_A::DMA_CH0_DONE),
10 => Val(EV_A::AON_AUX_SWEV0),
8 => Val(EV_A::I2S_IRQ),
3 => Val(EV_A::AON_PROG2),
2 => Val(EV_A::AON_PROG1),
0 => Val(EV_A::NONE),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `ALWAYS_ACTIVE`"]
#[inline(always)]
pub fn is_always_active(&self) -> bool {
*self == EV_A::ALWAYS_ACTIVE
}
#[doc = "Checks if the value of the field is `AON_RTC_UPD`"]
#[inline(always)]
pub fn is_aon_rtc_upd(&self) -> bool {
*self == EV_A::AON_RTC_UPD
}
#[doc = "Checks if the value of the field is `AUX_OBSMUX0`"]
#[inline(always)]
pub fn is_aux_obsmux0(&self) -> bool {
*self == EV_A::AUX_OBSMUX0
}
#[doc = "Checks if the value of the field is `AUX_ADC_FIFO_ALMOST_FULL`"]
#[inline(always)]
pub fn is_aux_adc_fifo_almost_full(&self) -> bool {
*self == EV_A::AUX_ADC_FIFO_ALMOST_FULL
}
#[doc = "Checks if the value of the field is `AUX_ADC_DONE`"]
#[inline(always)]
pub fn is_aux_adc_done(&self) -> bool {
*self == EV_A::AUX_ADC_DONE
}
#[doc = "Checks if the value of the field is `AUX_SMPH_AUTOTAKE_DONE`"]
#[inline(always)]
pub fn is_aux_smph_autotake_done(&self) -> bool {
*self == EV_A::AUX_SMPH_AUTOTAKE_DONE
}
#[doc = "Checks if the value of the field is `AUX_TIMER1_EV`"]
#[inline(always)]
pub fn is_aux_timer1_ev(&self) -> bool {
*self == EV_A::AUX_TIMER1_EV
}
#[doc = "Checks if the value of the field is `AUX_TIMER0_EV`"]
#[inline(always)]
pub fn is_aux_timer0_ev(&self) -> bool {
*self == EV_A::AUX_TIMER0_EV
}
#[doc = "Checks if the value of the field is `AUX_TDC_DONE`"]
#[inline(always)]
pub fn is_aux_tdc_done(&self) -> bool {
*self == EV_A::AUX_TDC_DONE
}
#[doc = "Checks if the value of the field is `AUX_COMPB`"]
#[inline(always)]
pub fn is_aux_compb(&self) -> bool {
*self == EV_A::AUX_COMPB
}
#[doc = "Checks if the value of the field is `AUX_AON_WU_EV`"]
#[inline(always)]
pub fn is_aux_aon_wu_ev(&self) -> bool {
*self == EV_A::AUX_AON_WU_EV
}
#[doc = "Checks if the value of the field is `CRYPTO_DMA_DONE_IRQ`"]
#[inline(always)]
pub fn is_crypto_dma_done_irq(&self) -> bool {
*self == EV_A::CRYPTO_DMA_DONE_IRQ
}
#[doc = "Checks if the value of the field is `AUX_TIMER2_PULSE`"]
#[inline(always)]
pub fn is_aux_timer2_pulse(&self) -> bool {
*self == EV_A::AUX_TIMER2_PULSE
}
#[doc = "Checks if the value of the field is `AUX_TIMER2_EV3`"]
#[inline(always)]
pub fn is_aux_timer2_ev3(&self) -> bool {
*self == EV_A::AUX_TIMER2_EV3
}
#[doc = "Checks if the value of the field is `AUX_TIMER2_EV2`"]
#[inline(always)]
pub fn is_aux_timer2_ev2(&self) -> bool {
*self == EV_A::AUX_TIMER2_EV2
}
#[doc = "Checks if the value of the field is `AUX_TIMER2_EV1`"]
#[inline(always)]
pub fn is_aux_timer2_ev1(&self) -> bool {
*self == EV_A::AUX_TIMER2_EV1
}
#[doc = "Checks if the value of the field is `AUX_TIMER2_EV0`"]
#[inline(always)]
pub fn is_aux_timer2_ev0(&self) -> bool {
*self == EV_A::AUX_TIMER2_EV0
}
#[doc = "Checks if the value of the field is `DMA_CH18_DONE`"]
#[inline(always)]
pub fn is_dma_ch18_done(&self) -> bool {
*self == EV_A::DMA_CH18_DONE
}
#[doc = "Checks if the value of the field is `DMA_CH0_DONE`"]
#[inline(always)]
pub fn is_dma_ch0_done(&self) -> bool {
*self == EV_A::DMA_CH0_DONE
}
#[doc = "Checks if the value of the field is `AON_AUX_SWEV0`"]
#[inline(always)]
pub fn is_aon_aux_swev0(&self) -> bool {
*self == EV_A::AON_AUX_SWEV0
}
#[doc = "Checks if the value of the field is `I2S_IRQ`"]
#[inline(always)]
pub fn is_i2s_irq(&self) -> bool {
*self == EV_A::I2S_IRQ
}
#[doc = "Checks if the value of the field is `AON_PROG2`"]
#[inline(always)]
pub fn is_aon_prog2(&self) -> bool {
*self == EV_A::AON_PROG2
}
#[doc = "Checks if the value of the field is `AON_PROG1`"]
#[inline(always)]
pub fn is_aon_prog1(&self) -> bool {
*self == EV_A::AON_PROG1
}
#[doc = "Checks if the value of the field is `NONE`"]
#[inline(always)]
pub fn is_none(&self) -> bool {
*self == EV_A::NONE
}
}
#[doc = "Write proxy for field `EV`"]
pub struct EV_W<'a> {
w: &'a mut W,
}
impl<'a> EV_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: EV_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "Always asserted"]
#[inline(always)]
pub fn always_active(self) -> &'a mut W {
self.variant(EV_A::ALWAYS_ACTIVE)
}
#[doc = "RTC periodic event controlled by AON_RTC:CTL.RTC_UPD_EN"]
#[inline(always)]
pub fn aon_rtc_upd(self) -> &'a mut W {
self.variant(EV_A::AON_RTC_UPD)
}
#[doc = "Loopback of OBSMUX0 through AUX, corresponds to AUX_EVCTL:EVTOMCUFLAGS.MCU_OBSMUX0"]
#[inline(always)]
pub fn aux_obsmux0(self) -> &'a mut W {
self.variant(EV_A::AUX_OBSMUX0)
}
#[doc = "AUX ADC FIFO watermark event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_ADC_FIFO_ALMOST_FULL"]
#[inline(always)]
pub fn aux_adc_fifo_almost_full(self) -> &'a mut W {
self.variant(EV_A::AUX_ADC_FIFO_ALMOST_FULL)
}
#[doc = "AUX ADC done, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_ADC_DONE"]
#[inline(always)]
pub fn aux_adc_done(self) -> &'a mut W {
self.variant(EV_A::AUX_ADC_DONE)
}
#[doc = "Autotake event from AUX semaphore, configured by AUX_SMPH:AUTOTAKE"]
#[inline(always)]
pub fn aux_smph_autotake_done(self) -> &'a mut W {
self.variant(EV_A::AUX_SMPH_AUTOTAKE_DONE)
}
#[doc = "AUX timer 1 event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER1_EV"]
#[inline(always)]
pub fn aux_timer1_ev(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER1_EV)
}
#[doc = "AUX timer 0 event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER0_EV"]
#[inline(always)]
pub fn aux_timer0_ev(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER0_EV)
}
#[doc = "AUX TDC measurement done event, corresponds to the flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TDC_DONE and the AUX_TDC status AUX_TDC:STAT.DONE"]
#[inline(always)]
pub fn aux_tdc_done(self) -> &'a mut W {
self.variant(EV_A::AUX_TDC_DONE)
}
#[doc = "AUX Compare B event, corresponds to AUX_EVCTL:EVTOMCUFLAGS.AUX_COMPB"]
#[inline(always)]
pub fn aux_compb(self) -> &'a mut W {
self.variant(EV_A::AUX_COMPB)
}
#[doc = "AON wakeup event, the corresponding flag is here AUX_EVCTL:EVTOMCUFLAGS.AUX_WU_EV"]
#[inline(always)]
pub fn aux_aon_wu_ev(self) -> &'a mut W {
self.variant(EV_A::AUX_AON_WU_EV)
}
#[doc = "CRYPTO DMA input done event, the correspondingg flag is CRYPTO:IRQSTAT.DMA_IN_DONE. Controlled by CRYPTO:IRQEN.DMA_IN_DONE"]
#[inline(always)]
pub fn crypto_dma_done_irq(self) -> &'a mut W {
self.variant(EV_A::CRYPTO_DMA_DONE_IRQ)
}
#[doc = "AUX Timer2 pulse, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_PULSE"]
#[inline(always)]
pub fn aux_timer2_pulse(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER2_PULSE)
}
#[doc = "AUX Timer2 event 3, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV3"]
#[inline(always)]
pub fn aux_timer2_ev3(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER2_EV3)
}
#[doc = "AUX Timer2 event 2, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV2"]
#[inline(always)]
pub fn aux_timer2_ev2(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER2_EV2)
}
#[doc = "AUX Timer2 event 1, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV1"]
#[inline(always)]
pub fn aux_timer2_ev1(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER2_EV1)
}
#[doc = "AUX Timer2 event 0, corresponding to flag AUX_EVCTL:EVTOMCUFLAGS.AUX_TIMER2_EV0"]
#[inline(always)]
pub fn aux_timer2_ev0(self) -> &'a mut W {
self.variant(EV_A::AUX_TIMER2_EV0)
}
#[doc = "DMA done for software tiggered UDMA channel 18, see UDMA0:SOFTREQ"]
#[inline(always)]
pub fn dma_ch18_done(self) -> &'a mut W {
self.variant(EV_A::DMA_CH18_DONE)
}
#[doc = "DMA done for software tiggered UDMA channel 0, see UDMA0:SOFTREQ"]
#[inline(always)]
pub fn dma_ch0_done(self) -> &'a mut W {
self.variant(EV_A::DMA_CH0_DONE)
}
#[doc = "AUX Software event 0, AUX_EVCTL:SWEVSET.SWEV0"]
#[inline(always)]
pub fn aon_aux_swev0(self) -> &'a mut W {
self.variant(EV_A::AON_AUX_SWEV0)
}
#[doc = "Interrupt event from I2S"]
#[inline(always)]
pub fn i2s_irq(self) -> &'a mut W {
self.variant(EV_A::I2S_IRQ)
}
#[doc = "AON programmable event 2. Event selected by AON_EVENT MCU event selector, AON_EVENT:EVTOMCUSEL.AON_PROG2_EV"]
#[inline(always)]
pub fn aon_prog2(self) -> &'a mut W {
self.variant(EV_A::AON_PROG2)
}
#[doc = "AON programmable event 1. Event selected by AON_EVENT MCU event selector, AON_EVENT:EVTOMCUSEL.AON_PROG1_EV"]
#[inline(always)]
pub fn aon_prog1(self) -> &'a mut W {
self.variant(EV_A::AON_PROG1)
}
#[doc = "Always inactive"]
#[inline(always)]
pub fn none(self) -> &'a mut W {
self.variant(EV_A::NONE)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f);
self.w
}
}
impl R {
#[doc = "Bits 0:6 - 6:0\\]
Read/write selection value Writing any other value than values defined by a ENUM may result in undefined behavior."]
#[inline(always)]
pub fn ev(&self) -> EV_R {
EV_R::new((self.bits & 0x7f) as u8)
}
}
impl W {
#[doc = "Bits 0:6 - 6:0\\]
Read/write selection value Writing any other value than values defined by a ENUM may result in undefined behavior."]
#[inline(always)]
pub fn ev(&mut self) -> EV_W {
EV_W { w: self }
}
}
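// Usage sketch (illustrative only — the peripheral handle and register name
// below are assumptions; the actual path depends on the generated PAC):
//
//     // Route the AUX ADC done event through this selector:
//     periph.ev_register.write(|w| w.ev().aux_adc_done());
//     // Inspect the current selection:
//     if periph.ev_register.read().ev().is_aux_adc_done() { /* ... */ }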
| 39.384817 | 152 | 0.628647 |
d99ad8a86fdfd0adca6c1e2d0c4e8672bf73810c | 2,810 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use std::{any::Any, collections::HashMap};
use crate::endpoint::{Endpoint, EndpointAddress};
#[cfg(doc)]
use crate::filters::Filter;
/// The input arguments to [`Filter::write`].
#[non_exhaustive]
pub struct WriteContext<'a> {
/// The upstream endpoint that we're expecting packets from.
pub endpoint: &'a Endpoint,
/// The source of the received packet.
pub from: EndpointAddress,
/// The destination of the received packet.
pub to: EndpointAddress,
/// Contents of the received packet.
pub contents: Vec<u8>,
    /// Arbitrary values that can be passed from one filter to another.
pub metadata: HashMap<String, Box<dyn Any + Send>>,
}
/// The output of [`Filter::write`].
///
/// New instances are created from [`WriteContext`].
///
/// ```rust
/// # use quilkin::filters::{WriteContext, WriteResponse};
/// fn write(ctx: WriteContext) -> Option<WriteResponse> {
/// Some(ctx.into())
/// }
/// ```
#[non_exhaustive]
pub struct WriteResponse {
/// Contents of the packet to be sent back to the original sender.
pub contents: Vec<u8>,
/// Arbitrary values that can be passed from one filter to another.
pub metadata: HashMap<String, Box<dyn Any + Send>>,
}
impl WriteContext<'_> {
/// Creates a new [`WriteContext`]
pub fn new(
endpoint: &Endpoint,
from: EndpointAddress,
to: EndpointAddress,
contents: Vec<u8>,
) -> WriteContext {
WriteContext {
endpoint,
from,
to,
contents,
metadata: HashMap::new(),
}
}
/// Creates a new [`WriteContext`] from a given [`WriteResponse`].
pub fn with_response(
endpoint: &Endpoint,
from: EndpointAddress,
to: EndpointAddress,
response: WriteResponse,
) -> WriteContext {
WriteContext {
endpoint,
from,
to,
contents: response.contents,
metadata: response.metadata,
}
}
}
impl From<WriteContext<'_>> for WriteResponse {
fn from(ctx: WriteContext) -> Self {
Self {
contents: ctx.contents,
metadata: ctx.metadata,
}
}
}
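// Sketch of how a filter chain can thread state between filters: the previous
// filter's `WriteResponse` seeds the next filter's `WriteContext` (the
// endpoint and addresses are assumed to come from the chain's own state).
//
//     let next_ctx = WriteContext::with_response(endpoint, from, to, prev_response);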
| 28.383838 | 75 | 0.624555 |
ac2bcc3d915d06a4ab19f4cac6ad53efeee610e9 | 1,713 | use std::collections::BTreeMap;
use datafile::DataFile;
use FlattenedData;
use Data;
use huffmantree;
use huffmantree::HuffmanTree;
use byteconvert;
pub fn compress(file: DataFile) -> (FlattenedData, HuffmanTree)
{
let tree = file.get_huffman_tree();
let lookup_map = tree.get_lookup_map();
let data = file.into_data();
let mut result: FlattenedData = Vec::new();
for byte in data
{
result.append(&mut lookup_map.get(&byte).unwrap().clone());
}
(byteconvert::get_bytes(result), tree)
}
// Parses the header data and passes the payload to real_decompress().
pub fn decompress(data: Data) -> Option<Data>
{
//reading byte length
let mut byte_len_vec = Vec::new();
let mut data_iter = data.into_iter();
for _ in 0..8
{
byte_len_vec.push(match data_iter.next()
{
Some(v) => v,
None => return None
});
}
let byte_len = byteconvert::u8_to_u64(byte_len_vec);
let lookup_map = match huffmantree::bytes_to_lookup_map(&mut data_iter)
{
Some(v) => v,
None => return None
};
let remaining_data = data_iter.collect::<Vec<u8>>();
Some(real_decompress(byte_len, lookup_map, remaining_data))
}
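// Input layout expected by `decompress`, matching the reads above (the field
// widths come from `byteconvert`; its exact endianness is not shown here):
//
//     [8 bytes: decompressed byte length][serialized lookup map][compressed payload]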
// The decompress function above is the public interface; it mostly handles
// parsing before delegating to real_decompress().
fn real_decompress(byte_len: u64, lookup_map: BTreeMap<FlattenedData, u8>, remaining_data: Data) -> Vec<u8>
{
let mut result = Vec::new();
let flattened_data = byteconvert::flatten_bytes(remaining_data);
let mut buffer = Vec::new();
for elem in flattened_data
{
buffer.push(elem);
if lookup_map.contains_key(&buffer)
{
let ch = lookup_map.get(&buffer).unwrap().clone();
result.push(ch.clone());
buffer.clear();
}
if (result.len() as u64) == byte_len
{
break;
}
}
result
} | 23.791667 | 107 | 0.701693 |
f5ae9c16519a231d76b27ff11c1ac7f8050e9cb8 | 10,780 | use chrono::{DateTime, Duration, Utc};
use pacosako::PlayerColor;
use serde::{Deserialize, Serialize};
use std::convert::From;
/// The timer module encapsulates the game timer state. It imports pacosako
/// in order to work with the player colors; otherwise it is not specific to
/// Paco Ŝako.
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct TimerConfig {
#[serde(
serialize_with = "serialize_seconds",
deserialize_with = "deserialize_seconds"
)]
pub time_budget_white: Duration,
#[serde(
serialize_with = "serialize_seconds",
deserialize_with = "deserialize_seconds"
)]
pub time_budget_black: Duration,
#[serde(default)]
#[serde(
serialize_with = "serialize_seconds_optional",
deserialize_with = "deserialize_seconds_optional"
)]
pub increment: Option<Duration>,
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Timer {
last_timestamp: DateTime<Utc>,
#[serde(
serialize_with = "serialize_seconds",
deserialize_with = "deserialize_seconds"
)]
time_left_white: Duration,
#[serde(
serialize_with = "serialize_seconds",
deserialize_with = "deserialize_seconds"
)]
time_left_black: Duration,
timer_state: TimerState,
config: TimerConfig,
}
/// There is no default implementation of serde::Serialize for Duration, so we
/// have to provide it ourselves. This also gives us the flexibility to decide
/// how much precision we expose to the client.
fn serialize_seconds<S: serde::Serializer>(duration: &Duration, s: S) -> Result<S::Ok, S::Error> {
s.serialize_f32(duration.num_milliseconds() as f32 / 1000f32)
}
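// For example, a Duration of 1500 ms serializes as the float 1.5.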
/// Like serialize_seconds, but optional
fn serialize_seconds_optional<S: serde::Serializer>(
duration: &Option<Duration>,
s: S,
) -> Result<S::Ok, S::Error> {
match duration {
Some(duration) => s.serialize_f32(duration.num_milliseconds() as f32 / 1000f32),
None => s.serialize_none(),
}
}
/// There is no default implementation of serde::Deserialize for Duration, so
/// we have to provide it ourselves. This also gives us the flexibility to
/// decide how much precision we accept from the client.
fn deserialize_seconds<'de, D: serde::Deserializer<'de>>(d: D) -> Result<Duration, D::Error> {
let seconds: f32 = serde::de::Deserialize::deserialize(d)?;
Ok(Duration::milliseconds((1000.0 * seconds) as i64))
}
/// Like deserialize_seconds, but optional
fn deserialize_seconds_optional<'de, D: serde::Deserializer<'de>>(
d: D,
) -> Result<Option<Duration>, D::Error> {
let seconds: Result<f32, D::Error> = serde::de::Deserialize::deserialize(d);
if let Ok(seconds) = seconds {
Ok(Some(Duration::milliseconds((1000.0 * seconds) as i64)))
} else {
Ok(None)
}
}
impl Timer {
    /// Starts the timer. This does nothing when the timer is already running or has already timed out.
pub fn start(&mut self, start_time: DateTime<Utc>) {
if self.timer_state == TimerState::NotStarted {
self.last_timestamp = start_time;
self.timer_state = TimerState::Running;
}
}
pub fn use_time(&mut self, player: PlayerColor, now: DateTime<Utc>) -> TimerState {
if self.timer_state != TimerState::Running {
return self.timer_state;
}
let time_passed: Duration = now - self.last_timestamp;
let time_left = match player {
PlayerColor::White => {
self.time_left_white = self.time_left_white - time_passed;
self.time_left_white
}
PlayerColor::Black => {
self.time_left_black = self.time_left_black - time_passed;
self.time_left_black
}
};
self.last_timestamp = now;
// Check if the time ran out
if time_left <= Duration::nanoseconds(0) {
self.timer_state = TimerState::Timeout(player);
}
self.timer_state
}
/// Stops the timer
pub fn stop(&mut self) {
self.timer_state = TimerState::Stopped
}
    /// Increases the given player's budget by the increment configured in the
    /// timer. This cannot be folded into use_time directly, because a
    /// player may use time multiple times in a single turn. (Each action calls
    /// use_time.)
pub fn increment(&mut self, player: PlayerColor) {
if let Some(increment) = self.config.increment {
match player {
PlayerColor::White => {
self.time_left_white = self.time_left_white + increment;
}
PlayerColor::Black => {
self.time_left_black = self.time_left_black + increment;
}
}
}
}
pub fn get_state(&self) -> TimerState {
self.timer_state
}
    /// Returns the time at which the timer would run out if the given player
    /// retains control until then.
pub fn timeout(&self, player: PlayerColor) -> DateTime<Utc> {
let time_left = match player {
PlayerColor::White => self.time_left_white,
PlayerColor::Black => self.time_left_black,
};
self.last_timestamp + time_left
}
}
/// Gives the current state of the timer. When the timer is running it does
/// not know which player is currently controlling it. The time will be reduced
/// when an action is sent to the server.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Serialize, Deserialize)]
pub enum TimerState {
/// A timer is in this state, when the game has not started yet.
NotStarted,
/// A timer is in this state, while the game is in progress.
Running,
    /// A timer is in timeout when one party runs out of time. The color stored
    /// in here is the losing player who used up their time.
Timeout(PlayerColor),
/// A timer is stopped when one party wins.
Stopped,
}
impl TimerState {
pub fn is_finished(self) -> bool {
match self {
TimerState::NotStarted => false,
TimerState::Running => false,
TimerState::Timeout(_) => true,
TimerState::Stopped => true,
}
}
}
impl From<TimerConfig> for Timer {
fn from(config: TimerConfig) -> Timer {
Timer {
last_timestamp: Utc::now(),
time_left_white: config.time_budget_white.clone(),
time_left_black: config.time_budget_black.clone(),
timer_state: TimerState::NotStarted,
config,
}
}
}
impl From<&TimerConfig> for Timer {
fn from(config: &TimerConfig) -> Timer {
Timer {
last_timestamp: Utc::now(),
time_left_white: config.time_budget_white.clone(),
time_left_black: config.time_budget_black.clone(),
timer_state: TimerState::NotStarted,
config: config.clone(),
}
}
}
#[cfg(test)]
mod test {
use super::*;
fn test_timer_config() -> TimerConfig {
TimerConfig {
time_budget_white: Duration::seconds(5 * 60),
time_budget_black: Duration::seconds(4 * 60),
increment: None,
}
}
#[test]
fn create_timer_from_config() {
let config: TimerConfig = test_timer_config();
let timer: Timer = config.into();
assert_eq!(timer.get_state(), TimerState::NotStarted);
assert_eq!(timer.time_left_white, Duration::seconds(300));
assert_eq!(timer.time_left_black, Duration::seconds(240));
}
#[test]
fn test_start_timer() {
let mut timer: Timer = test_timer_config().into();
let now = Utc::now();
timer.start(now);
assert_eq!(timer.last_timestamp, now);
assert_eq!(timer.get_state(), TimerState::Running);
let now2 = now + Duration::seconds(3);
timer.start(now2);
assert_eq!(timer.last_timestamp, now);
assert_eq!(timer.get_state(), TimerState::Running);
timer.stop();
assert_eq!(timer.get_state(), TimerState::Stopped);
}
#[test]
fn test_use_time() {
use PlayerColor::*;
let mut timer: Timer = test_timer_config().into();
let now = Utc::now();
// Using time does not work when the timer is not running
let unused_future = now + Duration::seconds(100);
timer.use_time(White, unused_future);
assert_eq!(timer.time_left_white, Duration::seconds(300));
assert_eq!(timer.time_left_black, Duration::seconds(240));
timer.start(now);
// Use 15 seconds from the white player
let now = now + Duration::seconds(15);
timer.use_time(White, now);
assert_eq!(timer.time_left_white, Duration::seconds(285));
assert_eq!(timer.time_left_black, Duration::seconds(240));
assert_eq!(timer.get_state(), TimerState::Running);
// Use 7 seconds from the black player
let now = now + Duration::seconds(7);
timer.use_time(Black, now);
assert_eq!(timer.time_left_white, Duration::seconds(285));
assert_eq!(timer.time_left_black, Duration::seconds(233));
assert_eq!(timer.get_state(), TimerState::Running);
// Use 8 seconds from the white player
let now = now + Duration::seconds(8);
timer.use_time(White, now);
assert_eq!(timer.time_left_white, Duration::seconds(277));
assert_eq!(timer.time_left_black, Duration::seconds(233));
assert_eq!(timer.get_state(), TimerState::Running);
// Use 500 seconds from the black player, this should yield a timeout.
let now = now + Duration::seconds(500);
timer.use_time(Black, now);
assert_eq!(timer.time_left_white, Duration::seconds(277));
assert_eq!(timer.time_left_black, Duration::seconds(-267));
assert_eq!(timer.get_state(), TimerState::Timeout(Black));
}
#[test]
fn test_use_increment() {
use PlayerColor::*;
let config = TimerConfig {
time_budget_white: Duration::seconds(5 * 60),
time_budget_black: Duration::seconds(5 * 60),
increment: Some(Duration::seconds(5)),
};
let mut timer: Timer = config.into();
let now = Utc::now();
timer.start(now);
        // Use 15 seconds from the white player, and check that the 5 second
        // increment is credited back.
let now = now + Duration::seconds(15);
timer.use_time(White, now);
timer.increment(White);
assert_eq!(timer.time_left_white, Duration::seconds(290));
assert_eq!(timer.time_left_black, Duration::seconds(300));
assert_eq!(timer.get_state(), TimerState::Running);
}
}
| 33.899371 | 98 | 0.625139 |
269c892a3c6562598f323f9503cf717bc153ad82 | 6,180 | #[macro_use]
extern crate gluon_codegen;
extern crate gluon;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate gluon_vm;
mod init;
use gluon::vm::api::{self, generic::A, Generic};
use gluon::vm::{self, ExternModule};
use gluon::{import, Compiler, Thread};
use init::new_vm;
#[derive(Pushable, VmType, Serialize, Deserialize)]
#[gluon(vm_type = "types.Struct")]
struct Struct {
string: String,
number: u32,
vec: Vec<f64>,
}
fn load_struct_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
new_struct => primitive!(1 new_struct),
};
ExternModule::new(vm, module)
}
fn new_struct(_: ()) -> Struct {
Struct {
string: "hello".to_owned(),
number: 1,
vec: vec![1.0, 2.0, 3.0],
}
}
#[test]
fn normal_struct() {
let vm = new_vm();
let mut compiler = Compiler::new();
    // must be generated by hand because of a bug in make_source (see #542)
let src = r#"
type Struct = { string: String, number: Int, vec: Array Float }
{ Struct }
"#;
compiler.load_script(&vm, "types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_struct_mod);
let script = r#"
let { Struct } = import! types
let { new_struct } = import! functions
let { assert } = import! std.test
let { index, len } = import! std.array
let { string, number, vec } = new_struct ()
assert (string == "hello")
assert (number == 1)
assert (len vec == 3)
assert (index vec 0 == 1.0)
assert (index vec 1 == 2.0)
assert (index vec 2 == 3.0)
"#;
if let Err(why) = compiler.run_expr::<()>(&vm, "test", script) {
panic!("{}", why);
}
}
#[derive(Pushable, VmType)]
#[gluon(vm_type = "types.GenericStruct")]
struct GenericStruct<T> {
generic: T,
other: u32,
}
fn load_generic_struct_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
new_generic_struct => primitive!(1 new_generic_struct),
};
ExternModule::new(vm, module)
}
fn new_generic_struct(arg: Generic<A>) -> GenericStruct<Generic<A>> {
GenericStruct {
generic: arg,
other: 2012,
}
}
#[test]
fn generic_struct() {
let vm = new_vm();
let mut compiler = Compiler::new();
let src = r#"
type GenericStruct a = { generic: a, other: u32 }
{ GenericStruct }
"#;
compiler.load_script(&vm, "types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_generic_struct_mod);
let script = r#"
let { GenericStruct } = import! types
let { new_generic_struct } = import! functions
let { assert } = import! std.test
let { generic, other } = new_generic_struct "hi rust"
assert (generic == "hi rust")
assert (other == 2012)
let { generic, other } = new_generic_struct 3.14
assert (generic == 3.14)
assert (other == 2012)
"#;
if let Err(why) = compiler.run_expr::<()>(&vm, "test", script) {
panic!("{}", why);
}
}
#[derive(Pushable, VmType, Serialize, Deserialize)]
#[gluon(vm_type = "types.LifetimeStruct")]
struct LifetimeStruct<'a> {
string: &'a str,
other: f64,
}
fn load_lifetime_struct_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
new_lifetime_struct => primitive!(1 new_lifetime_struct),
};
ExternModule::new(vm, module)
}
fn new_lifetime_struct<'a>(_: ()) -> LifetimeStruct<'a> {
LifetimeStruct {
string: "I'm borrowed",
other: 6.6,
}
}
#[test]
fn lifetime_struct() {
let vm = new_vm();
let mut compiler = Compiler::new();
// make_source doesn't work with borrowed strings
let src = r#"
type LifetimeStruct = { string: String, other: Float }
{ LifetimeStruct }
"#;
compiler.load_script(&vm, "types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_lifetime_struct_mod);
let script = r#"
let { LifetimeStruct } = import! types
let { new_lifetime_struct } = import! functions
let { assert } = import! std.test
let { string, other } = new_lifetime_struct ()
assert (string == "I'm borrowed")
assert (other == 6.6)
"#;
if let Err(why) = compiler.run_expr::<()>(&vm, "test", script) {
panic!("{}", why);
}
}
#[derive(Pushable, VmType, Serialize, Deserialize)]
#[gluon(vm_type = "types.Enum")]
enum Enum {
Nothing,
Tuple(u32, u32),
Struct { key: String, value: String },
}
fn load_enum_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
new_enum => primitive!(1 new_enum),
};
ExternModule::new(vm, module)
}
fn new_enum(tag: u32) -> Enum {
match tag {
0 => Enum::Nothing,
1 => Enum::Tuple(1920, 1080),
_ => Enum::Struct {
key: "under the doormat".to_owned(),
value: "lots of gold".to_owned(),
},
}
}
#[test]
fn normal_enum() {
let vm = new_vm();
let mut compiler = Compiler::new();
let src = api::typ::make_source::<Enum>(&vm).unwrap();
compiler.load_script(&vm, "types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_enum_mod);
let script = r#"
let { Enum } = import! types
let { new_enum } = import! functions
let { assert } = import! std.test
let assert_enum enum tag =
let actual_tag =
match enum with
| Nothing -> 0
| Tuple x y ->
assert (x == 1920)
assert (y == 1080)
1
| Struct key value ->
assert (key == "under the doormat")
assert (value == "lots of gold")
2
assert (tag == actual_tag)
assert_enum (new_enum 0) 0
assert_enum (new_enum 1) 1
assert_enum (new_enum 2) 2
"#;
if let Err(why) = compiler.run_expr::<()>(&vm, "test", script) {
panic!("{}", why);
}
}
| 24.819277 | 74 | 0.56165 |
185b77c2225687c10f6f53bb083b5b2249998ac7 | 4,620 | use std::collections::HashMap;
use crate::compiler::ir::Code;
use crate::compiler::CompileError;
pub type ScopeId = usize;
pub type LocalId = usize;
#[derive(Debug, Clone, PartialEq)]
pub enum Tag {
IsReceiver,
}
#[derive(Debug, Clone)]
pub struct Local {
pub id: LocalId,
pub mutable: bool,
tags: Vec<Tag>,
}
impl Local {
pub fn has_tag(&self, tag: Tag) -> bool {
self.tags.contains(&tag)
}
pub fn store_instr(&self) -> Code {
if self.mutable {
Code::StoreMut(self.id)
} else {
Code::Store(self.id)
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct ForLoopMeta {
pub continue_label: String,
}
impl ForLoopMeta {
pub fn new(continue_label: String) -> Self {
Self { continue_label }
}
}
#[derive(Debug)]
pub enum ScopeContext {
Global,
Block,
IfElse,
Class(String),
ForLoop(ForLoopMeta),
Function(String),
}
impl PartialEq for ScopeContext {
fn eq(&self, other: &Self) -> bool {
match self {
ScopeContext::Block => matches!(other, ScopeContext::Block),
ScopeContext::Global => matches!(other, ScopeContext::Global),
ScopeContext::IfElse => matches!(other, ScopeContext::IfElse),
ScopeContext::Class(name) => {
matches!(other, ScopeContext::Class(other) if name == other)
}
ScopeContext::ForLoop(_) => matches!(other, ScopeContext::ForLoop(_)),
ScopeContext::Function(name) => {
matches!(other, ScopeContext::Function(other) if name == other)
}
}
}
}
#[derive(Debug)]
pub struct Scope {
pub id: ScopeId,
pub local_id: LocalId,
pub context: ScopeContext,
pub parent: Option<ScopeId>,
pub locals: HashMap<String, Local>,
}
impl Scope {
pub fn new() -> Self {
Self {
id: 0,
local_id: 0,
parent: None,
locals: HashMap::new(),
context: ScopeContext::Global,
}
}
pub fn new_child(parent: &Scope, context: ScopeContext) -> Self {
Self {
id: 0,
context,
local_id: 0,
locals: HashMap::new(),
parent: Some(parent.id),
}
}
}
#[derive(Debug)]
pub struct ScopeGraph {
current: ScopeId,
graph: Vec<Scope>,
}
impl ScopeGraph {
pub fn new() -> Self {
Self {
current: 0,
graph: vec![Scope::new()],
}
}
fn walk<'s, T>(&'s self, handler: impl Fn(&'s Scope) -> Option<T>) -> Option<T> {
for scope in self.graph.iter().rev() {
if let Some(value) = handler(scope) {
return Some(value);
}
}
None
}
fn walk_mut<'s, T>(&'s mut self, handler: impl Fn(&'s mut Scope) -> Option<T>) -> Option<T> {
for scope in self.graph.iter_mut().rev() {
if let Some(value) = handler(scope) {
return Some(value);
}
}
None
}
pub fn current_mut(&mut self) -> &mut Scope {
self.graph.last_mut().unwrap()
}
pub fn current(&mut self) -> &Scope {
self.graph.last().unwrap()
}
pub fn add(&mut self, mut scope: Scope) -> ScopeId {
self.current += 1;
scope.id = self.current;
self.graph.push(scope);
self.current
}
pub fn pop(&mut self) -> Option<Scope> {
self.graph.pop()
}
pub fn set_local(
&mut self,
name: String,
mutable: bool,
tags: Vec<Tag>,
) -> Result<usize, CompileError> {
let id = self
.walk_mut(|scope| {
if let ScopeContext::Function(_) = &scope.context {
let id = scope.local_id;
scope.local_id += 1;
return Some(id);
}
None
})
.ok_or_else(|| {
CompileError::new("unable to set local outside of a function scope".to_string())
})?;
let scope = self.current_mut();
let local = Local { id, mutable, tags };
scope.locals.insert(name, local);
Ok(id)
}
pub fn get_local(&self, name: &str, parents: bool) -> Option<&Local> {
if !parents {
return self.graph.last().and_then(|scope| scope.locals.get(name));
}
self.walk(|scope| {
if let Some(local) = scope.locals.get(name) {
return Some(local);
}
None
})
}
}
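// Usage sketch (illustrative): enter a function scope, declare a local, then
// leave the scope again.
//
//     let mut scopes = ScopeGraph::new();
//     let child = Scope::new_child(scopes.current(), ScopeContext::Function("main".into()));
//     scopes.add(child);
//     let id = scopes.set_local("x".to_string(), true, vec![])?;
//     assert!(scopes.get_local("x", false).is_some());
//     scopes.pop();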
| 22.647059 | 97 | 0.514286 |
e4072229c7c35afc3e5b38ae2ac37004405d02ce | 2,470 | use std::{fs::File, path::PathBuf};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::ParquetWriter;
use super::super::values::NuDataFrame;
#[derive(Clone)]
pub struct ToParquet;
impl Command for ToParquet {
fn name(&self) -> &str {
"to parquet"
}
fn usage(&self) -> &str {
"Saves dataframe to parquet file"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("file", SyntaxShape::Filepath, "file path to save dataframe")
.category(Category::Custom("dataframe".into()))
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Saves dataframe to parquet file",
example: "[[a b]; [1 2] [3 4]] | into df | to parquet test.parquet",
result: None,
}]
}
fn input_type(&self) -> Type {
Type::Custom("dataframe".into())
}
fn output_type(&self) -> Type {
Type::Any
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
command(engine_state, stack, call, input)
}
}
fn command(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
ParquetWriter::new(file).finish(df.as_mut()).map_err(|e| {
ShellError::GenericError(
"Error saving file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::String {
val: format!("saved {:?}", &file_name.item),
span: file_name.span,
};
Ok(PipelineData::Value(
Value::List {
vals: vec![file_value],
span: call.head,
},
None,
))
}
| 24.455446 | 94 | 0.551417 |
e604ec768a5f480aa67c9d21ce2d21a8257fd5e1 | 906 | mod ctx;
mod r#impl;
mod witx;
pub use ctx::WasiNnCtx;
// Defines a `struct WasiNn` with member fields and appropriate APIs for dealing with all the
// various WASI exports.
wasmtime_wiggle::wasmtime_integration!({
// The wiggle code to integrate with lives here:
target: witx,
// This must be the same witx document as used above:
witx: ["$WASI_ROOT/phases/ephemeral/witx/wasi_ephemeral_nn.witx"],
// This must be the same ctx type as used for the target:
ctx: WasiNnCtx,
// This macro will emit a struct to represent the instance, with this name and docs:
modules: {
wasi_ephemeral_nn => {
name: WasiNn,
docs: "An instantiated instance of the wasi-nn exports.",
function_override: {}
}
},
// Error to return when caller module is missing memory export:
missing_memory: { witx::types::Errno::MissingMemory },
});
| 33.555556 | 93 | 0.674393 |
4a96fd9091a8146b28db24e2d75a896722696560 | 6,948 | use std::collections::HashMap;
use super::utils::http_get;
use crate::{error::Result, Fees, Market, MarketType, Precision, QuantityLimit};
use chrono::{prelude::*, DateTime};
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub(crate) fn fetch_symbols(market_type: MarketType) -> Result<Vec<String>> {
match market_type {
MarketType::InverseSwap => fetch_inverse_swap_symbols(),
MarketType::LinearSwap => fetch_linear_swap_symbols(),
MarketType::InverseFuture => fetch_inverse_future_symbols(),
_ => panic!("Unsupported market_type: {}", market_type),
}
}
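// For example, `fetch_symbols(MarketType::LinearSwap)` returns the USDT-quoted
// perpetual symbols such as "BTCUSDT" (symbol names are illustrative).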
pub(crate) fn fetch_markets(market_type: MarketType) -> Result<Vec<Market>> {
match market_type {
MarketType::InverseSwap => fetch_inverse_swap_markets(),
MarketType::LinearSwap => fetch_linear_swap_markets(),
MarketType::InverseFuture => fetch_inverse_future_markets(),
_ => panic!("Unsupported market_type: {}", market_type),
}
}
#[derive(Serialize, Deserialize)]
struct LeverageFilter {
min_leverage: i64,
max_leverage: i64,
leverage_step: String,
}
#[derive(Serialize, Deserialize)]
struct PriceFilter {
min_price: String,
max_price: String,
tick_size: String,
}
#[derive(Serialize, Deserialize)]
struct LotSizeFilter {
max_trading_qty: f64,
min_trading_qty: f64,
qty_step: f64,
}
#[derive(Serialize, Deserialize)]
struct BybitMarket {
name: String,
alias: String,
status: String,
base_currency: String,
quote_currency: String,
price_scale: i64,
taker_fee: String,
maker_fee: String,
leverage_filter: LeverageFilter,
price_filter: PriceFilter,
lot_size_filter: LotSizeFilter,
#[serde(flatten)]
extra: HashMap<String, Value>,
}
#[derive(Serialize, Deserialize)]
struct Response {
ret_code: i64,
ret_msg: String,
ext_code: String,
ext_info: String,
result: Vec<BybitMarket>,
}
// See https://bybit-exchange.github.io/docs/inverse/#t-querysymbol
fn fetch_markets_raw() -> Result<Vec<BybitMarket>> {
let txt = http_get("https://api.bybit.com/v2/public/symbols", None)?;
let resp = serde_json::from_str::<Response>(&txt)?;
assert_eq!(resp.ret_code, 0);
Ok(resp
.result
.into_iter()
.filter(|m| m.status == "Trading")
.collect())
}
fn fetch_inverse_swap_symbols() -> Result<Vec<String>> {
let symbols = fetch_markets_raw()?
.into_iter()
.filter(|m| m.name == m.alias && m.quote_currency == "USD")
.map(|m| m.name)
.collect::<Vec<String>>();
Ok(symbols)
}
fn fetch_linear_swap_symbols() -> Result<Vec<String>> {
let symbols = fetch_markets_raw()?
.into_iter()
.filter(|m| m.name == m.alias && m.quote_currency == "USDT")
.map(|m| m.name)
.collect::<Vec<String>>();
Ok(symbols)
}
fn fetch_inverse_future_symbols() -> Result<Vec<String>> {
let symbols = fetch_markets_raw()?
.into_iter()
.filter(|m| {
m.quote_currency == "USD" && (&m.name[(m.name.len() - 2)..]).parse::<i64>().is_ok()
})
.map(|m| m.name)
.collect::<Vec<String>>();
Ok(symbols)
}
fn to_market(raw_market: &BybitMarket) -> Market {
let pair = crypto_pair::normalize_pair(&raw_market.name, "bybit").unwrap();
let (base, quote) = {
let v: Vec<&str> = pair.split('/').collect();
(v[0].to_string(), v[1].to_string())
};
let delivery_date: Option<u64> = if (&raw_market.name[(raw_market.name.len() - 2)..])
.parse::<i64>()
.is_ok()
{
let n = raw_market.alias.len();
let s = raw_market.alias.as_str();
let month = &s[(n - 4)..(n - 2)];
let day = &s[(n - 2)..];
let now = Utc::now();
let year = Utc::now().year();
let delivery_time = DateTime::parse_from_rfc3339(
format!("{}-{}-{}T00:00:00+00:00", year, month, day).as_str(),
)
.unwrap();
let delivery_time = if delivery_time > now {
delivery_time
} else {
DateTime::parse_from_rfc3339(
format!("{}-{}-{}T00:00:00+00:00", year + 1, month, day).as_str(),
)
.unwrap()
};
assert!(delivery_time > now);
Some(delivery_time.timestamp_millis() as u64)
} else {
None
};
Market {
exchange: "bybit".to_string(),
market_type: if raw_market.name == raw_market.alias {
MarketType::InverseFuture
} else if raw_market.quote_currency == "USDT" {
MarketType::LinearSwap
} else {
MarketType::InverseSwap
},
symbol: raw_market.name.to_string(),
base_id: raw_market.base_currency.to_string(),
quote_id: raw_market.quote_currency.to_string(),
settle_id: if raw_market.quote_currency == "USDT" {
Some(raw_market.quote_currency.to_string())
} else {
Some(raw_market.base_currency.to_string())
},
base,
quote,
settle: if raw_market.quote_currency == "USDT" {
Some(raw_market.quote_currency.to_string())
} else {
Some(raw_market.base_currency.to_string())
},
active: raw_market.status == "Trading",
margin: true,
fees: Fees {
maker: raw_market.maker_fee.parse::<f64>().unwrap(),
taker: raw_market.taker_fee.parse::<f64>().unwrap(),
},
precision: Precision {
tick_size: raw_market.price_filter.tick_size.parse::<f64>().unwrap(),
lot_size: raw_market.lot_size_filter.qty_step,
},
quantity_limit: Some(QuantityLimit {
min: raw_market.lot_size_filter.min_trading_qty,
max: Some(raw_market.lot_size_filter.max_trading_qty),
}),
contract_value: Some(1.0),
delivery_date,
info: serde_json::to_value(raw_market)
.unwrap()
.as_object()
.unwrap()
.clone(),
}
}
fn fetch_inverse_swap_markets() -> Result<Vec<Market>> {
let markets = fetch_markets_raw()?
.into_iter()
.filter(|m| m.name == m.alias && m.quote_currency == "USD")
.map(|m| to_market(&m))
.collect::<Vec<Market>>();
Ok(markets)
}
fn fetch_linear_swap_markets() -> Result<Vec<Market>> {
let markets = fetch_markets_raw()?
.into_iter()
.filter(|m| m.name == m.alias && m.quote_currency == "USDT")
.map(|m| to_market(&m))
.collect::<Vec<Market>>();
Ok(markets)
}
fn fetch_inverse_future_markets() -> Result<Vec<Market>> {
let markets = fetch_markets_raw()?
.into_iter()
.filter(|m| {
m.quote_currency == "USD" && (&m.name[(m.name.len() - 2)..]).parse::<i64>().is_ok()
})
.map(|m| to_market(&m))
.collect::<Vec<Market>>();
Ok(markets)
}
| 30.88 | 95 | 0.590818 |
1efc873bee2668ccc0c0934610cc213cede77605 | 360 | fn call_it<F>(f: F) where F: Fn() { f(); }
struct A;
impl A {
fn gen(&self) {}
fn gen_mut(&mut self) {}
}
fn main() {
let mut x = A;
call_it(|| {
call_it(|| x.gen());
call_it(|| x.gen_mut()); //~ ERROR cannot borrow data mutably in a captured outer
//~^ ERROR cannot borrow data mutably in a captured outer
});
}
| 20 | 89 | 0.536111 |
f51dbd14b5312fa1da701c89c69e1bbed152dd03 | 4,994 | use ark_std::{
io::{Result as IoResult, Write},
vec::Vec,
};
use ark_ff::{
bytes::ToBytes,
fields::{Field, Fp2},
};
use num_traits::{One, Zero};
use crate::{
bn::{BnParameters, TwistType},
models::SWModelParameters,
short_weierstrass_jacobian::{GroupAffine, GroupProjective},
AffineCurve,
};
pub type G2Affine<P> = GroupAffine<<P as BnParameters>::G2Parameters>;
pub type G2Projective<P> = GroupProjective<<P as BnParameters>::G2Parameters>;
#[derive(Derivative)]
#[derivative(
Clone(bound = "P: BnParameters"),
Debug(bound = "P: BnParameters"),
PartialEq(bound = "P: BnParameters"),
Eq(bound = "P: BnParameters")
)]
pub struct G2Prepared<P: BnParameters> {
// Stores the coefficients of the line evaluations as calculated in
// https://eprint.iacr.org/2013/722.pdf
pub ell_coeffs: Vec<(Fp2<P::Fp2Params>, Fp2<P::Fp2Params>, Fp2<P::Fp2Params>)>,
pub infinity: bool,
}
#[derive(Derivative)]
#[derivative(
Clone(bound = "P: BnParameters"),
Copy(bound = "P: BnParameters"),
Debug(bound = "P: BnParameters")
)]
struct G2HomProjective<P: BnParameters> {
x: Fp2<P::Fp2Params>,
y: Fp2<P::Fp2Params>,
z: Fp2<P::Fp2Params>,
}
impl<P: BnParameters> Default for G2Prepared<P> {
fn default() -> Self {
Self::from(G2Affine::<P>::prime_subgroup_generator())
}
}
impl<P: BnParameters> ToBytes for G2Prepared<P> {
fn write<W: Write>(&self, mut writer: W) -> IoResult<()> {
for coeff in &self.ell_coeffs {
coeff.0.write(&mut writer)?;
coeff.1.write(&mut writer)?;
coeff.2.write(&mut writer)?;
}
self.infinity.write(writer)
}
}
impl<P: BnParameters> From<G2Affine<P>> for G2Prepared<P> {
fn from(q: G2Affine<P>) -> Self {
let two_inv = P::Fp::one().double().inverse().unwrap();
if q.is_zero() {
return Self {
ell_coeffs: vec![],
infinity: true,
};
}
let mut ell_coeffs = vec![];
let mut r = G2HomProjective {
x: q.x,
y: q.y,
z: Fp2::one(),
};
let negq = -q;
for i in (1..P::ATE_LOOP_COUNT.len()).rev() {
ell_coeffs.push(doubling_step::<P>(&mut r, &two_inv));
let bit = P::ATE_LOOP_COUNT[i - 1];
match bit {
1 => {
ell_coeffs.push(addition_step::<P>(&mut r, &q));
}
-1 => {
ell_coeffs.push(addition_step::<P>(&mut r, &negq));
}
_ => continue,
}
}
let q1 = mul_by_char::<P>(q);
let mut q2 = mul_by_char::<P>(q1);
if P::ATE_LOOP_COUNT_IS_NEGATIVE {
r.y = -r.y;
}
q2.y = -q2.y;
ell_coeffs.push(addition_step::<P>(&mut r, &q1));
ell_coeffs.push(addition_step::<P>(&mut r, &q2));
Self {
ell_coeffs,
infinity: false,
}
}
}
impl<P: BnParameters> G2Prepared<P> {
pub fn is_zero(&self) -> bool {
self.infinity
}
}
fn mul_by_char<P: BnParameters>(r: G2Affine<P>) -> G2Affine<P> {
// multiply by field characteristic
let mut s = r;
s.x.frobenius_map(1);
s.x *= &P::TWIST_MUL_BY_Q_X;
s.y.frobenius_map(1);
s.y *= &P::TWIST_MUL_BY_Q_Y;
s
}
fn doubling_step<B: BnParameters>(
r: &mut G2HomProjective<B>,
two_inv: &B::Fp,
) -> (Fp2<B::Fp2Params>, Fp2<B::Fp2Params>, Fp2<B::Fp2Params>) {
// Formula for line function when working with
// homogeneous projective coordinates.
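    // With b' = B::G2Parameters::COEFF_B, the steps below compute:
    //   A = X*Y/2, B = Y^2, C = Z^2, E = 3*b'*C, F = 3*E, G = (B + F)/2,
    //   H = (Y + Z)^2 - (B + C), I = E - B, J = X^2,
    //   X3 = A*(B - F), Y3 = G^2 - 3*E^2, Z3 = B*H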
let mut a = r.x * &r.y;
a.mul_assign_by_fp(two_inv);
let b = r.y.square();
let c = r.z.square();
let e = B::G2Parameters::COEFF_B * &(c.double() + &c);
let f = e.double() + &e;
let mut g = b + &f;
g.mul_assign_by_fp(two_inv);
let h = (r.y + &r.z).square() - &(b + &c);
let i = e - &b;
let j = r.x.square();
let e_square = e.square();
r.x = a * &(b - &f);
r.y = g.square() - &(e_square.double() + &e_square);
r.z = b * &h;
match B::TWIST_TYPE {
TwistType::M => (i, j.double() + &j, -h),
TwistType::D => (-h, j.double() + &j, i),
}
}
fn addition_step<B: BnParameters>(
r: &mut G2HomProjective<B>,
q: &G2Affine<B>,
) -> (Fp2<B::Fp2Params>, Fp2<B::Fp2Params>, Fp2<B::Fp2Params>) {
// Formula for line function when working with
// homogeneous projective coordinates.
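    // With (X1, Y1, Z1) = r and (x2, y2) = q, the steps below compute:
    //   theta = Y1 - y2*Z1, lambda = X1 - x2*Z1, C = theta^2, D = lambda^2,
    //   E = lambda^3, F = Z1*C, G = X1*D, H = E + F - 2*G,
    //   X3 = lambda*H, Y3 = theta*(G - H) - E*Y1, Z3 = Z1*E,
    //   and J = theta*x2 - lambda*y2 as the line-evaluation coefficient.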
let theta = r.y - &(q.y * &r.z);
let lambda = r.x - &(q.x * &r.z);
let c = theta.square();
let d = lambda.square();
let e = lambda * &d;
let f = r.z * &c;
let g = r.x * &d;
let h = e + &f - &g.double();
r.x = lambda * &h;
r.y = theta * &(g - &h) - &(e * &r.y);
r.z *= &e;
let j = theta * &q.x - &(lambda * &q.y);
match B::TWIST_TYPE {
TwistType::M => (j, -theta, lambda),
TwistType::D => (lambda, -theta, j),
}
}
| 26.284211 | 83 | 0.532839 |
2854ffce2289de253e95f41ae3fe65bd0748c4a3 | 749 | use crate::common::*;
/// A single function parameter
#[derive(PartialEq, Debug)]
pub(crate) struct Parameter<'src> {
/// The parameter name
pub(crate) name: Name<'src>,
/// Parameter is variadic
pub(crate) variadic: bool,
/// An optional default expression
pub(crate) default: Option<Expression<'src>>,
}
impl<'src> Display for Parameter<'src> {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
let color = Color::fmt(f);
if self.variadic {
write!(f, "{}", color.annotation().paint("+"))?;
}
write!(f, "{}", color.parameter().paint(self.name.lexeme()))?;
if let Some(ref default) = self.default {
write!(f, "={}", color.string().paint(&default.to_string()))?;
}
Ok(())
}
}
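  // For example, a variadic parameter named `args` with a default expression
  // `"-"` displays roughly as `+args="-"` (plus the color codes applied above).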
| 27.740741 | 68 | 0.600801 |
e41e1ed96b1fdc4de4526657fa08c6dae0175607 | 4,040 | // Copyright (c) The Dijets Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Instrument `assert false;` in strategic locations in the program such that if proved, signals
//! an inconsistency among the specifications.
//!
//! The presence of inconsistency is a serious issue. If there is an inconsistency in the
//! verification assumptions (perhaps due to a specification mistake or a Prover bug), any false
//! post-condition can be proved vacuously. The `InconsistencyCheckInstrumenter` adds
//! an `assert false` before
//! - every `return` and
//! - every `abort` (if the `unconditional-abort-as-inconsistency` option is set).
//! In this way, if the instrumented `assert false` can be proved, it means we have an inconsistency
//! in the specifications.
//!
//! A function that unconditionally aborts might be considered a form of inconsistency as well.
//! Consider the function `fun always_abort() { abort 0 }`: it might seem surprising that the prover
//! can prove `spec always_abort { ensures 1 == 2; }`. Because this function aborts unconditionally,
//! any post-condition can be proved. Checking for this behavior is turned off by default, and can
//! be enabled with the `unconditional-abort-as-inconsistency` flag.
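//!
//! For example (pseudocode for the instrumented bytecode; syntax is illustrative):
//!
//!     before:  ...; ret        after:  ...; prop assert false; ret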
use crate::{
function_data_builder::FunctionDataBuilder,
function_target::FunctionData,
function_target_pipeline::{
FunctionTargetProcessor, FunctionTargetsHolder, FunctionVariant, VerificationFlavor,
},
options::ProverOptions,
stackless_bytecode::{Bytecode, PropKind},
};
use move_model::{exp_generator::ExpGenerator, model::FunctionEnv};
// This message is for the boogie wrapper, and not shown to the users.
const EXPECTED_TO_FAIL: &str = "expected to fail";
pub struct InconsistencyCheckInstrumenter {}
impl InconsistencyCheckInstrumenter {
pub fn new() -> Box<Self> {
Box::new(Self {})
}
}
impl FunctionTargetProcessor for InconsistencyCheckInstrumenter {
fn process(
&self,
targets: &mut FunctionTargetsHolder,
fun_env: &FunctionEnv<'_>,
data: FunctionData,
) -> FunctionData {
if fun_env.is_native() || fun_env.is_intrinsic() {
// Nothing to do.
return data;
}
let flavor = match &data.variant {
FunctionVariant::Baseline
| FunctionVariant::Verification(VerificationFlavor::Inconsistency(..)) => {
// instrumentation only applies to non-inconsistency verification variants
return data;
}
FunctionVariant::Verification(flavor) => flavor.clone(),
};
// obtain the options first
let options = ProverOptions::get(fun_env.module_env.env);
// create a clone of the data for inconsistency check
let new_data = data.fork(FunctionVariant::Verification(
VerificationFlavor::Inconsistency(Box::new(flavor)),
));
// instrumentation
let mut builder = FunctionDataBuilder::new(fun_env, new_data);
let old_code = std::mem::take(&mut builder.data.code);
for bc in old_code {
if matches!(bc, Bytecode::Ret(..))
|| (matches!(bc, Bytecode::Abort(..))
&& !options.unconditional_abort_as_inconsistency)
{
let loc = builder.fun_env.get_spec_loc();
builder.set_loc_and_vc_info(loc, EXPECTED_TO_FAIL);
let exp = builder.mk_bool_const(false);
builder.emit_with(|id| Bytecode::Prop(id, PropKind::Assert, exp));
}
builder.emit(bc);
}
// add the new variant to targets
let new_data = builder.data;
targets.insert_target_data(
&fun_env.get_qualified_id(),
new_data.variant.clone(),
new_data,
);
// the original function data is unchanged
data
}
fn name(&self) -> String {
"inconsistency_check_instrumenter".to_string()
}
}
| 38.47619 | 100 | 0.654703 |
61e74e161325e18e70c128948a9ddf7bc9e9af62 | 538 | //!Simple HTTP client with built-in HTTPS support.
//!Currently it's in heavy development and may frequently change.
//!
//!## Example
//!Basic GET request
//!```
//!use http_req::request;
//!
//!fn main() {
//! let mut writer = Vec::new(); //container for body of a response
//! let res = request::get("https://doc.rust-lang.org/", &mut writer).unwrap();
//!
//! println!("Status: {} {}", res.status_code(), res.reason());
//!}
//!```
pub mod error;
pub mod request;
pub mod response;
// pub mod tls;
pub mod uri;
mod chunked;
| 23.391304 | 82 | 0.620818 |
1e9ea8d8218d800801ecf0ee8dbd8c481243f6c8 | 629 | /// PubKey defines a secp256r1 ECDSA public key.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PubKey {
/// Point on secp256r1 curve in a compressed representation as specified in section
/// 4.3.6 of ANSI X9.62: <https://webstore.ansi.org/standards/ascx9/ansix9621998>
#[prost(bytes="vec", tag="1")]
pub key: ::prost::alloc::vec::Vec<u8>,
}
/// PrivKey defines a secp256r1 ECDSA private key.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PrivKey {
/// secret number serialized using big-endian encoding
#[prost(bytes="vec", tag="1")]
pub secret: ::prost::alloc::vec::Vec<u8>,
}
| 39.3125 | 87 | 0.678855 |
de99fcf168b0e3b91e8e467722ce0412e80e96d5 | 5,015 | //! Decryptors for age.
use age_core::format::{FileKey, Stanza};
use secrecy::SecretString;
use std::io::Read;
use super::Nonce;
use crate::{
error::DecryptError,
format::Header,
keys::v1_payload_key,
primitives::stream::{PayloadKey, Stream, StreamReader},
scrypt, Identity,
};
#[cfg(feature = "async")]
use futures::io::AsyncRead;
struct BaseDecryptor<R> {
/// The age file.
input: R,
/// The age file's header.
header: Header,
/// The age file's AEAD nonce
nonce: Nonce,
}
impl<R> BaseDecryptor<R> {
fn obtain_payload_key<F>(&self, mut filter: F) -> Result<PayloadKey, DecryptError>
where
F: FnMut(&[Stanza]) -> Option<Result<FileKey, DecryptError>>,
{
match &self.header {
Header::V1(header) => filter(&header.recipients)
.unwrap_or(Err(DecryptError::NoMatchingKeys))
.and_then(|file_key| v1_payload_key(&file_key, header, &self.nonce)),
Header::Unknown(_) => unreachable!(),
}
}
}
/// Decryptor for an age file encrypted to a list of recipients.
pub struct RecipientsDecryptor<R>(BaseDecryptor<R>);
impl<R> RecipientsDecryptor<R> {
pub(super) fn new(input: R, header: Header, nonce: Nonce) -> Self {
RecipientsDecryptor(BaseDecryptor {
input,
header,
nonce,
})
}
fn obtain_payload_key(
&self,
mut identities: impl Iterator<Item = Box<dyn Identity>>,
) -> Result<PayloadKey, DecryptError> {
self.0
.obtain_payload_key(|r| identities.find_map(|key| key.unwrap_stanzas(r)))
}
}
impl<R: Read> RecipientsDecryptor<R> {
/// Attempts to decrypt the age file.
///
/// If successful, returns a reader that will provide the plaintext.
pub fn decrypt(
self,
identities: impl Iterator<Item = Box<dyn Identity>>,
) -> Result<StreamReader<R>, DecryptError> {
self.obtain_payload_key(identities)
.map(|payload_key| Stream::decrypt(payload_key, self.0.input))
}
}
#[cfg(feature = "async")]
#[cfg_attr(docsrs, doc(cfg(feature = "async")))]
impl<R: AsyncRead + Unpin> RecipientsDecryptor<R> {
/// Attempts to decrypt the age file.
///
/// If successful, returns a reader that will provide the plaintext.
pub fn decrypt_async(
self,
identities: impl Iterator<Item = Box<dyn Identity>>,
) -> Result<StreamReader<R>, DecryptError> {
self.obtain_payload_key(identities)
.map(|payload_key| Stream::decrypt_async(payload_key, self.0.input))
}
}
/// Decryptor for an age file encrypted with a passphrase.
pub struct PassphraseDecryptor<R>(BaseDecryptor<R>);
impl<R> PassphraseDecryptor<R> {
pub(super) fn new(input: R, header: Header, nonce: Nonce) -> Self {
PassphraseDecryptor(BaseDecryptor {
input,
header,
nonce,
})
}
fn obtain_payload_key(
&self,
passphrase: &SecretString,
max_work_factor: Option<u8>,
) -> Result<PayloadKey, DecryptError> {
let identity = scrypt::Identity {
passphrase,
max_work_factor,
};
self.0.obtain_payload_key(|r| identity.unwrap_stanzas(r))
}
}
impl<R: Read> PassphraseDecryptor<R> {
/// Attempts to decrypt the age file.
///
/// `max_work_factor` is the maximum accepted work factor. If `None`, the default
/// maximum is adjusted to around 16 seconds of work.
///
/// If successful, returns a reader that will provide the plaintext.
pub fn decrypt(
self,
passphrase: &SecretString,
max_work_factor: Option<u8>,
) -> Result<StreamReader<R>, DecryptError> {
self.obtain_payload_key(passphrase, max_work_factor)
.map(|payload_key| Stream::decrypt(payload_key, self.0.input))
}
}
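// Usage sketch (illustrative; assumes the crate's top-level decryptor hands
// out this variant after parsing the header):
//
//     let mut reader = passphrase_decryptor.decrypt(&passphrase, None)?;
//     let mut plaintext = vec![];
//     reader.read_to_end(&mut plaintext)?;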
#[cfg(feature = "async")]
#[cfg_attr(docsrs, doc(cfg(feature = "async")))]
impl<R: AsyncRead + Unpin> PassphraseDecryptor<R> {
/// Attempts to decrypt the age file.
///
/// `max_work_factor` is the maximum accepted work factor. If `None`, the default
/// maximum is adjusted to around 16 seconds of work.
///
/// If successful, returns a reader that will provide the plaintext.
pub fn decrypt_async(
self,
passphrase: &SecretString,
max_work_factor: Option<u8>,
) -> Result<StreamReader<R>, DecryptError> {
self.obtain_payload_key(passphrase, max_work_factor)
.map(|payload_key| Stream::decrypt_async(payload_key, self.0.input))
}
}
/// Callbacks that might be triggered during decryption.
pub trait Callbacks {
/// Shows a message to the user.
///
/// This can be used to prompt the user to take some physical action, such as
/// inserting a hardware key.
fn prompt(&self, message: &str);
/// Requests a passphrase to decrypt a key.
fn request_passphrase(&self, description: &str) -> Option<SecretString>;
}
| 30.579268 | 86 | 0.62991 |
f872e1f41a559c96151d6a2ec7d467129628cf77 | 4,824 | //! JSON文字列をパースするときに型変換行うための関数を定義する。
use crate::error::Error;
use crate::response::{ErrorResponse, RawResponse, RestResponse};
use chrono::{DateTime, Utc};
use serde::{de, Deserialize, Deserializer};
use serde_json::Value;
/// Converts a str to an f64.
pub(crate) fn str_to_f64<'de, D: Deserializer<'de>>(deserializer: D) -> Result<f64, D::Error> {
Ok(match Value::deserialize(deserializer)? {
Value::String(s) => s.parse().map_err(de::Error::custom)?,
Value::Number(num) => {
num.as_f64()
.ok_or_else(|| de::Error::custom("Invalid number"))? as f64
}
_ => return Err(de::Error::custom("wrong type")),
})
}
/// Converts a str to an i64.
pub(crate) fn str_to_i64<'de, D: Deserializer<'de>>(deserializer: D) -> Result<i64, D::Error> {
Ok(match Value::deserialize(deserializer)? {
Value::String(s) => s.parse().map_err(de::Error::custom)?,
Value::Number(num) => {
num.as_i64()
.ok_or_else(|| de::Error::custom("Invalid number"))? as i64
}
_ => return Err(de::Error::custom("wrong type")),
})
}
/// Converts an ID (such as an order ID or execution ID) to a string.
/// GMO Coin's announcement says IDs become strings as of November 4, 2020, but as of November 14, 2020 they are still returned as numbers.
/// To stay safe if they do change to strings in the future, normalize to a string whether the value is a number or a string.
pub(crate) fn id_to_str<'de, D: Deserializer<'de>>(deserializer: D) -> Result<String, D::Error> {
Ok(match Value::deserialize(deserializer)? {
Value::String(s) => s,
Value::Number(num) => num.to_string(),
_ => return Err(de::Error::custom("wrong type")),
})
}
/// Converts an ID to a number.
pub(crate) fn id_to_num(id: &str) -> Result<i32, Error> {
Ok(match id.parse::<i32>() {
Ok(n) => n,
Err(_) => return Err(Error::IdToNumberError(id.to_string())),
})
}
/// Converts an array containing multiple IDs so that each element is a string.
pub(crate) fn ids_to_strvec<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<Vec<String>, D::Error> {
let mut strvec = Vec::<String>::new();
match Value::deserialize(deserializer)? {
Value::Array(array) => {
for v in array {
let id = match v {
Value::String(s) => s,
Value::Number(n) => n.to_string(),
_ => return Err(de::Error::custom("wrong type")),
};
strvec.push(id);
}
}
_ => return Err(de::Error::custom("wrong type")),
}
Ok(strvec)
}
/// Converts a timestamp returned by the GMO Coin API into a chrono datetime.
/// GMO Coin timestamps are in UTC; this function returns a UTC datetime as well.
pub(crate) fn gmo_timestamp_to_chrono_timestamp<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<DateTime<Utc>, D::Error> {
let s: String = Deserialize::deserialize(deserializer)?;
Ok(
match chrono::naive::NaiveDateTime::parse_from_str(&s, "%Y-%m-%dT%H:%M:%S.%3fZ") {
Ok(date) => DateTime::<Utc>::from_utc(date, Utc),
Err(_) => return Err(de::Error::custom("wrong datetime format")),
},
)
}
/// Repacks the HTTP response obtained from a GMO Coin API call into a RestResponse<T> struct.
pub(crate) fn parse_from_http_response<'a, T>(
http_response: &'a RawResponse,
) -> Result<RestResponse<T>, Error>
where
T: serde::de::Deserialize<'a>,
{
let body: Result<T, serde_json::Error> = serde_json::from_str(&http_response.body_text);
Ok(match body {
Ok(b) => RestResponse {
http_status_code: http_response.http_status_code,
body: b,
},
Err(_) => {
let err_resp: ErrorResponse = serde_json::from_str(&http_response.body_text)?;
return Err(Error::APIError(err_resp));
}
})
}
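// Usage sketch (illustrative; `Ticker` stands in for any Deserialize type):
//
//     let resp: RestResponse<Ticker> = parse_from_http_response(&raw_response)?;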
#[cfg(test)]
mod tests {
use crate::json::{gmo_timestamp_to_chrono_timestamp, str_to_f64, str_to_i64};
use chrono::*;
use serde::Deserialize;
#[derive(Deserialize)]
struct Number {
#[serde(deserialize_with = "str_to_i64")]
i: i64,
#[serde(deserialize_with = "str_to_f64")]
f: f64,
}
#[derive(Deserialize)]
struct Date {
#[serde(deserialize_with = "gmo_timestamp_to_chrono_timestamp")]
d: DateTime<Utc>,
}
#[test]
fn test_str_to_numbers() {
let json_str = r#"{"i": "100", "f": "-10.55"}"#;
let json: Number = serde_json::from_str(&json_str).unwrap();
assert_eq!(json.i, 100);
assert_eq!(json.f, -10.55);
}
#[test]
fn test_str_to_datetime() {
let json_str = r#"{"d": "2019-03-19T02:15:06.001Z"}"#;
let json: Date = serde_json::from_str(&json_str).unwrap();
assert_eq!(json.d.year(), 2019);
assert_eq!(json.d.month(), 3);
assert_eq!(json.d.day(), 19);
assert_eq!(json.d.hour(), 2);
assert_eq!(json.d.minute(), 15);
assert_eq!(json.d.second(), 6);
assert_eq!(json.d.timestamp_subsec_millis(), 1);
}
}
| 32.375839 | 97 | 0.58893 |
d6ae8b030e18e37b265d3c0932ffd49f747e9f38 | 1,870 | //! Tests auto-converted from "sass-spec/spec/css/moz_document"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/css/moz_document/empty_prefix.hrx"
#[test]
fn empty_prefix() {
assert_eq!(
rsass(
"// An empty url-prefix() should not be deprecated yet, as it\'s still supported\
\n// in Firefox\'s release channel at time of writing.\
\n\
\n@-moz-document url-prefix() {\
\n a {b: c}\
\n}\
\n\
\n@-moz-document url-prefix(\"\") {\
\n a {b: c}\
\n}\
\n\
\n@-moz-document url-prefix(\'\') {\
\n a {b: c}\
\n}\
\n"
)
.unwrap(),
"@-moz-document url-prefix() {\
\n a {\
\n b: c;\
\n }\
\n}\
\n@-moz-document url-prefix(\"\") {\
\n a {\
\n b: c;\
\n }\
\n}\
\n@-moz-document url-prefix(\'\') {\
\n a {\
\n b: c;\
\n }\
\n}\
\n"
);
}
mod functions;
// From "sass-spec/spec/css/moz_document/multi_function.hrx"
#[test]
#[ignore] // wrong result
fn multi_function() {
assert_eq!(
rsass(
"@-moz-document url(http://www.w3.org/),\
\n url-prefix(http://www.w3.org/Style/),\
\n domain(mozilla.org),\
\n regexp(\"https:.*\") {\
\n a {b: c}\
\n}\
\n"
)
.unwrap(),
"@-moz-document url(http://www.w3.org/),\
\n url-prefix(http://www.w3.org/Style/),\
\n domain(mozilla.org),\
\n regexp(\"https:.*\") {\
\n a {\
\n b: c;\
\n }\
\n}\
\n"
);
}
| 25.27027 | 93 | 0.383422 |
79dd7ed59020a02108b6e44ee7696e44436eb5a6 | 2,834 | use super::{ffi, Context, Error};
use crate::{MouseButton, ScreenContext};
// The implementation of MouseContext is adapted from here:
// https://github.com/ccMSC/ckb/blob/master/src/ckb-daemon/input_mac.c
fn button_event(ctx: &mut Context, event_type: u32, button_number: u8, down: bool) -> Result<(), Error> {
let mut event = ffi::NXEventData::default();
event.compound.subType = ffi::NX_SUBTYPE_AUX_MOUSE_BUTTONS;
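    // Per ckb's input_mac.c, misc.L[0] carries the affected button mask and
    // misc.L[1] the new pressed state for NX_SUBTYPE_AUX_MOUSE_BUTTONS events.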
unsafe {
event.compound.misc.L[0] = 1 << button_number;
event.compound.misc.L[1] = if down { 1 << button_number } else { 0 };
}
ctx.post_event(ffi::NX_SYSDEFINED, &event, 0, 0)?;
event = ffi::NXEventData::default();
event.mouse.buttonNumber = button_number;
ctx.post_event(event_type, &event, 0, 0)
}
impl crate::MouseContext for Context {
fn mouse_move_rel(&mut self, dx: i32, dy: i32) -> Result<(), Error> {
let mut event = ffi::NXEventData::default();
event.mouseMove.dx = dx;
event.mouseMove.dy = dy;
let mut event_type = ffi::NX_MOUSEMOVED;
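        // While a button is held, macOS expects *dragged* events rather than
        // plain moves; pick the variant for the lowest-numbered held button.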
if self.button_state & 0b1 != 0 {
event_type = ffi::NX_LMOUSEDRAGGED;
} else if self.button_state & 0b10 != 0 {
event_type = ffi::NX_RMOUSEDRAGGED;
} else if self.button_state & 0b100 != 0 {
event_type = ffi::NX_OMOUSEDRAGGED;
}
self.post_event(event_type, &event, 0, ffi::kIOHIDSetRelativeCursorPosition)
}
fn mouse_move_abs(&mut self, x: i32, y: i32) -> Result<(), Error> {
let location = self.cursor_location()?;
self.mouse_move_rel(x - location.0, y - location.1)
}
fn mouse_scroll(&mut self, dx: i32, dy: i32) -> Result<(), Error> {
let mut event = ffi::NXEventData::default();
event.scrollWheel.fixedDeltaAxis1 = dy << 13;
event.scrollWheel.fixedDeltaAxis2 = dx << 13;
self.post_event(ffi::NX_SCROLLWHEELMOVED, &event, 0, 0)
}
fn mouse_down(&mut self, button: MouseButton) -> Result<(), Error> {
let (event_type, button_number) = match button {
MouseButton::Left => (ffi::NX_LMOUSEDOWN, 0),
MouseButton::Right => (ffi::NX_RMOUSEDOWN, 1),
MouseButton::Middle => (ffi::NX_OMOUSEDOWN, 2),
};
button_event(self, event_type, button_number, true)?;
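        // Track held buttons so mouse_move_rel can emit dragged events.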
self.button_state |= 1 << button_number;
Ok(())
}
fn mouse_up(&mut self, button: MouseButton) -> Result<(), Error> {
let (event_type, button_number) = match button {
MouseButton::Left => (ffi::NX_LMOUSEUP, 0),
MouseButton::Right => (ffi::NX_RMOUSEUP, 1),
MouseButton::Middle => (ffi::NX_OMOUSEUP, 2),
};
button_event(self, event_type, button_number, false)?;
self.button_state &= !(1 << button_number);
Ok(())
}
}
| 37.786667 | 105 | 0.611503 |
22cf46b1ad320eb43e1f9f809a19a6459d6e9548 | 1,153 | // Take a look at the license at the top of the repository in the LICENSE file.
use crate::{RenderNode, RenderNodeType};
use glib::translate::*;
define_render_node!(
ContainerNode,
ffi::GskContainerNode,
ffi::gsk_container_node_get_type,
RenderNodeType::ContainerNode
);
impl ContainerNode {
#[doc(alias = "gsk_container_node_new")]
pub fn new(children: &[RenderNode]) -> ContainerNode {
assert_initialized_main_thread!();
let n_children = children.len() as u32;
unsafe {
from_glib_full(ffi::gsk_container_node_new(
children.to_glib_none().0,
n_children,
))
}
}
#[doc(alias = "gsk_container_node_get_child")]
pub fn get_child(&self, idx: u32) -> Option<RenderNode> {
unsafe {
from_glib_none(ffi::gsk_container_node_get_child(
self.to_glib_none().0,
idx,
))
}
}
#[doc(alias = "gsk_container_node_get_n_children")]
pub fn get_n_children(&self) -> u32 {
unsafe { ffi::gsk_container_node_get_n_children(self.to_glib_none().0) }
}
}
| 28.121951 | 80 | 0.614918 |
019f7a394f20f8f6ee5ec1adfa12511ceade62cc | 2,886 | mod calibration;
mod hid;
mod image;
mod imu_handler;
pub use crate::image::*;
use anyhow::Result;
pub use calibration::*;
use cgmath::vec3;
pub use hid::*;
use hid_gamepad_sys::{GamepadDevice, GamepadDriver, JoyKey, Motion};
use hidapi::HidApi;
pub use imu_handler::IMU;
pub use joycon_sys;
pub use hidapi;
use joycon_sys::{imu::IMU_SAMPLES_PER_SECOND, NINTENDO_VENDOR_ID};
pub struct JoyconDriver;
impl GamepadDriver for JoyconDriver {
fn init(
&self,
api: &HidApi,
device_info: &hidapi::DeviceInfo,
) -> Result<Option<Box<dyn GamepadDevice>>> {
if device_info.vendor_id() == NINTENDO_VENDOR_ID {
let mut joycon = JoyCon::new(device_info.open_device(api)?, device_info.clone())?;
joycon.enable_imu()?;
joycon.load_calibration()?;
Ok(Some(Box::new(joycon)))
} else {
Ok(None)
}
}
}
impl GamepadDevice for JoyCon {
fn recv(&mut self) -> Result<hid_gamepad_sys::Report> {
Ok(self.tick()?.into())
}
fn as_any(&mut self) -> &mut dyn std::any::Any {
self
}
}
impl From<Report> for hid_gamepad_sys::Report {
fn from(report: Report) -> Self {
let b = &report.buttons;
Self {
left_joystick: report.left_stick,
right_joystick: report.right_stick,
motion: report
.imu
.unwrap()
.iter()
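                // Remap the Joy-Con IMU axes into hid-gamepad's coordinate frame.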
.map(|x| Motion {
acceleration: vec3(-x.accel.y, x.accel.z, x.accel.x).into(),
rotation_speed: vec3(x.gyro.y, -x.gyro.z, -x.gyro.x).into(),
})
.collect(),
keys: enum_map::enum_map! {
JoyKey::Up => b.left.up().into(),
JoyKey::Down => b.left.down().into(),
JoyKey::Left => b.left.left().into(),
JoyKey::Right=> b.left.right().into(),
JoyKey::N => b.right.x().into(),
JoyKey::S => b.right.b().into(),
JoyKey::E => b.right.a().into(),
JoyKey::W => b.right.y().into(),
JoyKey::L=> b.left.l().into(),
JoyKey::R=> b.right.r().into(),
JoyKey::ZL => b.left.zl().into(),
JoyKey::ZR => b.right.zr().into(),
JoyKey::SL => (b.left.sl() | b.right.sl()).into(),
JoyKey::SR => (b.left.sr() | b.right.sr()).into(),
JoyKey::L3 => b.middle.lstick().into(),
JoyKey::R3 => b.middle.rstick().into(),
JoyKey::Minus => b.middle.minus().into(),
JoyKey::Plus => b.middle.plus().into(),
JoyKey::Capture => b.middle.capture().into(),
JoyKey::Home => b.middle.home().into(),
},
frequency: IMU_SAMPLES_PER_SECOND,
}
}
}
| 32.426966 | 94 | 0.506584 |
486bc22e1ed4a9c1b25947395f8038c548a23e43 | 4,289 | use crate::thread_pool::ThreadPool;
use crate::{
common::{GetResponse, RemoveResponse, Request, SetResponse},
KvsEngine, Result,
};
use serde_json::Deserializer;
use slog;
use slog_term;
use slog::Drain;
use std::io::{self, BufReader, BufWriter, Write};
use std::net::{TcpListener, TcpStream, ToSocketAddrs};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread::{self, JoinHandle};
use std::time::Duration;
/// The Server of a key value store
pub struct KvsServer<E: KvsEngine, P: ThreadPool> {
engine: E,
logger: slog::Logger,
pool: P,
handle: Option<JoinHandle<()>>,
shutdown: Arc<AtomicBool>,
}
impl<E: KvsEngine, P: ThreadPool> KvsServer<E, P> {
/// Create a `KvsServer` with a given storage engine.
pub fn new(engine: E, logger: slog::Logger, pool: P) -> Self {
Self {
engine,
logger,
pool,
handle: None,
shutdown: Arc::new(AtomicBool::new(false)),
}
}
    /// Shut down the server and wait for the accept loop to finish
pub fn shutdown(&mut self) {
self.shutdown.store(true, Ordering::Relaxed);
let handle = self.handle.take().unwrap();
handle.join().unwrap();
}
/// Run the server listening on the given address
pub fn run(&mut self, addr: impl ToSocketAddrs) -> Result<()> {
let listener = TcpListener::bind(addr)?;
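        // Nonblocking accept lets the loop below poll the shutdown flag
        // between connections instead of blocking in `accept` forever.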
listener.set_nonblocking(true)?;
let shutdown = self.shutdown.clone();
let engine = self.engine.clone();
let logger = self.logger.clone();
let pool = self.pool.clone();
let handle = thread::spawn(move || {
for stream in listener.incoming() {
match stream {
Ok(stream) => {
let engine = engine.clone();
pool.spawn(|| {
if let Err(e) = serve(engine, stream) {
eprintln!("Error on serving client: {}", e);
}
});
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
if shutdown.load(Ordering::Relaxed) {
break;
}
thread::sleep(Duration::from_millis(10));
continue;
}
Err(e) => slog::error!(logger, "Connection failed: {}", e),
}
}
});
self.handle.replace(handle);
Ok(())
}
}
/// Handle requests from a client: run each query against the store and send the result back over the `TcpStream`
fn serve<E: KvsEngine>(engine: E, stream: TcpStream) -> Result<()> {
let peer_addr = stream.peer_addr()?;
let reader = BufReader::new(&stream);
let mut writer = BufWriter::new(&stream);
let req_reader = Deserializer::from_reader(reader).into_iter::<Request>();
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::FullFormat::new(decorator).build().fuse();
let drain = std::sync::Mutex::new(drain).fuse();
let logger = slog::Logger::root(drain, slog::o!());
macro_rules! sent_resp {
( $resp:expr ) => {{
let resp = $resp;
serde_json::to_writer(&mut writer, &resp)?;
writer.flush()?;
slog::debug!(logger, "Response sent to {}: {:?}", peer_addr, resp);
};};
}
for req in req_reader {
let req = req?;
slog::debug!(logger, "Receive request from {}: {:?}", peer_addr, req);
match req {
Request::Get { key } => sent_resp!(match engine.get(key) {
Ok(value) => GetResponse::Ok(value),
Err(e) => GetResponse::Err(format!("{}", e)),
}),
Request::Set { key, value } => sent_resp!(match engine.set(key, value) {
Ok(_) => SetResponse::Ok(()),
Err(e) => SetResponse::Err(format!("{}", e)),
}),
Request::Remove { key } => sent_resp!(match engine.remove(key) {
Ok(_) => RemoveResponse::Ok(()),
Err(e) => RemoveResponse::Err(format!("{}", e)),
}),
};
}
Ok(())
}
| 33.248062 | 96 | 0.514339 |
1cc46b0e9aac70e52d929ee37057d028d36cc55f | 7,996 | // mod rttrace;
use crate::rttrace::{Data,init,trace};
use nanorand::{Rng, WyRand};
pub fn quick_sort_rt(arr: &mut Vec<i32>) -> Data {
let mut data = init();
let low = 0;
let high = arr.len() as i32;
quick_sort_helper_rt(arr, low, high - 1, &mut data);
data
}
fn quick_sort_helper_rt(arr: &mut Vec<i32>, low: i32, high: i32, data: &mut Data) {
trace("READ\tlow".to_string(), data);
trace("READ\thigh".to_string(), data);
if low < high {
trace("WRITE\tpivot".to_string(), data);
let pivot = partition_rt(arr, low, high, data);
trace("READ\tpivot".to_string(), data);
trace("READ\tarr".to_string(), data);
trace("READ\tlow".to_string(), data);
quick_sort_helper_rt(arr, low, pivot - 1, data);
trace("READ\tpivot".to_string(), data);
trace("READ\tarr".to_string(), data);
trace("READ\tlow".to_string(), data);
quick_sort_helper_rt(arr, pivot + 1, high, data);
}
}
fn partition_rt(arr: &mut Vec<i32>, low: i32, high: i32, data: &mut Data) -> i32 {
trace("READ\thigh".to_string(), data);
trace("WRITE\tpivot".to_string(), data);
let pivot = high; //rng(low,high);
trace("READ\tlow".to_string(), data);
trace("WRITE\tindex".to_string(), data);
let mut index = low - 1;
trace("READ\thigh".to_string(), data);
trace("WRITE\tlast".to_string(), data);
let mut last = high;
loop {
trace("READ\tindex".to_string(), data);
trace("WRITE\tindex".to_string(), data);
index += 1;
trace("READ\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &pivot.to_string() + "}]", data);
while arr[index as usize] < arr[pivot as usize] {
trace("READ\tindex".to_string(), data);
trace("WRITE\tindex".to_string(), data);
index += 1;
trace("READ\tindex".to_string(), data);
trace("READ\tpivot".to_string(), data);
trace("READ\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &pivot.to_string() + "}]", data);
}
trace("READ\tlast".to_string(), data);
trace("WRITE\tlast".to_string(), data);
last -= 1;
trace("READ\tlast".to_string(), data);
trace("READ\tarr[{".to_string() + &last.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &pivot.to_string() + "}]", data);
while last >= 0 && arr[last as usize] > arr[pivot as usize] {
trace("READ\tindex".to_string(), data);
trace("WRITE\tindex".to_string(), data);
last -= 1;
trace("READ\tlast".to_string(), data);
trace("READ\tpivot".to_string(), data);
trace("READ\tarr[{".to_string() + &last.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &pivot.to_string() + "}]", data);
}
trace("READ\tindex".to_string(), data);
trace("READ\tlast".to_string(), data);
if index >= last {
break;
} else {
trace("READ\tindex".to_string(), data);
trace("WRITE\tlast".to_string(), data);
trace("READ\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &last.to_string() + "}]", data);
trace("WRITE\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("WRITE\tarr[{".to_string() + &last.to_string() + "}]", data);
arr.swap(index as usize, last as usize);
}
}
trace("READ\tindex".to_string(), data);
trace("WRITE\tpivot".to_string(), data);
trace("READ\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("READ\tarr[{".to_string() + &pivot.to_string() + "}]", data);
trace("WRITE\tarr[{".to_string() + &index.to_string() + "}]", data);
trace("WRITE\tarr[{".to_string() + &pivot.to_string() + "}]", data);
arr.swap(index as usize, pivot as usize);
index
}
// pub fn quick_sort(arr: &mut Vec<i32>, file_path: &str) {
// let file_path = file_path;
// let logfile = FileAppender::builder()
// .encoder(Box::new(PatternEncoder::new("{m}\n")))
// .build(file_path)
// .unwrap();
// let config = Config::builder()
// .appender(Appender::builder().build("trace", Box::new(logfile)))
// .build(Root::builder().appender("trace").build(LevelFilter::Trace))
// .unwrap();
// let _handle = log4rs::init_config(config);
// let low = 0;
// let high = arr.len() as i32;
// quick_sort_helper(arr, low, high - 1);
// }
// fn quick_sort_helper(arr: &mut Vec<i32>, low: i32, high: i32) {
// trace!("READ\tlow");
// trace!("READ\thigh");
// if low < high {
// trace!("WRITE\tpivot");
// let pivot = partition(arr, low, high);
// trace!("READ\tpivot");
// trace!("READ\tarr");
// trace!("READ\tlow");
// quick_sort_helper(arr, low, pivot - 1);
// trace!("READ\tpivot");
// trace!("READ\tarr");
// trace!("READ\tlow");
// quick_sort_helper(arr, pivot + 1, high);
// }
// }
// fn partition(arr: &mut Vec<i32>, low: i32, high: i32) -> i32 {
// trace!("READ\thigh");
// trace!("WRITE\tpivot");
// let pivot = high; //rng(low,high);
// trace!("READ\tlow");
// trace!("WRITE\tindex");
// let mut index = low - 1;
// trace!("READ\thigh");
// trace!("WRITE\tlast");
// let mut last = high;
// loop {
// trace!("READ\tindex");
// trace!("WRITE\tindex");
// index += 1;
// trace!("READ\tarr[{}]", index);
// trace!("READ\tarr[{}]", pivot);
// while arr[index as usize] < arr[pivot as usize] {
// trace!("READ\tindex");
// trace!("WRITE\tindex");
// index += 1;
// trace!("READ\tindex");
// trace!("READ\tpivot");
// trace!("READ\tarr[{}]", index);
// trace!("READ\tarr[{}]", pivot);
// }
// trace!("READ\tlast");
// trace!("WRITE\tlast");
// last -= 1;
// trace!("READ\tlast");
// trace!("READ\tarr[{}]", last);
// trace!("READ\tarr[{}]", pivot);
// while last >= 0 && arr[last as usize] > arr[pivot as usize] {
// trace!("READ\tindex");
// trace!("WRITE\tindex");
// last -= 1;
// trace!("READ\tlast");
// trace!("READ\tpivot");
// trace!("READ\tarr[{}]", last);
// trace!("READ\tarr[{}]", pivot);
// }
// trace!("READ\tindex");
// trace!("READ\tlast");
// if index >= last {
// break;
// } else {
// trace!("READ\tindex");
// trace!("WRITE\tlast");
// trace!("READ\tarr[{}]", index);
// trace!("READ\tarr[{}]", last);
// trace!("WRITE\tarr[{}]", index);
// trace!("WRITE\tarr[{}]", last);
// arr.swap(index as usize, last as usize);
// }
// }
// trace!("READ\tindex");
// trace!("WRITE\tpivot");
// trace!("READ\tarr[{}]", index);
// trace!("READ\tarr[{}]", pivot);
// trace!("WRITE\tarr[{}]", index);
// trace!("WRITE\tarr[{}]", pivot);
// arr.swap(index as usize, pivot as usize);
// index
// }
#[allow(dead_code)]
fn rng(low: i32, high: i32) -> i32 {
let mut rng = WyRand::new();
rng.generate_range(low..=high)
}
pub fn init_arr(size: usize) -> Vec<i32> {
let mut arr = Vec::new();
let mut rng = nanorand::tls_rng();
let low: i32 = -(size as i32) / 2;
let high: i32 = size as i32 / 2;
for _i in 0..size {
arr.push(rng.generate_range(low..=high));
}
arr
}
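// Added sketch: a quick sanity check tying `init_arr` and `quick_sort_rt`
// together; assumes nothing about `Data` beyond it being returned.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn quick_sort_rt_sorts() {
        let mut arr = init_arr(32);
        let _trace = quick_sort_rt(&mut arr);
        assert!(arr.windows(2).all(|w| w[0] <= w[1]));
    }
}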
pub fn shuffle(arr: &mut Vec<i32>) -> Vec<i32> {
let mut rng = WyRand::new();
    // Shuffle the vector in place, then hand back a copy of the shuffled
    // contents; shuffling a clone would leave `arr` untouched.
    rng.shuffle(arr.as_mut_slice());
    arr.clone()
} | 32.504065 | 83 | 0.510005 |
0eafb9e4375dc38e08c035fd9e550c4c9abde20b | 10,724 | //! For either helper, see [`Either`].
use std::{
future::Future,
mem,
pin::Pin,
task::{Context, Poll},
};
use bytes::Bytes;
use futures_core::ready;
use pin_project_lite::pin_project;
use crate::{
body::EitherBody,
dev,
web::{Form, Json},
Error, FromRequest, HttpRequest, HttpResponse, Responder,
};
/// Combines two extractor or responder types into a single type.
///
/// # Extractor
/// Provides a mechanism for trying two extractors, a primary and a fallback. Useful for
/// "polymorphic payloads" where, for example, a form might be JSON or URL encoded.
///
/// It is important to note that this extractor, by necessity, buffers the entire request payload
/// as part of its implementation. It does, however, respect any `PayloadConfig` maximum size limits.
///
/// ```
/// use actix_web::{post, web, Either};
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// struct Info {
/// name: String,
/// }
///
/// // handler that accepts form as JSON or form-urlencoded.
/// #[post("/")]
/// async fn index(form: Either<web::Json<Info>, web::Form<Info>>) -> String {
/// let name: String = match form {
/// Either::Left(json) => json.name.to_owned(),
/// Either::Right(form) => form.name.to_owned(),
/// };
///
/// format!("Welcome {}!", name)
/// }
/// ```
///
/// # Responder
/// It may be desirable to use a concrete type for a response with multiple branches. As long as
/// both types implement `Responder`, so will the `Either` type, enabling it to be used as a
/// handler's return type.
///
/// All properties of a response are determined by the Responder branch returned.
///
/// ```
/// use actix_web::{get, Either, Error, HttpResponse};
///
/// #[get("/")]
/// async fn index() -> Either<&'static str, Result<HttpResponse, Error>> {
/// if 1 == 2 {
/// // respond with Left variant
/// Either::Left("Bad data")
/// } else {
/// // respond with Right variant
/// Either::Right(
/// Ok(HttpResponse::Ok()
/// .content_type(mime::TEXT_HTML)
/// .body("<p>Hello!</p>"))
/// )
/// }
/// }
/// ```
#[derive(Debug, PartialEq)]
pub enum Either<L, R> {
/// A value of type `L`.
Left(L),
/// A value of type `R`.
Right(R),
}
impl<T> Either<Form<T>, Json<T>> {
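    /// Returns the inner `T` value, whichever branch it arrived in.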
pub fn into_inner(self) -> T {
match self {
Either::Left(form) => form.into_inner(),
Either::Right(form) => form.into_inner(),
}
}
}
impl<T> Either<Json<T>, Form<T>> {
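    /// Returns the inner `T` value, whichever branch it arrived in.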
pub fn into_inner(self) -> T {
match self {
Either::Left(form) => form.into_inner(),
Either::Right(form) => form.into_inner(),
}
}
}
#[cfg(test)]
impl<L, R> Either<L, R> {
pub(self) fn unwrap_left(self) -> L {
match self {
Either::Left(data) => data,
Either::Right(_) => {
panic!("Cannot unwrap Left branch. Either contains an `R` type.")
}
}
}
pub(self) fn unwrap_right(self) -> R {
match self {
Either::Left(_) => {
panic!("Cannot unwrap Right branch. Either contains an `L` type.")
}
Either::Right(data) => data,
}
}
}
/// See [here](#responder) for example of usage as a handler return type.
impl<L, R> Responder for Either<L, R>
where
L: Responder,
R: Responder,
{
type Body = EitherBody<L::Body, R::Body>;
fn respond_to(self, req: &HttpRequest) -> HttpResponse<Self::Body> {
match self {
Either::Left(a) => a.respond_to(req).map_into_left_body(),
Either::Right(b) => b.respond_to(req).map_into_right_body(),
}
}
}
/// A composite error resulting from failure to extract an `Either<L, R>`.
///
/// The implementation of `Into<actix_web::Error>` will return the payload buffering error or the
/// error from the primary extractor. To access the fallback error, use a match clause.
#[derive(Debug)]
pub enum EitherExtractError<L, R> {
/// Error from payload buffering, such as exceeding payload max size limit.
Bytes(Error),
/// Error from primary and fallback extractors.
Extract(L, R),
}
impl<L, R> From<EitherExtractError<L, R>> for Error
where
L: Into<Error>,
R: Into<Error>,
{
fn from(err: EitherExtractError<L, R>) -> Error {
match err {
EitherExtractError::Bytes(err) => err,
EitherExtractError::Extract(a_err, _b_err) => a_err.into(),
}
}
}
/// See [here](#extractor) for example of usage as an extractor.
impl<L, R> FromRequest for Either<L, R>
where
L: FromRequest + 'static,
R: FromRequest + 'static,
{
type Error = EitherExtractError<L::Error, R::Error>;
type Future = EitherExtractFut<L, R>;
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
EitherExtractFut {
req: req.clone(),
state: EitherExtractState::Bytes {
bytes: Bytes::from_request(req, payload),
},
}
}
}
pin_project! {
pub struct EitherExtractFut<L, R>
where
R: FromRequest,
L: FromRequest,
{
req: HttpRequest,
#[pin]
state: EitherExtractState<L, R>,
}
}
pin_project! {
#[project = EitherExtractProj]
pub enum EitherExtractState<L, R>
where
L: FromRequest,
R: FromRequest,
{
Bytes {
#[pin]
bytes: <Bytes as FromRequest>::Future,
},
Left {
#[pin]
left: L::Future,
fallback: Bytes,
},
Right {
#[pin]
right: R::Future,
left_err: Option<L::Error>,
},
}
}
impl<R, RF, RE, L, LF, LE> Future for EitherExtractFut<L, R>
where
L: FromRequest<Future = LF, Error = LE>,
R: FromRequest<Future = RF, Error = RE>,
LF: Future<Output = Result<L, LE>> + 'static,
RF: Future<Output = Result<R, RE>> + 'static,
LE: Into<Error>,
RE: Into<Error>,
{
type Output = Result<Either<L, R>, EitherExtractError<LE, RE>>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
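        // Drive the three-stage state machine: buffer the full payload,
        // try the primary (left) extractor, and on failure replay the
        // buffered bytes through the fallback (right) extractor.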
let ready = loop {
let next = match this.state.as_mut().project() {
EitherExtractProj::Bytes { bytes } => {
let res = ready!(bytes.poll(cx));
match res {
Ok(bytes) => {
let fallback = bytes.clone();
let left =
L::from_request(this.req, &mut payload_from_bytes(bytes));
EitherExtractState::Left { left, fallback }
}
Err(err) => break Err(EitherExtractError::Bytes(err)),
}
}
EitherExtractProj::Left { left, fallback } => {
let res = ready!(left.poll(cx));
match res {
Ok(extracted) => break Ok(Either::Left(extracted)),
Err(left_err) => {
let right = R::from_request(
this.req,
&mut payload_from_bytes(mem::take(fallback)),
);
EitherExtractState::Right {
left_err: Some(left_err),
right,
}
}
}
}
EitherExtractProj::Right { right, left_err } => {
let res = ready!(right.poll(cx));
match res {
Ok(data) => break Ok(Either::Right(data)),
Err(err) => {
break Err(EitherExtractError::Extract(
left_err.take().unwrap(),
err,
));
}
}
}
};
this.state.set(next);
};
Poll::Ready(ready)
}
}
fn payload_from_bytes(bytes: Bytes) -> dev::Payload {
let (_, mut h1_payload) = actix_http::h1::Payload::create(true);
h1_payload.unread_data(bytes);
dev::Payload::from(h1_payload)
}
#[cfg(test)]
mod tests {
use serde::{Deserialize, Serialize};
use super::*;
use crate::{
test::TestRequest,
web::{Form, Json},
};
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TestForm {
hello: String,
}
#[actix_rt::test]
async fn test_either_extract_first_try() {
let (req, mut pl) = TestRequest::default()
.set_form(&TestForm {
hello: "world".to_owned(),
})
.to_http_parts();
let form = Either::<Form<TestForm>, Json<TestForm>>::from_request(&req, &mut pl)
.await
.unwrap()
.unwrap_left()
.into_inner();
assert_eq!(&form.hello, "world");
}
#[actix_rt::test]
async fn test_either_extract_fallback() {
let (req, mut pl) = TestRequest::default()
.set_json(&TestForm {
hello: "world".to_owned(),
})
.to_http_parts();
let form = Either::<Form<TestForm>, Json<TestForm>>::from_request(&req, &mut pl)
.await
.unwrap()
.unwrap_right()
.into_inner();
assert_eq!(&form.hello, "world");
}
#[actix_rt::test]
async fn test_either_extract_recursive_fallback() {
let (req, mut pl) = TestRequest::default()
.set_payload(Bytes::from_static(b"!@$%^&*()"))
.to_http_parts();
let payload = Either::<Either<Form<TestForm>, Json<TestForm>>, Bytes>::from_request(
&req, &mut pl,
)
.await
.unwrap()
.unwrap_right();
assert_eq!(&payload.as_ref(), &b"!@$%^&*()");
}
#[actix_rt::test]
async fn test_either_extract_recursive_fallback_inner() {
let (req, mut pl) = TestRequest::default()
.set_json(&TestForm {
hello: "world".to_owned(),
})
.to_http_parts();
let form = Either::<Either<Form<TestForm>, Json<TestForm>>, Bytes>::from_request(
&req, &mut pl,
)
.await
.unwrap()
.unwrap_left()
.unwrap_right()
.into_inner();
assert_eq!(&form.hello, "world");
}
}
| 28.983784 | 99 | 0.512868 |
26c2e8b8f590ef4b634c8831ef173623c1f0d4f0 | 7,725 | mod proposer_block;
mod transaction;
mod voter_block;
use crate::block::{Block, Content};
use crate::blockchain::BlockChain;
use crate::blockdb::BlockDatabase;
use crate::config::*;
use crate::crypto::hash::{Hashable, H256};
use crate::crypto::merkle::verify;
extern crate bigint;
/// The result of block validation.
#[derive(Debug)]
pub enum BlockResult {
/// The validation passes.
Pass,
/// The PoW doesn't pass.
WrongPoW,
/// The sortition id and content type doesn't match.
WrongSortitionId,
/// The content Merkle proof is incorrect.
WrongSortitionProof(H256, H256, Vec<H256>, usize, usize),
/// Some references are missing.
MissingReferences(Vec<H256>),
/// Proposer Ref level > parent
WrongProposerRef,
/// A voter block has a out-of-range chain number.
WrongChainNumber,
/// A voter block votes for incorrect proposer levels.
WrongVoteLevel,
EmptyTransaction,
ZeroValue,
InsufficientInput,
WrongSignature,
}
impl std::fmt::Display for BlockResult {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
BlockResult::Pass => write!(f, "validation passed"),
BlockResult::WrongPoW => write!(f, "PoW larger than difficulty"),
BlockResult::WrongSortitionId => write!(f, "Sortition id is not same as content type"),
BlockResult::WrongSortitionProof(root, data, proof, id, leaf_size) => {
write!(f, "Sortition Merkle proof is incorrect\n")?;
write!(f, "Root {}\n", root)?;
write!(f, "Hash {}\n", data)?;
for h in proof.iter() {
write!(f, "Proof {}\n", h)?;
}
write!(f, "Id {}, Leaf size{}\n", id, leaf_size)
},
BlockResult::MissingReferences(_) => write!(f, "referred blocks not in system"),
BlockResult::WrongProposerRef => {
write!(f, "referred proposer blocks level larger than parent")
}
BlockResult::WrongChainNumber => write!(f, "chain number out of range"),
BlockResult::WrongVoteLevel => write!(f, "incorrent vote levels"),
BlockResult::EmptyTransaction => write!(f, "empty transaction input or output"),
BlockResult::ZeroValue => {
write!(f, "transaction input or output value contains a zero")
}
BlockResult::InsufficientInput => write!(f, "insufficient input"),
BlockResult::WrongSignature => write!(f, "signature mismatch"),
}
}
}
/// Check the PoW and sortition id of a block.
pub fn check_pow_sortition_id(block: &Block, config: &BlockchainConfig) -> BlockResult {
let sortition_id = config.sortition_hash(&block.hash(), &block.header.difficulty);
if let Some(sortition_id) = sortition_id {
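        // Each block maps to exactly one content slot: one proposer slot,
        // one transaction slot, and one slot per voter chain starting at
        // FIRST_VOTER_INDEX.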
let correct_sortition_id = match &block.content {
Content::Proposer(_) => PROPOSER_INDEX,
Content::Transaction(_) => TRANSACTION_INDEX,
Content::Voter(content) => content.chain_number + FIRST_VOTER_INDEX,
};
if sortition_id != correct_sortition_id {
return BlockResult::WrongSortitionId;
}
} else {
return BlockResult::WrongPoW;
}
BlockResult::Pass
}
/// Check the sortition proof of a block.
pub fn check_sortition_proof(block: &Block, config: &BlockchainConfig) -> BlockResult {
let sortition_id = config.sortition_hash(&block.hash(), &block.header.difficulty);
if let Some(sortition_id) = sortition_id {
if !verify(
&block.header.content_merkle_root,
&block.content.hash(),
&block.sortition_proof,
sortition_id as usize,
(config.voter_chains + FIRST_VOTER_INDEX) as usize,
) {
            return BlockResult::WrongSortitionProof(
                block.header.content_merkle_root,
                block.content.hash(),
                block.sortition_proof.clone(),
                sortition_id as usize,
                (config.voter_chains + FIRST_VOTER_INDEX) as usize,
            );
}
} else {
unreachable!();
}
BlockResult::Pass
}
/// Validate a block that already passes pow and sortition test. See if parents/refs are missing.
pub fn check_data_availability(
block: &Block,
blockchain: &BlockChain,
blockdb: &BlockDatabase,
) -> BlockResult {
let mut missing = vec![];
// check whether the parent exists
let parent = block.header.parent;
let parent_availability = check_proposer_block_exists(parent, blockchain);
if !parent_availability {
missing.push(parent);
}
// match the block type and check content
match &block.content {
Content::Proposer(content) => {
// check for missing references
let missing_refs =
proposer_block::get_missing_references(&content, blockchain, blockdb);
if !missing_refs.is_empty() {
missing.extend_from_slice(&missing_refs);
}
}
Content::Voter(content) => {
// check for missing references
let missing_refs = voter_block::get_missing_references(&content, blockchain, blockdb);
if !missing_refs.is_empty() {
missing.extend_from_slice(&missing_refs);
}
}
Content::Transaction(_) => {
// note that we don't care about blockdb here, since all blocks at this stage
// should have been inserted into the blockdb
}
}
if !missing.is_empty() {
BlockResult::MissingReferences(missing)
} else {
BlockResult::Pass
}
}
/// Check block content semantic
pub fn check_content_semantic(
block: &Block,
blockchain: &BlockChain,
_blockdb: &BlockDatabase,
) -> BlockResult {
let parent = block.header.parent;
match &block.content {
Content::Proposer(content) => {
// check refed proposer level should be less than its level
if !proposer_block::check_ref_proposer_level(&parent, &content, blockchain) {
return BlockResult::WrongProposerRef;
}
BlockResult::Pass
}
Content::Voter(content) => {
// check chain number
if !voter_block::check_chain_number(&content, blockchain) {
return BlockResult::WrongChainNumber;
}
// check whether all proposer levels deeper than the one our parent voted are voted
if !voter_block::check_levels_voted(&content, blockchain, &parent) {
return BlockResult::WrongVoteLevel;
}
BlockResult::Pass
}
Content::Transaction(content) => {
if !transaction::check_signature_batch(&content.transactions) {
return BlockResult::WrongSignature;
}
BlockResult::Pass
}
}
}
/// Check whether a proposer block exists in the blockchain.
fn check_proposer_block_exists(hash: H256, blockchain: &BlockChain) -> bool {
match blockchain.contains_proposer(&hash) {
Err(e) => panic!("Blockchain error {}", e),
Ok(b) => b,
}
}
/// Check whether a voter block exists in the blockchain.
fn check_voter_block_exists(hash: H256, blockchain: &BlockChain) -> bool {
match blockchain.contains_voter(&hash) {
Err(e) => panic!("Blockchain error {}", e),
Ok(b) => b,
}
}
/// Check whether a transaction block exists in the blockchain.
fn check_transaction_block_exists(hash: H256, blockchain: &BlockChain) -> bool {
match blockchain.contains_transaction(&hash) {
Err(e) => panic!("Blockchain error {}", e),
Ok(b) => b,
}
}
| 37.318841 | 214 | 0.619288 |
fc16dddb928217f5f83c652b9af619433d4f073d | 3,870 | //! Trap codes describing the reason for a trap.
use std::fmt::{self, Display, Formatter};
use std::str::FromStr;
/// A trap code describing the reason for a trap.
///
/// All trap instructions have an explicit trap code.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum TrapCode {
/// The current stack space was exhausted.
///
/// On some platforms, a stack overflow may also be indicated by a segmentation fault from the
/// stack guard page.
StackOverflow,
/// A `heap_addr` instruction detected an out-of-bounds error.
///
/// Note that not all out-of-bounds heap accesses are reported this way;
/// some are detected by a segmentation fault on the heap unmapped or
/// offset-guard pages.
HeapOutOfBounds,
/// A `table_addr` instruction detected an out-of-bounds error.
TableOutOfBounds,
/// Other bounds checking error.
OutOfBounds,
/// Indirect call to a null table entry.
IndirectCallToNull,
/// Signature mismatch on indirect call.
BadSignature,
/// An integer arithmetic operation caused an overflow.
IntegerOverflow,
/// An integer division by zero.
IntegerDivisionByZero,
/// Failed float-to-int conversion.
BadConversionToInteger,
/// Execution has potentially run too long and may be interrupted.
/// This trap is resumable.
Interrupt,
/// A user-defined trap code.
User(u16),
}
impl Display for TrapCode {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
use self::TrapCode::*;
let identifier = match *self {
StackOverflow => "stk_ovf",
HeapOutOfBounds => "heap_oob",
TableOutOfBounds => "table_oob",
OutOfBounds => "oob",
IndirectCallToNull => "icall_null",
BadSignature => "bad_sig",
IntegerOverflow => "int_ovf",
IntegerDivisionByZero => "int_divz",
BadConversionToInteger => "bad_toint",
Interrupt => "interrupt",
User(x) => return write!(f, "user{}", x),
};
f.write_str(identifier)
}
}
impl FromStr for TrapCode {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
use self::TrapCode::*;
match s {
"stk_ovf" => Ok(StackOverflow),
"heap_oob" => Ok(HeapOutOfBounds),
"table_oob" => Ok(TableOutOfBounds),
"oob" => Ok(OutOfBounds),
"icall_null" => Ok(IndirectCallToNull),
"bad_sig" => Ok(BadSignature),
"int_ovf" => Ok(IntegerOverflow),
"int_divz" => Ok(IntegerDivisionByZero),
"bad_toint" => Ok(BadConversionToInteger),
"interrupt" => Ok(Interrupt),
_ if s.starts_with("user") => s[4..].parse().map(User).map_err(|_| ()),
_ => Err(()),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::string::ToString;
// Everything but user-defined codes.
const CODES: [TrapCode; 9] = [
TrapCode::StackOverflow,
TrapCode::HeapOutOfBounds,
TrapCode::TableOutOfBounds,
TrapCode::OutOfBounds,
TrapCode::IndirectCallToNull,
TrapCode::BadSignature,
TrapCode::IntegerOverflow,
TrapCode::IntegerDivisionByZero,
TrapCode::BadConversionToInteger,
];
#[test]
fn display() {
for r in &CODES {
let tc = *r;
assert_eq!(tc.to_string().parse(), Ok(tc));
}
assert_eq!("bogus".parse::<TrapCode>(), Err(()));
assert_eq!(TrapCode::User(17).to_string(), "user17");
assert_eq!("user22".parse(), Ok(TrapCode::User(22)));
assert_eq!("user".parse::<TrapCode>(), Err(()));
assert_eq!("user-1".parse::<TrapCode>(), Err(()));
assert_eq!("users".parse::<TrapCode>(), Err(()));
}
}
| 30.234375 | 98 | 0.586305 |
cca40a47c669f31369116dc1b80b2f0cf9e1d07b | 328 | foreign_enum!(
enum ControlItem {
GNSS = ControlItem::GnssWorking,
GPS_PROVIDER = ControlItem::AndroidGPSOn,
}
);
foreign_interface!(interface ControlStateObserver {
self_type ControlStateChange;
onSessionUpdate = ControlStateChange::on_state_changed(&self, item: ControlItem, is_ok: bool);
});
| 27.333333 | 98 | 0.72561 |
f58446d55922239f1f5285ed5bda281f60237554 | 770 | // NOTE: This test doesn't actually require `fulldeps`
// so we could instead use it as an `ui` test.
//
// Considering that all other `internal-lints` are tested here
// this seems like the cleaner solution though.
#![feature(rustc_attrs)]
#![deny(rustc::ty_pass_by_reference)]
#![allow(unused)]
#[rustc_diagnostic_item = "TyCtxt"]
struct TyCtxt<'tcx> {
inner: &'tcx (),
}
impl<'tcx> TyCtxt<'tcx> {
fn by_value(self) {} // OK
fn by_ref(&self) {} //~ ERROR passing `TyCtxt<'tcx>` by reference
}
struct TyS<'tcx> {
inner: &'tcx (),
}
#[rustc_diagnostic_item = "Ty"]
type Ty<'tcx> = &'tcx TyS<'tcx>;
impl<'tcx> TyS<'tcx> {
fn by_value(self: Ty<'tcx>) {}
fn by_ref(self: &Ty<'tcx>) {} //~ ERROR passing `Ty<'tcx>` by reference
}
fn main() {}
| 22.647059 | 75 | 0.631169 |
8af53fa357252ce52b87c8fe942c66d8433cdc0c | 1,853 | // An imaginary magical school has a new report card generation system written in Rust!
// Currently the system only supports creating report cards where the student's grade
// is represented numerically (e.g. 1.0 -> 5.5).
// However, the school also issues alphabetical grades (A+ -> F-) and needs
// to be able to print both types of report card!
// Make the necessary code changes in the struct ReportCard and the impl block
// to support alphabetical report cards. Change the Grade in the second test to "A+"
// to show that your changes allow alphabetical grades.
// Execute 'rustlings hint generics3' for hints!
/*
pub struct ReportCard {
pub grade: f32,
pub student_name: String,
pub student_age: u8,
}*/
pub struct ReportCard<T> {
pub grade: T,
pub student_name: String,
pub student_age: u8,
}
impl<T: std::fmt::Display> ReportCard<T> {
pub fn print(&self) -> String {
format!("{} ({}) - achieved a grade of {}",
&self.student_name, &self.student_age, &self.grade)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn generate_numeric_report_card() {
let report_card = ReportCard {
grade: 2.1,
student_name: "Tom Wriggle".to_string(),
student_age: 12,
};
assert_eq!(
report_card.print(),
"Tom Wriggle (12) - achieved a grade of 2.1"
);
}
#[test]
fn generate_alphabetic_report_card() {
// TODO: Make sure to change the grade here after you finish the exercise.
let report_card = ReportCard {
grade: "A+".to_string(),
student_name: "Gary Plotter".to_string(),
student_age: 11,
};
assert_eq!(
report_card.print(),
"Gary Plotter (11) - achieved a grade of A+"
);
}
}
| 28.507692 | 87 | 0.610901 |
9c0edd2035ebff57a437635595436e928c28fbb0 | 2,121 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
#[cfg(any(feature = "v2_6", feature = "dox"))]
use glib::translate::*;
use webkit2_sys;
#[cfg(any(feature = "v2_6", feature = "dox"))]
use UserContentInjectedFrames;
#[cfg(any(feature = "v2_6", feature = "dox"))]
use UserStyleLevel;
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct UserStyleSheet(Shared<webkit2_sys::WebKitUserStyleSheet>);
match fn {
ref => |ptr| webkit2_sys::webkit_user_style_sheet_ref(ptr),
unref => |ptr| webkit2_sys::webkit_user_style_sheet_unref(ptr),
get_type => || webkit2_sys::webkit_user_style_sheet_get_type(),
}
}
impl UserStyleSheet {
#[cfg(any(feature = "v2_6", feature = "dox"))]
pub fn new(
source: &str,
injected_frames: UserContentInjectedFrames,
level: UserStyleLevel,
allow_list: &[&str],
block_list: &[&str],
) -> UserStyleSheet {
assert_initialized_main_thread!();
unsafe {
from_glib_full(webkit2_sys::webkit_user_style_sheet_new(
source.to_glib_none().0,
injected_frames.to_glib(),
level.to_glib(),
allow_list.to_glib_none().0,
block_list.to_glib_none().0,
))
}
}
#[cfg(any(feature = "v2_22", feature = "dox"))]
pub fn new_for_world(
source: &str,
injected_frames: UserContentInjectedFrames,
level: UserStyleLevel,
world_name: &str,
allow_list: &[&str],
block_list: &[&str],
) -> UserStyleSheet {
assert_initialized_main_thread!();
unsafe {
from_glib_full(webkit2_sys::webkit_user_style_sheet_new_for_world(
source.to_glib_none().0,
injected_frames.to_glib(),
level.to_glib(),
world_name.to_glib_none().0,
allow_list.to_glib_none().0,
block_list.to_glib_none().0,
))
}
}
}
| 31.656716 | 78 | 0.590759 |