hexsha | size | content | avg_line_length | max_line_length | alphanum_fraction |
---|---|---|---|---|---|
stringlengths 40..40 | int64 4..1.05M | stringlengths 4..1.05M | float64 1.33..100 | int64 1..1k | float64 0.25..1 |
3a90b41420af7a0a72e74b0cf4b6088dcfc1bf0d | 35,408 | /*
* Copyright 2018-2020 TON DEV SOLUTIONS LTD.
*
* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use
* this file except in compliance with the License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific TON DEV software governing permissions and
* limitations under the License.
*/
use crate::{print_args, VERBOSE_MODE};
use crate::config::Config;
use crate::convert;
use crate::depool_abi::{DEPOOL_ABI, PARTICIPANT_ABI};
use crate::helpers::{
create_client_local,
create_client_verbose,
load_abi,
load_ton_address,
now,
TonClient,
answer_filter,
events_filter,
print_message,
};
use crate::multisig::{send_with_body, MSIG_ABI};
use clap::{App, ArgMatches, SubCommand, Arg, AppSettings};
use serde_json;
use ton_client::abi::{ParamsOfEncodeMessageBody, CallSet, ParamsOfDecodeMessageBody};
use ton_client::net::{OrderBy, ParamsOfQueryCollection, ParamsOfWaitForCollection, SortDirection};
use crate::call::{prepare_message, print_encoded_message};
use ton_client::processing::{
ParamsOfSendMessage,
ParamsOfWaitForTransaction,
wait_for_transaction,
send_message,
};
use std::collections::HashMap;
pub fn create_depool_command<'a, 'b>() -> App<'a, 'b> {
let wallet_arg = Arg::with_name("MSIG")
.takes_value(true)
.long("--wallet")
.short("-w")
.help("Multisig wallet address.");
let value_arg = Arg::with_name("VALUE")
.takes_value(true)
.long("--value")
.short("-v")
.help("Value in tons.");
let keys_arg = Arg::with_name("SIGN")
.takes_value(true)
.long("--sign")
.short("-s")
.help("Seed phrase or path to file with keypair which must be used to sign message to multisig wallet.");
let total_period_arg = Arg::with_name("TPERIOD")
.takes_value(true)
.long("--total")
.short("-t")
.help("Total period of vesting stake (days).");
let withdrawal_period_arg = Arg::with_name("WPERIOD")
.takes_value(true)
.long("--withdrawal")
.short("-i")
.help("Payment period of vesting stake (days).");
let beneficiary_arg = Arg::with_name("BENEFICIARY")
.takes_value(true)
.long("--beneficiary")
.short("-b")
.help("Smart contract address which will own lock stake rewards.");
let donor_arg = Arg::with_name("DONOR")
.takes_value(true)
.long("--donor")
.short("-d")
.help("Donor smart contract address.");
let dest_arg = Arg::with_name("DEST")
.takes_value(true)
.long("--dest")
.short("-d")
.help("Address of the destination smart contract.");
let wait_answer = Arg::with_name("WAIT_ANSWER")
.long("--wait-answer")
.short("-a")
.help("Wait for depool answer when calling a depool function.");
SubCommand::with_name("depool")
.about("DePool commands.")
.setting(AppSettings::AllowLeadingHyphen)
.setting(AppSettings::DontCollapseArgsInUsage)
.arg(Arg::with_name("ADDRESS")
.takes_value(true)
.long("--addr")
.help("DePool contract address. If parameter is omitted, then value `addr` from the config is used"))
.arg(wait_answer.clone())
.subcommand(SubCommand::with_name("donor")
.about(r#"Top level command for specifying donor for exotic stakes in depool."#)
.subcommand(SubCommand::with_name("vesting")
.about("Set the address from which participant can receive a vesting stake.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(keys_arg.clone())
.arg(donor_arg.clone())
.arg(wait_answer.clone()))
.subcommand(SubCommand::with_name("lock")
.about("Set the address from which participant can receive a lock stake.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(keys_arg.clone())
.arg(donor_arg.clone())
.arg(wait_answer.clone())))
.subcommand(SubCommand::with_name("answers")
.about("Prints depool answers to the wallet")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(Arg::with_name("SINCE")
.takes_value(true)
.long("--since")
.short("-s")
.help("Prints answers since this unixtime.")) )
.subcommand(SubCommand::with_name("stake")
.about(r#"Top level command for managing stakes in depool. Uses a supplied multisignature wallet to send internal message with stake to depool."#)
.subcommand(SubCommand::with_name("ordinary")
.about("Deposits an ordinary stake in the depool from the multisignature wallet.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone())
.arg(wait_answer.clone()))
.subcommand(SubCommand::with_name("vesting")
.about("Deposits a vesting stake in the depool from the multisignature wallet.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone())
.arg(wait_answer.clone())
.arg(total_period_arg.clone())
.arg(withdrawal_period_arg.clone())
.arg(beneficiary_arg.clone()))
.subcommand(SubCommand::with_name("lock")
.about("Deposits a lock stake in the depool from the multisignature wallet.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone())
.arg(wait_answer.clone())
.arg(total_period_arg.clone())
.arg(withdrawal_period_arg.clone())
.arg(beneficiary_arg.clone()))
.subcommand(SubCommand::with_name("transfer")
.about("Transfers ownership of the wallet stake to another contract.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone())
.arg(wait_answer.clone())
.arg(dest_arg.clone()))
.subcommand(SubCommand::with_name("remove")
.about("Withdraws an ordinary stake from the current pooling round of the depool to the multisignature wallet.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone())
.arg(wait_answer.clone()))
.subcommand(SubCommand::with_name("withdrawPart")
.about("Withdraws part of the stake after round completion.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(wait_answer.clone())
.arg(keys_arg.clone())))
.subcommand(SubCommand::with_name("replenish")
.about("Transfers funds from the multisignature wallet to the depool contract (NOT A STAKE).")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(value_arg.clone())
.arg(keys_arg.clone()))
.subcommand(SubCommand::with_name("ticktock")
.about("Calls depool 'ticktock()' function to update its state. 1 ton is attached to this call (change will be returned).")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(keys_arg.clone()))
.subcommand(SubCommand::with_name("withdraw")
.about("Allows to disable auto investment of the stake into the next round and withdraw all the stakes after round completion.")
.setting(AppSettings::AllowLeadingHyphen)
.subcommand(SubCommand::with_name("on")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(wait_answer.clone())
.arg(keys_arg.clone()))
.subcommand(SubCommand::with_name("off")
.setting(AppSettings::AllowLeadingHyphen)
.arg(wallet_arg.clone())
.arg(wait_answer.clone())
.arg(keys_arg.clone())))
.subcommand(SubCommand::with_name("events")
.about("Prints depool events.")
.setting(AppSettings::AllowLeadingHyphen)
.arg(Arg::with_name("SINCE")
.takes_value(true)
.long("--since")
.short("-s")
.help("Prints events since this unixtime."))
.arg(Arg::with_name("WAITONE")
.long("--wait-one")
.short("-w")
.help("Waits until new event will be emitted.")) )
}
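// Illustrative invocation built from the subcommands above. This is a sketch only:
// the binary name, addresses and key path are placeholders, not taken from this crate.
//
//     <cli> depool --addr <depool_address> stake ordinary \
//         --wallet <msig_address> --value 10 --sign keys.json --wait-answer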
struct CommandData<'a> {
conf: Config,
depool: String,
wallet: String,
keys: String,
stake: &'a str,
depool_fee: String,
}
impl<'a> CommandData<'a> {
pub fn from_matches_and_conf(m: &'a ArgMatches, conf: Config, depool: String) -> Result<Self, String> {
let (wallet, stake, keys) = parse_stake_data(m, &conf)?;
let depool_fee = conf.depool_fee.clone().to_string();
Ok(CommandData {conf, depool, wallet, stake, keys, depool_fee})
}
}
fn parse_wallet_data(m: &ArgMatches, conf: &Config) -> Result<(String, String), String> {
let wallet = m.value_of("MSIG")
.map(|s| s.to_string())
.or(conf.wallet.clone())
.ok_or("multisig wallet address is not defined.".to_string())?;
let wallet = load_ton_address(&wallet, conf)
.map_err(|e| format!("invalid multisig address: {}", e))?;
let keys = m.value_of("SIGN")
.map(|s| s.to_string())
.or(conf.keys_path.clone())
.ok_or("keypair is not defined.".to_string())?;
Ok((wallet, keys))
}
fn parse_stake_data<'a>(m: &'a ArgMatches, conf: &Config) -> Result<(String, &'a str, String), String> {
let (wallet, keys) = parse_wallet_data(m, conf)?;
let stake = m.value_of("VALUE")
.ok_or("stake value is not defined.".to_string())?;
Ok((wallet, stake, keys))
}
pub async fn depool_command(m: &ArgMatches<'_>, conf: Config) -> Result<(), String> {
let depool = m.value_of("ADDRESS")
.map(|s| s.to_string())
.or(conf.addr.clone())
.ok_or("depool address is not defined. Supply it in the config file or in command line.".to_string())?;
let depool = load_ton_address(&depool, &conf)
.map_err(|e| format!("invalid depool address: {}", e))?;
let mut conf = conf;
let mut set_wait_answer = |m: &ArgMatches| {
if m.is_present("WAIT_ANSWER") {
conf.no_answer = false;
}
};
set_wait_answer(m);
if let Some(m) = m.subcommand_matches("donor") {
let matches = m.subcommand_matches("vesting").or(m.subcommand_matches("lock"));
if let Some(matches) = matches {
let is_vesting = m.subcommand_matches("vesting").is_some();
set_wait_answer(matches);
let (wallet, keys) = parse_wallet_data(&matches, &conf)?;
return set_donor_command(matches, conf, depool.as_str(), &wallet, &keys, is_vesting).await;
}
}
if let Some(m) = m.subcommand_matches("stake") {
if let Some(m) = m.subcommand_matches("ordinary") {
set_wait_answer(m);
return ordinary_stake_command(
CommandData::from_matches_and_conf(m, conf, depool)?,
).await;
}
if let Some(m) = m.subcommand_matches("vesting") {
set_wait_answer(m);
return exotic_stake_command(m,
CommandData::from_matches_and_conf(m, conf, depool)?,
true,
).await;
}
if let Some(m) = m.subcommand_matches("lock") {
set_wait_answer(m);
return exotic_stake_command(m,
CommandData::from_matches_and_conf(m, conf, depool)?,
false,
).await;
}
if let Some(m) = m.subcommand_matches("remove") {
set_wait_answer(m);
return remove_stake_command(
CommandData::from_matches_and_conf(m, conf, depool)?,
).await;
}
if let Some(m) = m.subcommand_matches("withdrawPart") {
set_wait_answer(m);
return withdraw_stake_command(
CommandData::from_matches_and_conf(m, conf, depool)?,
).await;
}
if let Some(m) = m.subcommand_matches("transfer") {
set_wait_answer(m);
return transfer_stake_command(m,
CommandData::from_matches_and_conf(m, conf, depool)?,
).await;
}
}
if let Some(m) = m.subcommand_matches("withdraw") {
let matches = m.subcommand_matches("on").or(m.subcommand_matches("off"));
if let Some(matches) = matches {
set_wait_answer(matches);
let (wallet, keys) = parse_wallet_data(&matches, &conf)?;
let enable_withdraw = m.subcommand_matches("on").is_some();
return set_withdraw_command(conf, &depool, &wallet, &keys, enable_withdraw).await;
}
}
if let Some(m) = m.subcommand_matches("events") {
return events_command(m, conf, &depool).await
}
if let Some(m) = m.subcommand_matches("answers") {
return answer_command(m, conf, &depool).await
}
if let Some(m) = m.subcommand_matches("replenish") {
return replenish_command(
CommandData::from_matches_and_conf(m, conf, depool)?,
).await;
}
if let Some(m) = m.subcommand_matches("ticktock") {
let (wallet, keys) = parse_wallet_data(&m, &conf)?;
return ticktock_command(conf, &depool, &wallet, &keys).await;
}
Err("unknown depool command".to_owned())
}
async fn answer_command(m: &ArgMatches<'_>, conf: Config, depool: &str) -> Result<(), String> {
let wallet = m.value_of("MSIG")
.map(|s| s.to_string())
.or(conf.wallet.clone())
.ok_or("multisig wallet address is not defined.".to_string())?;
let since = m.value_of("SINCE")
.map(|s| {
u32::from_str_radix(s, 10)
.map_err(|e| format!(r#"cannot parse "since" option: {}"#, e))
})
.transpose()?
.unwrap_or(0);
let ton = create_client_verbose(&conf)?;
let wallet = load_ton_address(&wallet, &conf)
.map_err(|e| format!("invalid depool address: {}", e))?;
let messages = ton_client::net::query_collection(
ton.clone(),
ParamsOfQueryCollection {
collection: "messages".to_owned(),
filter: Some(answer_filter(depool, &wallet, since)),
result: "id value body created_at created_at_string".to_owned(),
order: Some(vec![OrderBy{ path: "created_at".to_owned(), direction: SortDirection::DESC }]),
..Default::default()
},
).await.map_err(|e| format!("failed to query depool messages: {}", e))?;
println!("{} answers found", messages.result.len());
 for message in &messages.result {
 print_answer(ton.clone(), message).await;
}
println!("Done");
Ok(())
}
async fn print_answer(ton: TonClient, message: &serde_json::Value) {
println!("Answer:");
print_message(ton, message, PARTICIPANT_ABI, true).await;
}
/*
* Events command
*/
async fn events_command(m: &ArgMatches<'_>, conf: Config, depool: &str) -> Result<(), String> {
let since = m.value_of("SINCE");
let wait_for = m.is_present("WAITONE");
let depool = Some(depool);
print_args!(depool, since);
if !wait_for {
let since = since.map(|s| {
u32::from_str_radix(s, 10)
.map_err(|e| format!(r#"cannot parse "since" option: {}"#, e))
})
.transpose()?
.unwrap_or(0);
get_events(conf, depool.unwrap(), since).await
} else {
wait_for_event(conf, depool.unwrap()).await
}
}
async fn print_event(ton: TonClient, event: &serde_json::Value) {
println!("event {}", event["id"].as_str().unwrap());
let body = event["body"].as_str().unwrap();
let result = ton_client::abi::decode_message_body(
ton.clone(),
ParamsOfDecodeMessageBody {
abi: load_abi(DEPOOL_ABI).unwrap(),
body: body.to_owned(),
is_internal: false,
..Default::default()
},
).await;
let (name, args) = if result.is_err() {
("unknown".to_owned(), "{}".to_owned())
} else {
let result = result.unwrap();
(result.name, serde_json::to_string(&result.value).unwrap())
};
println!("{} {} ({})\n{}\n",
name,
event["created_at"].as_u64().unwrap(),
event["created_at_string"].as_str().unwrap(),
args
);
}
async fn get_events(conf: Config, depool: &str, since: u32) -> Result<(), String> {
let ton = create_client_verbose(&conf)?;
let _addr = load_ton_address(depool, &conf)?;
let events = ton_client::net::query_collection(
ton.clone(),
ParamsOfQueryCollection {
collection: "messages".to_owned(),
filter: Some(events_filter(depool, since)),
result: "id body created_at created_at_string".to_owned(),
order: Some(vec![OrderBy{ path: "created_at".to_owned(), direction: SortDirection::DESC }]),
..Default::default()
},
).await.map_err(|e| format!("failed to query depool events: {}", e))?;
println!("{} events found", events.result.len());
for event in &events.result {
print_event(ton.clone(), event).await;
}
println!("Done");
Ok(())
}
async fn wait_for_event(conf: Config, depool: &str) -> Result<(), String> {
let ton = create_client_verbose(&conf)?;
let _addr = load_ton_address(depool, &conf)?;
println!("Waiting for a new event...");
let event = ton_client::net::wait_for_collection(
ton.clone(),
ParamsOfWaitForCollection {
collection: "messages".to_owned(),
filter: Some(events_filter(depool, now())),
result: "id body created_at created_at_string".to_owned(),
timeout: Some(conf.timeout),
..Default::default()
},
).await.map_err(|e| println!("failed to query event: {}", e.to_string()));
if event.is_ok() {
print_event(ton.clone(), &event.unwrap().result).await;
}
Ok(())
}
/*
* Stake commands
*/
async fn ordinary_stake_command(
cmd: CommandData<'_>,
) -> Result<(), String> {
let (depool, wallet, stake, keys) =
(Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys);
add_ordinary_stake(cmd).await
}
async fn replenish_command(
cmd: CommandData<'_>,
) -> Result<(), String> {
let (depool, wallet, stake, keys) =
(Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys);
replenish_stake(cmd).await
}
async fn ticktock_command(
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
) -> Result<(), String> {
let (depool, wallet, keys) = (Some(depool), Some(wallet), Some(keys));
print_args!(depool, wallet, keys);
call_ticktock(conf, depool.unwrap(), wallet.unwrap(), keys.unwrap()).await
}
async fn transfer_stake_command(
m: &ArgMatches<'_>,
cmd: CommandData<'_>,
) -> Result<(), String> {
let dest = Some(m.value_of("DEST")
.ok_or("destination address is not defined.".to_string())?);
let (depool, wallet, stake, keys) =
(Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys, dest);
transfer_stake(cmd, dest.unwrap()).await
}
async fn set_donor_command(
m: &ArgMatches<'_>,
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
is_vesting: bool,
) -> Result<(), String> {
let (depool, wallet, keys) = (Some(depool), Some(wallet), Some(keys));
let donor = Some(m.value_of("DONOR")
.ok_or("donor is not defined.".to_string())?);
print_args!(depool, wallet, keys, donor);
set_donor(conf, depool.unwrap(), wallet.unwrap(), keys.unwrap(), is_vesting, donor.unwrap()).await
}
async fn exotic_stake_command(
m: &ArgMatches<'_>,
cmd: CommandData<'_>,
is_vesting: bool,
) -> Result<(), String> {
let withdrawal_period = Some(m.value_of("WPERIOD")
.ok_or("withdrawal period is not defined.".to_string())?);
let total_period = Some(m.value_of("TPERIOD")
.ok_or("total period is not defined.".to_string())?);
let beneficiary = Some(m.value_of("BENEFICIARY")
.ok_or("beneficiary is not defined.".to_string())?);
let (depool, wallet, stake, keys) = (Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys, beneficiary, withdrawal_period, total_period);
 let period_checker = |v| {
 if v > 0 && v <= 36500 {
 Ok(v)
 } else {
 Err("period must be greater than 0 and not more than 36500 days".to_string())
 }
 };
let wperiod = u32::from_str_radix(withdrawal_period.unwrap(), 10)
.map_err(|e| format!("invalid withdrawal period: {}", e))
.and_then(period_checker)?;
let tperiod = u32::from_str_radix(total_period.unwrap(), 10)
.map_err(|e| format!("invalid total period: {}", e))
.and_then(period_checker)?;
let wperiod = wperiod * 86400;
let tperiod = tperiod * 86400;
add_exotic_stake(cmd, beneficiary.unwrap(), wperiod, tperiod, is_vesting).await
}
async fn remove_stake_command(
cmd: CommandData<'_>,
) -> Result<(), String> {
let (depool, wallet, stake, keys) = (Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys);
remove_stake(cmd).await
}
async fn withdraw_stake_command(
cmd: CommandData<'_>,
) -> Result<(), String> {
let (depool, wallet, stake, keys) = (Some(&cmd.depool), Some(&cmd.wallet), Some(cmd.stake), Some(&cmd.keys));
print_args!(depool, wallet, stake, keys);
withdraw_stake(cmd).await
}
async fn set_withdraw_command(
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
enable: bool,
) -> Result<(), String> {
let (depool, wallet, keys) = (Some(depool), Some(wallet), Some(keys));
let withdraw = Some(if enable { "true" } else { "false" });
print_args!(depool, wallet, keys, withdraw);
set_withdraw(conf, depool.unwrap(), wallet.unwrap(), keys.unwrap(), enable).await
}
async fn add_ordinary_stake(cmd: CommandData<'_>) -> Result<(), String> {
let stake = u64::from_str_radix(&convert::convert_token(cmd.stake)?, 10)
.map_err(|e| format!(r#"failed to parse stake value: {}"#, e))?;
let body = encode_add_ordinary_stake(stake).await?;
let fee = u64::from_str_radix(&convert::convert_token(&cmd.depool_fee)?, 10)
.map_err(|e| format!(r#"failed to parse depool fee value: {}"#, e))?;
 let value = (fee + stake) as f64 / 1e9;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, &format!("{}", value), &cmd.keys, &body, true).await
}
async fn replenish_stake(cmd: CommandData<'_>) -> Result<(), String> {
let body = encode_replenish_stake().await?;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, cmd.stake, &cmd.keys, &body, false).await
}
async fn call_ticktock(
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
) -> Result<(), String> {
let body = encode_ticktock().await?;
call_contract(conf.clone(), wallet, depool, "1", keys, &body, false).await
}
async fn add_exotic_stake(
cmd: CommandData<'_>,
beneficiary: &str,
wp: u32,
tp: u32,
is_vesting: bool,
) -> Result<(), String> {
let beneficiary = load_ton_address(beneficiary, &cmd.conf)?;
let stake = u64::from_str_radix(&convert::convert_token(cmd.stake)?, 10)
.map_err(|e| format!(r#"failed to parse stake value: {}"#, e))?;
let body = if is_vesting {
encode_add_vesting_stake(stake, beneficiary.as_str(), tp, wp).await?
} else {
encode_add_lock_stake(stake, beneficiary.as_str(), tp, wp).await?
};
let fee = u64::from_str_radix(&convert::convert_token(&cmd.depool_fee)?, 10)
.map_err(|e| format!(r#"failed to parse depool fee value: {}"#, e))?;
 let value = (fee + stake) as f64 / 1e9;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, &format!("{}", value), &cmd.keys, &body, true).await
}
async fn remove_stake(
cmd: CommandData<'_>,
) -> Result<(), String> {
 let stake = u64::from_str_radix(&convert::convert_token(cmd.stake)?, 10)
 .map_err(|e| format!(r#"failed to parse stake value: {}"#, e))?;
let body = encode_remove_stake(stake).await?;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, &cmd.depool_fee, &cmd.keys, &body, true).await
}
async fn withdraw_stake(
cmd: CommandData<'_>,
) -> Result<(), String> {
 let stake = u64::from_str_radix(&convert::convert_token(cmd.stake)?, 10)
 .map_err(|e| format!(r#"failed to parse stake value: {}"#, e))?;
let body = encode_withdraw_stake(stake).await?;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, &cmd.depool_fee, &cmd.keys, &body, true).await
}
async fn transfer_stake(cmd: CommandData<'_>, dest: &str) -> Result<(), String> {
let dest = load_ton_address(dest, &cmd.conf)?;
 let stake = u64::from_str_radix(&convert::convert_token(cmd.stake)?, 10)
 .map_err(|e| format!(r#"failed to parse stake value: {}"#, e))?;
let body = encode_transfer_stake(dest.as_str(), stake).await?;
call_contract(cmd.conf.clone(), &cmd.wallet, &cmd.depool, &cmd.depool_fee, &cmd.keys, &body, true).await
}
async fn set_withdraw(
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
enable: bool,
) -> Result<(), String> {
let body = encode_set_withdraw(enable).await?;
let value = conf.depool_fee.to_string();
call_contract(conf.clone(), &wallet, &depool, &format!("{}", value), &keys, &body, true).await
}
async fn set_donor(
conf: Config,
depool: &str,
wallet: &str,
keys: &str,
is_vesting: bool,
donor: &str,
) -> Result<(), String> {
let body = encode_set_donor(is_vesting, donor).await?;
let value = conf.depool_fee.to_string();
call_contract(conf.clone(), &wallet, &depool, &format!("{}", value), &keys, &body, true).await
}
async fn encode_body(func: &str, params: serde_json::Value) -> Result<String, String> {
let client = create_client_local()?;
ton_client::abi::encode_message_body(
client.clone(),
ParamsOfEncodeMessageBody {
abi: load_abi(DEPOOL_ABI)?,
call_set: CallSet::some_with_function_and_input(func, params).unwrap(),
is_internal: true,
..Default::default()
},
).await
.map_err(|e| format!("failed to encode body: {}", e))
.map(|r| r.body)
}
async fn encode_set_withdraw(flag: bool) -> Result<String, String> {
if flag {
encode_body("withdrawAll", json!({}))
} else {
encode_body("cancelWithdrawal", json!({}))
}.await
}
async fn encode_add_ordinary_stake(stake: u64) -> Result<String, String> {
encode_body("addOrdinaryStake", json!({
"stake": stake
})).await
}
async fn encode_replenish_stake() -> Result<String, String> {
encode_body("receiveFunds", json!({})).await
}
async fn encode_ticktock() -> Result<String, String> {
encode_body("ticktock", json!({})).await
}
async fn encode_add_vesting_stake(
stake: u64,
beneficiary: &str,
tperiod: u32,
wperiod: u32,
) -> Result<String, String> {
encode_body("addVestingStake", json!({
"stake": stake,
"beneficiary": beneficiary,
"withdrawalPeriod": wperiod,
"totalPeriod": tperiod
})).await
}
async fn encode_set_donor(is_vesting: bool, donor: &str) -> Result<String, String> {
if is_vesting {
encode_body("setVestingDonor", json!({
"donor": donor
}))
} else {
encode_body("setLockDonor", json!({
"donor": donor
}))
}.await
}
async fn encode_add_lock_stake(
stake: u64,
beneficiary: &str,
tperiod: u32,
wperiod: u32,
) -> Result<String, String> {
encode_body("addLockStake", json!({
"stake": stake,
"beneficiary": beneficiary,
"withdrawalPeriod": wperiod,
"totalPeriod": tperiod
})).await
}
async fn encode_remove_stake(target_value: u64) -> Result<String, String> {
encode_body("withdrawFromPoolingRound", json!({
"withdrawValue": target_value
})).await
}
async fn encode_withdraw_stake(target_value: u64) -> Result<String, String> {
encode_body("withdrawPart", json!({
"withdrawValue": target_value
})).await
}
async fn encode_transfer_stake(dest: &str, amount: u64) -> Result<String, String> {
encode_body("transferStake", json!({
"dest": dest,
"amount": amount
})).await
}
async fn call_contract(
conf: Config,
wallet: &str,
depool: &str,
value: &str,
keys: &str,
body: &str,
answer_is_expected: bool
) -> Result<(), String> {
if conf.no_answer {
send_with_body(conf.clone(), wallet, depool, value, keys, body).await
} else {
call_contract_and_get_answer(
conf.clone(),
wallet,
depool,
value,
keys,
body,
answer_is_expected
).await
}
}
async fn call_contract_and_get_answer(
conf: Config,
src_addr: &str,
dest_addr: &str,
value: &str,
keys: &str,
body: &str,
answer_is_expected: bool
) -> Result<(), String> {
let ton = create_client_verbose(&conf)?;
let abi = load_abi(MSIG_ABI)?;
let start = now();
let params = json!({
"dest": dest_addr,
"value": convert::convert_token(value)?,
"bounce": true,
"allBalance": false,
"payload": body,
}).to_string();
let msg = prepare_message(
ton.clone(),
src_addr,
abi.clone(),
"submitTransaction",
 &params,
None,
Some(keys.to_owned()),
false
).await?;
print_encoded_message(&msg);
println!("Multisig message processing... ");
let callback = |_| {
async move {}
};
let result = send_message(
ton.clone(),
ParamsOfSendMessage {
message: msg.message.clone(),
abi: Some(abi.clone()),
send_events: false,
..Default::default()
},
callback,
).await
.map_err(|e| format!("Failed: {:#}", e))?;
wait_for_transaction(
ton.clone(),
ParamsOfWaitForTransaction {
abi: Some(abi.clone()),
message: msg.message.clone(),
shard_block_id: result.shard_block_id,
send_events: true,
..Default::default()
},
callback.clone(),
).await
.map_err(|e| format!("Failed: {:#}", e))?;
println!("\nMessage was successfully sent to the multisig, waiting for message to be sent to the depool...");
let message = ton_client::net::wait_for_collection(
ton.clone(),
ParamsOfWaitForCollection {
collection: "messages".to_owned(),
filter: Some(answer_filter(src_addr, dest_addr, start)),
result: "id body created_at created_at_string".to_owned(),
timeout: Some(conf.timeout),
..Default::default()
},
).await.map_err(|e| println!("failed to query message: {}", e.to_string()));
if message.is_err() {
println!("\nRequest failed. Check the contract balance to be great enough to cover transfer value with possible fees.");
return Ok(());
}
println!("\nRequest was successfully sent to depool.");
if answer_is_expected {
println!("\nWaiting for depool answer...");
let mut statuses: HashMap<u32, &str> = HashMap::new();
statuses.insert(0, "SUCCESS");
statuses.insert(1, "STAKE_TOO_SMALL");
statuses.insert(3, "DEPOOL_CLOSED");
statuses.insert(6, "NO_PARTICIPANT");
statuses.insert(9, "PARTICIPANT_ALREADY_HAS_VESTING");
statuses.insert(10, "WITHDRAWAL_PERIOD_GREATER_TOTAL_PERIOD");
statuses.insert(11, "TOTAL_PERIOD_MORE_18YEARS");
statuses.insert(12, "WITHDRAWAL_PERIOD_IS_ZERO");
statuses.insert(13, "TOTAL_PERIOD_IS_NOT_DIVISIBLE_BY_WITHDRAWAL_PERIOD");
statuses.insert(16, "REMAINING_STAKE_LESS_THAN_MINIMAL");
statuses.insert(17, "PARTICIPANT_ALREADY_HAS_LOCK");
statuses.insert(18, "TRANSFER_AMOUNT_IS_TOO_BIG");
statuses.insert(19, "TRANSFER_SELF");
statuses.insert(20, "TRANSFER_TO_OR_FROM_VALIDATOR");
statuses.insert(21, "FEE_TOO_SMALL");
statuses.insert(22, "INVALID_ADDRESS");
statuses.insert(23, "INVALID_DONOR");
statuses.insert(24, "NO_ELECTION_ROUND");
statuses.insert(25, "INVALID_ELECTION_ID");
statuses.insert(26, "TRANSFER_WHILE_COMPLETING_STEP");
statuses.insert(27, "NO_POOLING_STAKE");
let message = ton_client::net::wait_for_collection(
ton.clone(),
ParamsOfWaitForCollection {
collection: "messages".to_owned(),
filter: Some(answer_filter(dest_addr, src_addr, start)),
result: "id body created_at created_at_string value".to_owned(),
timeout: Some(conf.timeout),
..Default::default()
},
).await.map_err(|e| println!("failed to query answer: {}", e.to_string()));
if message.is_ok() {
let message = message.unwrap().result;
println!("\nAnswer: ");
let (name, args) = print_message(ton.clone(), &message, PARTICIPANT_ABI, true).await;
if name == "receiveAnswer" {
let args: serde_json::Value = serde_json::from_str(&args).unwrap();
let status = args["errcode"].as_str().unwrap().parse::<u32>().unwrap();
let comment = args["comment"].as_str().unwrap();
if statuses.contains_key(&status) {
println!("Answer status: {}\nComment: {}", statuses[&status], comment);
} else {
println!("Answer status: Unknown({})\nComment: {}", status, comment);
}
}
println!();
} else {
println!("\nThere were no answer messages during the timeout period.\n");
}
}
println!("Done");
Ok(())
} | 37.15425 | 158 | 0.597492 |
e8bed5f5a7f942f8a4b252e22cf7c0081d5a2428 | 5,466 | use cairo;
use std::collections::HashMap;
use std::fmt;
#[derive(Clone, Copy)]
pub struct Color {
red: f64,
green: f64,
blue: f64,
alpha: f64,
}
#[derive(Debug, Deserialize, PartialEq, Eq, Hash, Clone, Copy)]
pub enum Layer {
Background,
Grid,
Axes,
#[serde(rename = "F.Cu")]
FCu,
#[serde(rename = "*.Cu")]
Cu,
#[serde(rename = "F.SilkS")]
FSilkS,
#[serde(rename = "F.Fab")]
FFab,
#[serde(rename = "F.CrtYd")]
FCrtYd,
#[serde(rename = "F.Mask")]
FMask,
#[serde(rename = "*.Mask")]
Mask,
#[serde(rename = "F.Paste")]
FPaste,
}
impl fmt::Display for Layer {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Layer::Background => write!(f, "Background"),
Layer::Grid => write!(f, "Grid"),
Layer::Axes => write!(f, "Axes"),
Layer::FCu => write!(f, "F.Cu"),
Layer::Cu => write!(f, "*.Cu"),
Layer::FSilkS => write!(f, "F.SilkS"),
Layer::FFab => write!(f, "F.Fab"),
Layer::FCrtYd => write!(f, "F.CrtYd"),
Layer::FMask => write!(f, "F.Mask"),
Layer::Mask => write!(f, "*.Mask"),
Layer::FPaste => write!(f, "F.Paste"),
}
}
}
pub struct LayerStat {
pub color: Color,
pub z: i64,
}
impl Color {
pub fn set_source(&self, cr: &cairo::Context) {
cr.set_source_rgba(self.red, self.green, self.blue, self.alpha);
}
}
// kicad: see in common/colors.cpp
// and common/class_colors_design_settings.cpp
lazy_static! {
pub static ref LAYER: HashMap<Layer, LayerStat> = {
let mut m = HashMap::new();
m.insert(
Layer::Background,
LayerStat {
color: Color {
red: 0.0,
green: 0.0,
blue: 0.0,
alpha: 1.0,
},
z: -100,
},
);
m.insert(
Layer::Grid,
LayerStat {
color: Color {
red: 0.52,
green: 0.52,
blue: 0.52,
alpha: 1.0,
},
z: -90,
},
);
m.insert(
Layer::Axes,
LayerStat {
color: Color {
red: 0.0,
green: 0.0,
blue: 0.52,
alpha: 1.0,
},
z: -80,
},
);
let color = Color {
red: 1.0,
green: 0.0,
blue: 0.0,
alpha: 0.52,
};
m.insert(Layer::FCu, LayerStat { color, z: 1 });
m.insert(Layer::Cu, LayerStat { color, z: 1 });
m.insert(
Layer::FMask,
LayerStat {
color: Color {
red: 0.0,
green: 0.00,
blue: 1.00,
alpha: 0.52,
},
z: 8,
},
);
m.insert(
Layer::FPaste,
LayerStat {
color: Color {
red: 1.0,
green: 0.82,
blue: 0.26,
alpha: 0.83,
},
z: 9,
},
);
m.insert(
Layer::FSilkS,
LayerStat {
color: Color {
red: 1.0,
green: 1.0,
blue: 1.0,
alpha: 0.83,
},
z: 11,
},
);
m.insert(
Layer::FFab,
LayerStat {
color: Color {
red: 1.0,
green: 1.0,
blue: 0.0,
alpha: 0.76,
},
z: 12,
},
);
m.insert(
Layer::FCrtYd,
LayerStat {
color: Color {
red: 0.5,
green: 0.5,
blue: 0.5,
alpha: 0.76,
},
z: 13,
},
);
m
};
pub static ref LAYER_Z: Vec<(i64, Layer)> = {
let mut v = vec![];
for (ref k, ref x) in LAYER.iter() {
v.push((x.z, **k));
}
v.sort_by(|(i, _), (j, _)| i.cmp(j));
v
};
}
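// Minimal sketch of how these tables could be consumed when painting: iterate the
// layers in z-order and set the cairo source colour before drawing each one. The
// `cr` context and the `draw_layer` call are hypothetical and not part of this module.
//
//     for (_z, layer) in LAYER_Z.iter() {
//         if let Some(stat) = LAYER.get(layer) {
//             stat.color.set_source(&cr);
//             // draw_layer(&cr, *layer);
//         }
//     }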
/*
color_schemes['default'] = {
'background': (0.0, 0.0, 0.0, 1.0),
'grid': (0.5, 0.5, 0.5, 1.0),
'axes': (1.0, 0.0, 0.0, 1.0),
'name': (1.0, 1.0, 1.0, 1.0),
'value': (1.0, 1.0, 1.0, 1.0),
'silk': (1.0, 1.0, 1.0, 1.0),
'bsilk': (0.7, 0.7, 0.7, 0.3),
'docu': (1.0, 1.0, 0.0, 0.7),
'smd': (0.0, 0.0, 1.0, 1.0),
'pad': (0.0, 1.0, 0.0, 1.0),
'meta': (1.0, 1.0, 1.0, 1.0),
'restrict': (0.0, 1.0, 0.0, 0.3),
'stop': (0.0, 1.0, 1.0, 0.3),
'keepout': (1.0, 0.0, 0.5, 0.7),
'bkeepout': (0.7, 0.0, 0.35, 0.4),
'vrestrict': (0.0, 1.0, 0.0, 0.4),
'unknown': (1.0, 0.0, 1.0, 0.7),
'hole': (1.0, 1.0, 1.0, 0.7),
'edge': (1.0, 1.0, 1.0, 0.7),
'paste': (0.0, 1.0, 0.0, 0.7),
'bpaste': (0.0, 0.7, 0.0, 0.3),
'comments': (0.82, 0.66, 0.63, 0.8),
'assembly': (0.9, 0.6, 0.3, 0.8),
'bassembly': (0.65, 0.5, 0.2, 0.5),
'user1': (0.3, 0.6, 0.0, 0.7),
'user2': (0.3, 0.0, 0.6, 0.7)
}
*/
| 24.845455 | 72 | 0.363703 |
f49d95914e6d34cc80f45076708dc0ff4704b3d3 | 4,939 | use serde::{Deserialize, Serialize};
use std::hash::{Hash, Hasher};
pub trait ToGridPointND<T>
where
T: Clone,
{
fn to_nd(&self) -> GridPointND<T>;
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GridPoint1D<T> {
pub x: T,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GridPoint2D<T> {
pub x: T,
pub y: T,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GridPoint3D<T> {
pub x: T,
pub y: T,
pub z: T,
}
#[derive(Clone, Debug)]
pub struct GridPointND<T> {
indices: Vec<T>,
}
impl<T> ToGridPointND<T> for GridPoint1D<T>
where
T: Clone,
{
fn to_nd(&self) -> GridPointND<T> {
GridPointND::new(vec![self.x.clone()].iter())
}
}
impl<T> ToGridPointND<T> for GridPoint2D<T>
where
T: Clone,
{
fn to_nd(&self) -> GridPointND<T> {
GridPointND::new(vec![self.x.clone(), self.y.clone()].iter())
}
}
impl<T> ToGridPointND<T> for GridPoint3D<T>
where
T: Clone,
{
fn to_nd(&self) -> GridPointND<T> {
GridPointND::new(vec![self.x.clone(), self.y.clone(), self.z.clone()].iter())
}
}
impl<T> GridPointND<T> {
pub fn new<'a, 'b, I>(indices: I) -> Self
where
'a: 'b,
T: 'a + Clone,
I: Iterator<Item = &'b T>,
{
Self {
indices: indices.map(|ele| ele.clone()).collect(),
}
}
pub fn indices<'a>(&'a self) -> std::slice::Iter<'a, T> {
self.indices.iter()
}
pub fn to_1d(&self) -> Option<GridPoint1D<T>>
where
T: Clone,
{
let mut iter = self.indices();
let x = iter.next();
if x.is_none() {
return None;
}
match iter.next() {
Some(_) => None,
None => Some(GridPoint1D {
x: x.unwrap().clone(),
}),
}
}
pub fn to_2d(&self) -> Option<GridPoint2D<T>>
where
T: Clone,
{
let mut iter = self.indices();
let x = iter.next();
if x.is_none() {
return None;
}
let y = iter.next();
if y.is_none() {
return None;
}
match iter.next() {
Some(_) => None,
None => Some(GridPoint2D {
x: x.unwrap().clone(),
y: y.unwrap().clone(),
}),
}
}
pub fn to_3d(&self) -> Option<GridPoint3D<T>>
where
T: Clone,
{
let mut iter = self.indices();
let x = iter.next();
if x.is_none() {
return None;
}
let y = iter.next();
if y.is_none() {
return None;
}
let z = iter.next();
if z.is_none() {
return None;
}
match iter.next() {
Some(_) => None,
None => Some(GridPoint3D {
x: x.unwrap().clone(),
y: y.unwrap().clone(),
z: z.unwrap().clone(),
}),
}
}
}
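// Conversion round-trip sketch (illustrative only; not part of the original module):
//
//     let p = GridPoint2D::new(1u32, 2u32);
//     let nd = p.to_nd();
//     assert_eq!(nd.to_2d(), Some(GridPoint2D::new(1, 2)));
//     assert_eq!(nd.to_3d(), None);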
impl<T> PartialEq for GridPoint1D<T>
where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.x == other.x
}
}
impl<T> PartialEq for GridPoint2D<T>
where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.x == other.x && self.y == other.y
}
}
impl<T> PartialEq for GridPoint3D<T>
where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.x == other.x && self.y == other.y && self.z == other.z
}
}
impl<T> PartialEq for GridPointND<T>
where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
let mut not_eq_res = self.indices().zip(other.indices()).filter(|(a, b)| a != b);
match not_eq_res.next() {
Some(_) => false,
None => true,
}
}
}
impl<T> Eq for GridPoint1D<T> where T: PartialEq {}
impl<T> Eq for GridPoint2D<T> where T: PartialEq {}
impl<T> Eq for GridPoint3D<T> where T: PartialEq {}
impl<T> Eq for GridPointND<T> where T: PartialEq {}
impl<T> Hash for GridPoint1D<T>
where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.x.hash(state);
}
}
impl<T> Hash for GridPoint2D<T>
where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.x.hash(state);
self.y.hash(state);
}
}
impl<T> Hash for GridPoint3D<T>
where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.x.hash(state);
self.y.hash(state);
self.z.hash(state);
}
}
impl<T> Hash for GridPointND<T>
where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
for idx in self.indices() {
idx.hash(state)
}
}
}
impl<T> GridPoint3D<T> {
pub fn new(x: T, y: T, z: T) -> Self {
Self { x, y, z }
}
}
impl<T> GridPoint2D<T> {
pub fn new(x: T, y: T) -> Self {
Self { x, y }
}
}
impl<T> GridPoint1D<T> {
pub fn new(x: T) -> Self {
Self { x }
}
}
| 19.995951 | 89 | 0.497064 |
e827ce300d5cdb7d9ce5da57c554ba8d2d075055 | 955 | use modular_bitfield_msb::prelude::*;
#[derive(BitfieldSpecifier, Debug, PartialEq, Copy, Clone)]
pub enum Mode {
A = 0b00,
B = 0b01,
C = 0b10,
D = 0b11,
}
#[bitfield]
#[derive(Debug)]
pub struct StatFlag {
x: bool,
y: bool,
z: B4,
#[bits = 2]
mode: Mode,
}
fn main() {
let mut flag = StatFlag::new();
assert_eq!(flag.x(), false);
assert_eq!(flag.y(), false);
assert_eq!(flag.z(), 0);
assert_eq!(flag.mode(), Mode::A);
let new_mode = Mode::B;
flag.set_mode(new_mode);
assert_eq!(flag.mode(), new_mode);
flag.set_x(true);
assert_eq!(flag.x(), true);
assert_eq!(flag.mode(), new_mode);
flag.set_y(true);
assert_eq!(flag.y(), true);
assert_eq!(flag.mode(), new_mode);
flag.set_z(0b01);
assert_eq!(flag.z(), 0b01);
assert_eq!(flag.mode(), new_mode);
flag.set_z(0b11);
assert_eq!(flag.z(), 0b11);
assert_eq!(flag.mode(), new_mode);
}
| 19.1 | 59 | 0.582199 |
79bb13ea5ed23bfe5db2c9f710fc782866eabf54 | 5,509 | use super::action::EditKind;
use crate::action::ActionKind;
use crate::mode::Mode;
use crate::position::Position;
use crate::selection::Selection;
use crate::store::{RootStore, Store};
use core::ops::Range;
use tree_sitter::{InputEdit, Point};
pub(crate) struct EditStore<'a> {
root: &'a mut RootStore,
}
impl<'a> Store for EditStore<'a> {
fn root(&self) -> &RootStore {
&self.root
}
fn root_mut(&mut self) -> &mut RootStore {
&mut self.root
}
}
impl<'a> EditStore<'a> {
pub(crate) fn new(root: &'a mut RootStore) -> Self {
Self { root }
}
fn insert(&mut self, to: usize, s: &str) {
let pos = self.state().buffer.get_position_by_offset(to);
let byte_l = s.bytes().count();
let edit = InputEdit {
start_byte: to,
old_end_byte: to,
new_end_byte: to + byte_l,
start_position: Point::default(),
old_end_position: Point::default(),
new_end_position: Point::default(),
};
self.highlighter_mut().edit_tree(&edit);
self.state_mut().buffer.insert(pos, s);
}
fn remove(&mut self, range: Range<usize>) -> String {
let start = range.start;
let s = self.state_mut().buffer.remove(range);
let byte_l = s.bytes().count();
let edit = InputEdit {
start_byte: start,
old_end_byte: start + byte_l,
new_end_byte: start,
start_position: Point::default(),
old_end_position: Point::default(),
new_end_position: Point::default(),
};
self.highlighter_mut().edit_tree(&edit);
s
}
pub(crate) fn remove_char(&mut self, count: usize) {
let start = self.state().get_cursor_offset();
let yank = self.remove(start..start + count);
self.root_mut().action(ActionKind::SetYank(yank).once());
}
pub(crate) fn remove_selection(&mut self, selection: &Selection, count: usize) {
for _ in 0..count {
let range = self.state().measure_selection(selection.clone());
let start = range.start;
let yank = self.remove(range);
self.root_mut().action(ActionKind::SetYank(yank).once());
self.root_mut().movement().offset(start);
}
}
pub(crate) fn append_yank(&mut self, count: usize) {
let col = if self.state().yanked.ends_with('\n') {
self.root_mut().movement().down(1);
0
} else {
self.root_mut().movement().right(1);
self.state().cursor.col
};
let to = self.state().buffer.get_offset_by_position(Position {
col,
row: self.state().cursor.row,
});
let s = self.state().yanked.clone();
for _ in 0..count {
self.insert(to, &s);
}
}
pub(crate) fn insert_yank(&mut self, count: usize) {
let col = if self.state().yanked.ends_with('\n') {
0
} else {
self.state().cursor.col
};
let to = self.state().buffer.get_offset_by_position(Position {
col,
row: self.state().cursor.row,
});
let s = self.state().yanked.clone();
for _ in 0..count {
self.insert(to, &s);
}
}
pub(crate) fn line_break(&mut self, count: usize) {
let to = self.state().get_cursor_offset();
for _ in 0..count {
self.insert(to, "\n");
if let Mode::Insert(_, s) = &mut self.state_mut().mode {
s.push('\n');
}
self.root_mut().movement().down(1);
self.root_mut().movement().line_head();
}
}
pub(crate) fn insert_char(&mut self, c: char, count: usize) {
let to = self.state().get_cursor_offset();
for _ in 0..count {
if let Mode::Insert(_, s) = &mut self.state_mut().mode {
s.push(c);
}
self.insert(to, &c.to_string());
self.root_mut().movement().right(1);
}
self.history_mut().pop();
}
pub(crate) fn insert_string(&mut self, s: &str, count: usize) {
let to = self.state().get_cursor_offset();
for _ in 0..count {
self.insert(to, &s);
self.root_mut().movement().right(s.chars().count());
}
}
pub(crate) fn edit(&mut self, selection: &Selection, s: &str) {
self.remove_selection(selection, 1);
self.insert_string(s, 1);
self.root_mut().movement().left(1);
}
pub(crate) fn action(&mut self, edit: EditKind, count: usize) {
use EditKind::*;
let record = self.root().create_record();
self.history_mut().push(record);
match &edit {
RemoveChar => self.remove_char(count),
RemoveSelection(selection) => self.remove_selection(&selection, count),
AppendYank => self.append_yank(count),
InsertYank => self.insert_yank(count),
LineBreak => self.line_break(count),
InsertChar(c) => self.insert_char(*c, count),
InsertString(m, s) => {
if let Some(m) = m {
self.root_mut().movement().action(m.clone(), 1);
}
self.insert_string(s, count)
}
Edit(selection, s) => self.edit(selection, s),
};
self.state_mut().prev_edit = Some((edit, count));
}
}
| 31.48 | 84 | 0.533854 |
294140f22b1a39811e460318f9e0552c95c16a11 | 193 | #![no_main]
#[macro_use] extern crate libfuzzer_sys;
extern crate dora;
fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
dora::run_content(s);
}
});
| 19.3 | 46 | 0.595855 |
3affb00a899077a5eddccb44f56a8a735654894a | 1,458 |
pub struct IconPlayForWork {
props: crate::Props,
}
impl yew::Component for IconPlayForWork {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M11 5v5.59H7.5l4.5 4.5 4.5-4.5H13V5h-2zm-5 9c0 3.31 2.69 6 6 6s6-2.69 6-6h-2c0 2.21-1.79 4-4 4s-4-1.79-4-4H6z"/></svg>
</svg>
}
}
}
| 31.695652 | 261 | 0.572016 |
1ad996e1360291ec225740f3ca88106994a2ace6 | 16,929 | use super::{detector, errors, observer, protomitch, protomitch_pb, reactor, MemberInfo};
use byteorder::{NetworkEndian, ReadBytesExt};
use futures::prelude::*;
use futures::{future, stream, sync::mpsc, sync::oneshot};
use native_tls;
use std::{io, mem, net, time};
use stream_cancel;
use tokio;
use tokio_tls;
use stream_cancel::{Trigger, Valve};
use tokio::executor::Executor;
use tokio::prelude::*;
pub(crate) type TlsTcpStream = tokio_tls::TlsStream<tokio::net::TcpStream>;
pub(crate) type FutureSpawn = Box<Future<Item = (), Error = ()> + Send + 'static>;
pub(crate) type FutureTLS =
Box<Future<Item = TlsTcpStream, Error = errors::Error> + Send + 'static>;
pub(crate) type FutureMitchMsg = Box<
Future<Item = (TlsTcpStream, protomitch_pb::MitchMsg), Error = errors::Error> + Send + 'static,
>;
/// Configuration builder for a swarm member.
pub struct MitchConfig {
listener: Option<tokio::net::tcp::TcpListener>,
local: MemberInfo,
tls_acceptor: Option<native_tls::TlsAcceptor>,
tls_connector: Option<native_tls::TlsConnector>,
notifications_tx: Option<mpsc::Sender<observer::SwarmNotification>>,
}
impl Default for MitchConfig {
fn default() -> Self {
Self {
listener: None,
notifications_tx: None,
local: MemberInfo::default(),
tls_acceptor: None,
tls_connector: None,
}
}
}
impl MitchConfig {
/// Set metadata to be advertised by local member.
pub fn local_metadata(mut self, metadata: Vec<u8>) -> errors::Result<Self> {
if metadata.len() > MemberInfo::MAX_METADATA {
bail!("metadata larger than maximum size");
}
self.local.metadata = metadata;
Ok(self)
}
/// Set a custom TCP listener.
pub fn listener(mut self, listener: Option<tokio::net::tcp::TcpListener>) -> Self {
self.listener = listener;
self
}
/// Set a custom TLS acceptor (with server certificate).
pub fn tls_acceptor(mut self, tls_acceptor: Option<native_tls::TlsAcceptor>) -> Self {
self.tls_acceptor = tls_acceptor;
self
}
/// Set a custom TLS connector (with client certificate).
pub fn tls_connector(mut self, tls_connector: Option<native_tls::TlsConnector>) -> Self {
self.tls_connector = tls_connector;
self
}
/// Set a channel where to receive notifications about swarm changes.
pub fn notifications_channel(
mut self,
tx: Option<mpsc::Sender<observer::SwarmNotification>>,
) -> Self {
self.notifications_tx = tx;
self
}
/// Finalize and create the node for the swarm.
pub fn build(self) -> super::FutureSwarm {
// Cancellation helpers for internal tasks.
let (trigger, valve) = stream_cancel::Valve::new();
// TLS, client-side.
let tls_connector = match self.tls_connector {
Some(tc) => tc,
None => {
let fut_err = future::err("Client TLS configuration missing".into());
return Box::new(fut_err);
}
};
// TLS, server-side
let tls_acceptor = match self.tls_acceptor {
Some(ta) => ta,
None => {
let fut_err = future::err("Server TLS configuration missing".into());
return Box::new(fut_err);
}
};
// Membership channels.
let (mems_tx, mems_rx) = mpsc::channel(200);
let cluster = SwarmMembers {
members: Some(vec![]),
reactor_tx: mems_tx,
events_rx: Some(mems_rx),
};
// Failure detector channels.
let (det_tx, det_rx) = mpsc::channel(200);
let detector = SwarmDetector {
det_tx,
det_rx: Some(det_rx),
};
let swarm = MitchSwarm {
notifications_tx: self.notifications_tx,
local: self.local,
members: cluster,
detector,
trigger: Some(trigger),
valve,
tls_connector,
};
swarm.start(self.listener, tls_acceptor)
}
}
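// Minimal configuration sketch (illustrative only: the metadata and TLS identities
// below are placeholders, and `?` assumes an enclosing function returning Result):
//
//     let fut_swarm = MitchConfig::default()
//         .local_metadata(b"node-a".to_vec())?
//         .tls_acceptor(Some(server_identity_acceptor))
//         .tls_connector(Some(client_connector))
//         .build();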
#[derive(Debug)]
pub(crate) struct SwarmDetector {
pub(crate) det_tx: mpsc::Sender<detector::Event>,
pub(crate) det_rx: Option<mpsc::Receiver<detector::Event>>,
}
/// Local swarm member.
#[derive(Debug)]
pub(crate) struct SwarmMembers {
pub(crate) members: Option<Vec<MemberInfo>>,
pub(crate) reactor_tx: mpsc::Sender<reactor::Event>,
pub(crate) events_rx: Option<mpsc::Receiver<reactor::Event>>,
}
/// Local swarm member.
pub struct MitchSwarm {
// Local node information.
pub(crate) local: MemberInfo,
// Swarm peers and membership handling.
pub(crate) members: SwarmMembers,
// Failure detector.
pub(crate) detector: SwarmDetector,
// TLS, client-side.
pub(crate) tls_connector: native_tls::TlsConnector,
// Notitications to external observers.
pub(crate) notifications_tx: Option<mpsc::Sender<observer::SwarmNotification>>,
pub(crate) trigger: Option<Trigger>,
pub(crate) valve: Valve,
}
impl MitchSwarm {
// Main server task.
fn serve_incoming(
&mut self,
listener: Option<tokio::net::TcpListener>,
tls_acceptor: tokio_tls::TlsAcceptor,
) -> FutureSpawn {
// Initialize TCP listener.
let listener = match listener {
Some(l) => l,
None => {
let tcp_listener = tokio::net::tcp::TcpListener::bind(&self.local.target);
match tcp_listener {
Ok(tl) => tl,
Err(e) => {
error!("{}", e);
return Box::new(future::err(()));
}
}
}
};
self.local.target = match listener.local_addr() {
Ok(addr) => addr,
Err(e) => {
error!("unable to get local socket address: {}", e);
return Box::new(future::err(()));
}
};
let tx = self.members.reactor_tx.clone();
let fut_server = self
.valve
.wrap(listener.incoming().map_err(errors::Error::from))
.and_then(move |tcp| {
let tx = tx.clone();
let fut = tls_acceptor
.accept(tcp)
.map_err(errors::Error::from)
.inspect(|tls| trace!("TLS accepted: {:?}", tls.get_ref().get_ref()))
.and_then(|tls| read_protomsg(tls, 5))
.and_then(move |(tls, msg)| dispatch(tls, msg, tx, 5))
.map_err(|e| errors::Error::from(format!("serve_incoming error: {}", e)));
Ok(fut)
}).buffer_unordered(10)
.then(|res| match res {
Ok(r) => Ok(Some(r)),
Err(err) => {
error!("server error: {}", err);
Ok(None)
}
}).filter_map(|x| x)
.for_each(|_| Ok(()));
Box::new(fut_server)
}
// Start swarming.
fn start(
self,
listener: Option<tokio::net::TcpListener>,
tls_acceptor: native_tls::TlsAcceptor,
) -> super::FutureSwarm {
let banner = format!(
"Starting local node {}, nickname \"{}\"",
self.local.id, self.local.nickname
);
let tls = tokio_tls::TlsAcceptor::from(tls_acceptor);
let fut_start = future::ok(self)
.inspect(move |_| info!("{}", banner))
.and_then(move |mut sw| {
// Internal tasks.
let fut_detector = super::detector::detector_task(&mut sw);
let fut_server = sw.serve_incoming(listener, tls);
let fut_mems = super::reactor::membership_task(&mut sw);
// Spawn all internal tasks.
let membership_task = tokio::executor::DefaultExecutor::current().spawn(fut_mems);
let server_task = tokio::executor::DefaultExecutor::current().spawn(fut_server);
let detector_task = tokio::executor::DefaultExecutor::current().spawn(fut_detector);
// Chain all results and pass the MitchSwarm through.
future::ok(sw)
.then(|sw| membership_task.and(sw))
.then(|sw| server_task.and(sw))
.then(|sw| detector_task.and(sw))
.from_err()
}).inspect(|_| debug!("MitchSwarm started"));
Box::new(fut_start)
}
/// Return a membership observer.
pub fn membership(&self) -> observer::Membership {
observer::Membership {
reactor_tx: self.members.reactor_tx.clone(),
}
}
/// Stop this swarm member.
pub fn stop(self) -> super::FutureTask {
let fut_stop = future::ok(self)
.and_then(|swarm| {
let (tx, rx) = oneshot::channel();
let ch = swarm.members.reactor_tx.clone();
ch.send(reactor::Event::Shutdown(tx))
.and_then(|mut ch| ch.close())
.map_err(|_| errors::Error::from("stop: send error"))
.and_then(|_| rx.from_err())
.map(move |_| swarm)
}).and_then(|mut swarm| {
// Cancel all internal tasks.
if let Some(trigger) = swarm.trigger.take() {
trigger.cancel();
}
// Consume this swarm member.
drop(swarm);
Ok(())
}).inspect(|_| debug!("MitchSwarm stopped"));
Box::new(fut_stop)
}
/// Join an existing swarm, synchronizing the initial set of peers.
pub fn join(
&self,
dests: Vec<net::SocketAddr>,
initial_pull_size: Option<u32>,
) -> super::FutureTask {
let tls_connector = self.tls_connector.clone();
let tx = self.members.reactor_tx.clone();
let local = self.local.clone();
let par_reqs = dests.len().saturating_add(1);
let fut_join = stream::iter_ok(dests)
.and_then(move |dst| {
let tls = tls_connector.clone();
trace!("Joining {:?}", &dst);
let info = local.clone();
tls_connect(tls.clone(), &dst, 8)
.and_then(move |tls| {
protomitch::join(&info)
.and_then(|payload| tokio::io::write_all(tls, payload).from_err())
}).timeout(time::Duration::from_secs(10))
.map_err(|e| errors::Error::from(format!("ping error: {}", e)))
.and_then(move |_| Ok((tls, dst)))
}).and_then(move |(tls_connector, dst)| {
trace!("Pulling from {:?}", &dst);
tls_connect(tls_connector.clone(), &dst, 8)
.and_then(move |tls| {
protomitch::pull(initial_pull_size)
.and_then(|payload| tokio::io::write_all(tls, payload).from_err())
}).timeout(time::Duration::from_secs(10))
.map_err(|e| errors::Error::from(format!("pull error: {}", e)))
}).and_then(move |(tls, _)| {
trace!("Syncing");
read_protomsg(tls, 5)
}).and_then(|(_tls, msg)| {
trace!("Parsing protobuf");
match msg.value {
Some(protomitch_pb::mitch_msg::Value::Sync(s)) => Ok(s),
_ => Err("foo".into()),
}
}).and_then(move |msg| {
let reactor_tx = tx.clone();
futures::stream::iter_ok(msg.members).for_each(move |member| {
let reactor_tx = reactor_tx.clone();
future::result(protomitch::join_info(member)).and_then(move |member_info| {
let join_event = reactor::Event::Join(member_info);
reactor_tx
.clone()
.send(join_event)
.map(|_| ())
.map_err(|_| "sync error".into())
})
})
}).map(|_| Ok(()))
.buffer_unordered(par_reqs)
.for_each(|_| Ok(()))
.from_err();
Box::new(fut_join)
}
}
pub(crate) fn dispatch(
tls: tokio_tls::TlsStream<tokio::net::TcpStream>,
msg: protomitch_pb::MitchMsg,
tx: mpsc::Sender<reactor::Event>,
_timeout: u64,
) -> FutureTLS {
use protomitch_pb::mitch_msg::Value;
let fut_void = future::ok((tls, msg))
.and_then(|(tls, msg)| match msg.value {
None => future::err("dispatch: None value".into()),
Some(value) => future::ok((tls, value)),
}).and_then(|(tls, value)| match value {
Value::Failed(msg) => process_failed(tls, msg, tx),
Value::Join(msg) => process_join(tls, msg, tx),
Value::Ping(_) => process_ping(tls),
Value::Pull(msg) => process_pull(tls, msg, tx),
Value::Sync(_) => Box::new(future::err("unexpected sync message".into())),
//_ => Box::new(future::err("dispatch: unknown value".into())),
});
Box::new(fut_void)
}
pub(crate) fn process_ping(tls: tokio_tls::TlsStream<tokio::net::TcpStream>) -> FutureTLS {
let fut_tls = future::ok(tls).inspect(|_| trace!("process_ping"));
Box::new(fut_tls)
}
pub(crate) fn process_failed(
tls: tokio_tls::TlsStream<tokio::net::TcpStream>,
msg: protomitch_pb::FailedMsg,
tx: mpsc::Sender<reactor::Event>,
) -> FutureTLS {
let fut_tls = future::ok((tls, msg, tx)).and_then(|(tls, msg, tx)| {
let event = reactor::Event::Failed(msg.id);
tx.send(event)
.map_err(|e| errors::Error::from(format!("process_failed error: {}", e)))
.and_then(move |_| Ok(tls))
});
Box::new(fut_tls)
}
pub(crate) fn process_join(
tls: tokio_tls::TlsStream<tokio::net::TcpStream>,
msg: protomitch_pb::JoinMsg,
tx: mpsc::Sender<reactor::Event>,
) -> FutureTLS {
let fut_tls = future::result(protomitch::join_info(msg)).and_then(move |mi| {
let event = reactor::Event::Join(mi);
tx.send(event)
.map_err(|e| errors::Error::from(format!("join error: {}", e)))
.and_then(move |_| Ok(tls))
});
Box::new(fut_tls)
}
pub(crate) fn process_pull(
tls: tokio_tls::TlsStream<tokio::net::TcpStream>,
msg: protomitch_pb::PullMsg,
ch: mpsc::Sender<reactor::Event>,
) -> FutureTLS {
let fut_tls = future::ok((tls, msg, ch))
.and_then(|(tls, _, ch)| {
let (tx, rx) = oneshot::channel();
let ev = reactor::Event::Snapshot(tx);
ch.send(ev)
.map(|_| (tls, rx))
.map_err(|e| errors::Error::from(format!("pull error: {}", e)))
}).and_then(|(tls, rx)| {
rx.and_then(move |sync| Ok((tls, sync)))
.map_err(|e| errors::Error::from(format!("sync error: {}", e)))
}).and_then(|(tls, sync)| {
protomitch::sync(&sync)
.and_then(|payload| tokio::io::write_all(tls, payload).from_err())
}).map(|(tls, _)| tls);
Box::new(fut_tls)
}
pub(crate) fn tls_connect(
tls_connector: native_tls::TlsConnector,
dst: &net::SocketAddr,
timeout: u64,
) -> FutureTLS {
let fut_tls_connect = tokio::net::TcpStream::connect(dst)
.map_err(errors::Error::from)
.and_then(move |tcp| {
let cx = tokio_tls::TlsConnector::from(tls_connector);
cx.connect("mitch-rs", tcp).from_err()
}).inspect(|_| trace!("TLS connected"))
.timeout(time::Duration::from_secs(timeout))
.map_err(|e| errors::Error::from(format!("tls_connect error: {}", e)));
Box::new(fut_tls_connect)
}
pub(crate) fn read_protomsg(
tls: tokio_tls::TlsStream<tokio::net::TcpStream>,
timeout: u64,
) -> FutureMitchMsg {
let fut_protomsg = future::ok(tls)
.and_then(|tls| {
let buf = vec![0x00; mem::size_of::<u32>()];
tokio::io::read_exact(tls, buf)
}).and_then(|(tls, len)| {
let buflen = io::Cursor::new(len).read_u32::<NetworkEndian>();
future::result(buflen).from_err().and_then(|buflen| {
let buf = vec![0x00; buflen as usize];
tokio::io::read_exact(tls, buf)
})
}).timeout(time::Duration::from_secs(timeout))
.map_err(|e| errors::Error::from(format!("read_protomsg error: {}", e)))
.and_then(|(tls, msg)| protomitch::try_parse_mitchmsg(&msg).map(|mmsg| (tls, mmsg)))
.inspect(|(_tls, protomsg)| trace!("got protomitch {:?}", protomsg));
Box::new(fut_protomsg)
}
| 36.962882 | 100 | 0.541556 |
1d1bbb09aa267f56bd6004d35f5266fa301c1c90 | 193 | #![feature(core_intrinsics)]
fn main() {
// MIN/-1 cannot be represented
unsafe { std::intrinsics::unchecked_div(i16::min_value(), -1); } //~ ERROR Overflow executing `unchecked_div`
}
| 32.166667 | 113 | 0.678756 |
0af2606b3b27bdc6ac15d9b7e2b7ffec0323eeda | 1,685 | /*
*
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
* Generated by: https://openapi-generator.tech
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LolClashReadyCheckInfo {
#[serde(rename = "acceptError", skip_serializing_if = "Option::is_none")]
pub accept_error: Option<String>,
#[serde(rename = "isAcceptSuccessful", skip_serializing_if = "Option::is_none")]
pub is_accept_successful: Option<bool>,
#[serde(rename = "queueId", skip_serializing_if = "Option::is_none")]
pub queue_id: Option<i32>,
#[serde(rename = "readyCheckResource", skip_serializing_if = "Option::is_none")]
pub ready_check_resource: Option<crate::models::LolClashMatchmakingReadyCheckResource>,
#[serde(rename = "timestampLastClashGameflowDodge", skip_serializing_if = "Option::is_none")]
pub timestamp_last_clash_gameflow_dodge: Option<i64>,
#[serde(rename = "timestampReceived", skip_serializing_if = "Option::is_none")]
pub timestamp_received: Option<i64>,
#[serde(rename = "timestampResponseComplete", skip_serializing_if = "Option::is_none")]
pub timestamp_response_complete: Option<i64>,
}
impl LolClashReadyCheckInfo {
pub fn new() -> LolClashReadyCheckInfo {
LolClashReadyCheckInfo {
accept_error: None,
is_accept_successful: None,
queue_id: None,
ready_check_resource: None,
timestamp_last_clash_gameflow_dodge: None,
timestamp_received: None,
timestamp_response_complete: None,
}
}
}
| 35.851064 | 109 | 0.701484 |
50a651c00a5d3d3f58c471ac0a9605d6e11578da | 524 | //! Tests for the [`Actor`] trait.
use heph::actor::{self, NewActor};
#[test]
fn future_output_result() {
// Actor is implemented for `Future<Output = Result<(), E>>`.
async fn actor(_: actor::Context<(), ()>) -> Result<(), ()> {
Ok(())
}
is_new_actor(actor as fn(_) -> _);
}
#[test]
fn future_output_tuple() {
// Actor is implemented for `Future<Output = ()>`.
async fn actor(_: actor::Context<(), ()>) {}
is_new_actor(actor as fn(_) -> _);
}
fn is_new_actor<NA: NewActor>(_: NA) {}
| 23.818182 | 65 | 0.572519 |
9c91b92e515b1b0ffc22a06fc81a4452a488db1d | 8,401 | //! This module provides the [CommentWriter] which makes it possible
//! to add comments to the written cranelift ir.
//!
//! # Example
//!
//! ```clif
//! test compile
//! target x86_64
//!
//! function u0:0(i64, i64, i64) system_v {
//! ; symbol _ZN119_$LT$example..IsNotEmpty$u20$as$u20$mini_core..FnOnce$LT$$LP$$RF$$u27$a$u20$$RF$$u27$b$u20$$u5b$u16$u5d$$C$$RP$$GT$$GT$9call_once17he85059d5e6a760a0E
//! ; instance Instance { def: Item(DefId(0/0:29 ~ example[8787]::{{impl}}[0]::call_once[0])), substs: [ReErased, ReErased] }
//! ; sig ([IsNotEmpty, (&&[u16],)]; c_variadic: false)->(u8, u8)
//!
//! ; ssa {_2: NOT_SSA, _4: NOT_SSA, _0: NOT_SSA, _3: (empty), _1: NOT_SSA}
//! ; msg loc.idx param pass mode ssa flags ty
//! ; ret _0 = v0 ByRef NOT_SSA (u8, u8)
//! ; arg _1 = v1 ByRef NOT_SSA IsNotEmpty
//! ; arg _2.0 = v2 ByVal(types::I64) NOT_SSA &&[u16]
//!
//! ss0 = explicit_slot 0 ; _1: IsNotEmpty size=0 align=1,8
//! ss1 = explicit_slot 8 ; _2: (&&[u16],) size=8 align=8,8
//! ss2 = explicit_slot 8 ; _4: (&&[u16],) size=8 align=8,8
//! sig0 = (i64, i64, i64) system_v
//! sig1 = (i64, i64, i64) system_v
//! fn0 = colocated u0:6 sig1 ; Instance { def: Item(DefId(0/0:31 ~ example[8787]::{{impl}}[1]::call_mut[0])), substs: [ReErased, ReErased] }
//!
//! block0(v0: i64, v1: i64, v2: i64):
//! v3 = stack_addr.i64 ss0
//! v4 = stack_addr.i64 ss1
//! store v2, v4
//! v5 = stack_addr.i64 ss2
//! jump block1
//!
//! block1:
//! nop
//! ; _3 = &mut _1
//! ; _4 = _2
//! v6 = load.i64 v4
//! store v6, v5
//! ;
//! ; _0 = const mini_core::FnMut::call_mut(move _3, move _4)
//! v7 = load.i64 v5
//! call fn0(v0, v3, v7)
//! jump block2
//!
//! block2:
//! nop
//! ;
//! ; return
//! return
//! }
//! ```
use std::fmt;
use std::io::Write;
use cranelift_codegen::{
entity::SecondaryMap,
ir::{entities::AnyEntity, function::DisplayFunctionAnnotations},
write::{FuncWriter, PlainWriter},
};
use rustc_middle::ty::layout::FnAbiExt;
use rustc_session::config::OutputType;
use rustc_target::abi::call::FnAbi;
use crate::prelude::*;
#[derive(Debug)]
pub(crate) struct CommentWriter {
global_comments: Vec<String>,
entity_comments: FxHashMap<AnyEntity, String>,
}
impl CommentWriter {
pub(crate) fn new<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
let global_comments = if cfg!(debug_assertions) {
vec![
format!("symbol {}", tcx.symbol_name(instance).name),
format!("instance {:?}", instance),
format!("abi {:?}", FnAbi::of_instance(&RevealAllLayoutCx(tcx), instance, &[])),
String::new(),
]
} else {
vec![]
};
CommentWriter { global_comments, entity_comments: FxHashMap::default() }
}
}
#[cfg(debug_assertions)]
impl CommentWriter {
pub(crate) fn add_global_comment<S: Into<String>>(&mut self, comment: S) {
self.global_comments.push(comment.into());
}
pub(crate) fn add_comment<S: Into<String> + AsRef<str>, E: Into<AnyEntity>>(
&mut self,
entity: E,
comment: S,
) {
use std::collections::hash_map::Entry;
match self.entity_comments.entry(entity.into()) {
Entry::Occupied(mut occ) => {
occ.get_mut().push('\n');
occ.get_mut().push_str(comment.as_ref());
}
Entry::Vacant(vac) => {
vac.insert(comment.into());
}
}
}
}
impl FuncWriter for &'_ CommentWriter {
fn write_preamble(
&mut self,
w: &mut dyn fmt::Write,
func: &Function,
reg_info: Option<&isa::RegInfo>,
) -> Result<bool, fmt::Error> {
for comment in &self.global_comments {
if !comment.is_empty() {
writeln!(w, "; {}", comment)?;
} else {
writeln!(w)?;
}
}
if !self.global_comments.is_empty() {
writeln!(w)?;
}
self.super_preamble(w, func, reg_info)
}
fn write_entity_definition(
&mut self,
w: &mut dyn fmt::Write,
_func: &Function,
entity: AnyEntity,
value: &dyn fmt::Display,
) -> fmt::Result {
write!(w, " {} = {}", entity, value)?;
if let Some(comment) = self.entity_comments.get(&entity) {
writeln!(w, " ; {}", comment.replace('\n', "\n; "))
} else {
writeln!(w)
}
}
fn write_block_header(
&mut self,
w: &mut dyn fmt::Write,
func: &Function,
isa: Option<&dyn isa::TargetIsa>,
block: Block,
indent: usize,
) -> fmt::Result {
PlainWriter.write_block_header(w, func, isa, block, indent)
}
fn write_instruction(
&mut self,
w: &mut dyn fmt::Write,
func: &Function,
aliases: &SecondaryMap<Value, Vec<Value>>,
isa: Option<&dyn isa::TargetIsa>,
inst: Inst,
indent: usize,
) -> fmt::Result {
PlainWriter.write_instruction(w, func, aliases, isa, inst, indent)?;
if let Some(comment) = self.entity_comments.get(&inst.into()) {
writeln!(w, "; {}", comment.replace('\n', "\n; "))?;
}
Ok(())
}
}
#[cfg(debug_assertions)]
impl FunctionCx<'_, '_, '_> {
pub(crate) fn add_global_comment<S: Into<String>>(&mut self, comment: S) {
self.clif_comments.add_global_comment(comment);
}
pub(crate) fn add_comment<S: Into<String> + AsRef<str>, E: Into<AnyEntity>>(
&mut self,
entity: E,
comment: S,
) {
self.clif_comments.add_comment(entity, comment);
}
}
pub(crate) fn should_write_ir(tcx: TyCtxt<'_>) -> bool {
tcx.sess.opts.output_types.contains_key(&OutputType::LlvmAssembly)
}
pub(crate) fn write_ir_file<'tcx>(
tcx: TyCtxt<'tcx>,
name: &str,
write: impl FnOnce(&mut dyn Write) -> std::io::Result<()>,
) {
if !should_write_ir(tcx) {
return;
}
let clif_output_dir = tcx.output_filenames(LOCAL_CRATE).with_extension("clif");
match std::fs::create_dir(&clif_output_dir) {
Ok(()) => {}
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {}
res @ Err(_) => res.unwrap(),
}
let clif_file_name = clif_output_dir.join(name);
let res: std::io::Result<()> = try {
let mut file = std::fs::File::create(clif_file_name)?;
write(&mut file)?;
};
if let Err(err) = res {
tcx.sess.warn(&format!("error writing ir file: {}", err));
}
}
pub(crate) fn write_clif_file<'tcx>(
tcx: TyCtxt<'tcx>,
postfix: &str,
isa: Option<&dyn cranelift_codegen::isa::TargetIsa>,
instance: Instance<'tcx>,
context: &cranelift_codegen::Context,
mut clif_comments: &CommentWriter,
) {
write_ir_file(tcx, &format!("{}.{}.clif", tcx.symbol_name(instance).name, postfix), |file| {
let value_ranges =
isa.map(|isa| context.build_value_labels_ranges(isa).expect("value location ranges"));
let mut clif = String::new();
cranelift_codegen::write::decorate_function(
&mut clif_comments,
&mut clif,
&context.func,
&DisplayFunctionAnnotations {
isa: Some(&*crate::build_isa(tcx.sess)),
value_ranges: value_ranges.as_ref(),
},
)
.unwrap();
writeln!(file, "test compile")?;
writeln!(file, "set is_pic")?;
writeln!(file, "set enable_simd")?;
writeln!(file, "target {} haswell", crate::target_triple(tcx.sess))?;
writeln!(file)?;
file.write_all(clif.as_bytes())?;
Ok(())
});
}
impl fmt::Debug for FunctionCx<'_, '_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "{:?}", self.instance.substs)?;
writeln!(f, "{:?}", self.local_map)?;
let mut clif = String::new();
::cranelift_codegen::write::decorate_function(
&mut &self.clif_comments,
&mut clif,
&self.bcx.func,
&DisplayFunctionAnnotations::default(),
)
.unwrap();
writeln!(f, "\n{}", clif)
}
}
| 30.111111 | 168 | 0.54803 |
e9a865c7eacd30e32fe09d44aa610da5bb064f2d | 9,316 | mod argument;
mod attribute;
mod operator;
mod returning;
mod search_path;
pub use argument::Argument;
use attribute::{Attribute, PgxAttributes};
pub use operator::PgOperator;
use operator::{PgxOperatorAttributeWithIdent, PgxOperatorOpName};
use returning::Returning;
use search_path::SearchPathList;
use eyre::WrapErr;
use proc_macro2::{Ident, Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens, TokenStreamExt};
use std::convert::TryFrom;
use syn::parse::{Parse, ParseStream};
use syn::Meta;
/// A parsed `#[pg_extern]` item.
///
/// It should be used with [`syn::parse::Parse`] functions.
///
/// Using [`quote::ToTokens`] will output the declaration for a `pgx::datum::sql_entity_graph::PgExternEntity`.
///
/// ```rust
/// use syn::{Macro, parse::Parse, parse_quote, parse};
/// use quote::{quote, ToTokens};
/// use pgx_utils::sql_entity_graph::PgExtern;
///
/// # fn main() -> eyre::Result<()> {
/// let parsed: PgExtern = parse_quote! {
/// fn example(x: Option<str>) -> Option<&'a str> {
/// unimplemented!()
/// }
/// };
/// let sql_graph_entity_tokens = parsed.to_token_stream();
/// # Ok(())
/// # }
/// ```
#[derive(Debug, Clone)]
pub struct PgExtern {
attrs: Option<PgxAttributes>,
func: syn::ItemFn,
}
impl PgExtern {
fn name(&self) -> String {
self.attrs
.as_ref()
.and_then(|a| {
a.attrs.iter().find_map(|candidate| match candidate {
Attribute::Name(name) => Some(name.value()),
_ => None,
})
})
.unwrap_or_else(|| self.func.sig.ident.to_string())
}
fn schema(&self) -> Option<String> {
self.attrs.as_ref().and_then(|a| {
a.attrs.iter().find_map(|candidate| match candidate {
Attribute::Schema(name) => Some(name.value()),
_ => None,
})
})
}
fn extern_attrs(&self) -> Option<&PgxAttributes> {
self.attrs.as_ref()
}
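    /// Scans the function's doc comments for a code block fenced as `pgxsql` and, if
    /// found, returns its contents as the SQL override, replacing `@FUNCTION_NAME@`
    /// with the generated wrapper's name.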
fn overridden(&self) -> Option<String> {
let mut retval = None;
let mut in_commented_sql_block = false;
for attr in &self.func.attrs {
let meta = attr.parse_meta().ok();
if let Some(meta) = meta {
if meta.path().is_ident("doc") {
let content = match meta {
Meta::Path(_) | Meta::List(_) => continue,
Meta::NameValue(mnv) => mnv,
};
if let syn::Lit::Str(inner) = content.lit {
if !in_commented_sql_block && inner.value().trim() == "```pgxsql" {
in_commented_sql_block = true;
} else if in_commented_sql_block && inner.value().trim() == "```" {
in_commented_sql_block = false;
} else if in_commented_sql_block {
let sql = retval.get_or_insert_with(String::default);
let line = inner.value().trim_start().replace(
"@FUNCTION_NAME@",
&*(self.func.sig.ident.to_string() + "_wrapper"),
) + "\n";
sql.push_str(&*line);
}
}
}
}
}
retval
}
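    /// Collects the operator attributes (`opname`, `commutator`, `negator`, `join`,
    /// `restrict`, `hashes`, `merges`) applied to the function into a `PgOperator`,
    /// if any are present.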
fn operator(&self) -> Option<PgOperator> {
let mut skel = Option::<PgOperator>::default();
for attr in &self.func.attrs {
let last_segment = attr.path.segments.last().unwrap();
match last_segment.ident.to_string().as_str() {
"opname" => {
let attr: PgxOperatorOpName = syn::parse2(attr.tokens.clone())
.expect(&format!("Unable to parse {:?}", &attr.tokens));
skel.get_or_insert_with(Default::default)
.opname
.get_or_insert(attr);
}
"commutator" => {
let attr: PgxOperatorAttributeWithIdent = syn::parse2(attr.tokens.clone())
.expect(&format!("Unable to parse {:?}", &attr.tokens));
skel.get_or_insert_with(Default::default)
.commutator
.get_or_insert(attr);
}
"negator" => {
let attr: PgxOperatorAttributeWithIdent = syn::parse2(attr.tokens.clone())
.expect(&format!("Unable to parse {:?}", &attr.tokens));
skel.get_or_insert_with(Default::default)
.negator
.get_or_insert(attr);
}
"join" => {
let attr: PgxOperatorAttributeWithIdent = syn::parse2(attr.tokens.clone())
.expect(&format!("Unable to parse {:?}", &attr.tokens));
skel.get_or_insert_with(Default::default)
.join
.get_or_insert(attr);
}
"restrict" => {
let attr: PgxOperatorAttributeWithIdent = syn::parse2(attr.tokens.clone())
.expect(&format!("Unable to parse {:?}", &attr.tokens));
skel.get_or_insert_with(Default::default)
.restrict
.get_or_insert(attr);
}
"hashes" => {
skel.get_or_insert_with(Default::default).hashes = true;
}
"merges" => {
skel.get_or_insert_with(Default::default).merges = true;
}
_ => (),
}
}
skel
}
fn search_path(&self) -> Option<SearchPathList> {
self.func
.attrs
.iter()
.find(|f| {
f.path
.segments
.first()
.map(|f| f.ident == Ident::new("search_path", Span::call_site()))
.unwrap_or_default()
})
.and_then(|attr| Some(attr.parse_args::<SearchPathList>().unwrap()))
}
fn inputs(&self) -> eyre::Result<Vec<Argument>> {
let mut args = Vec::default();
for input in &self.func.sig.inputs {
let arg = Argument::build(input.clone())
.wrap_err_with(|| format!("Could not map {:?}", input))?;
if let Some(arg) = arg {
args.push(arg);
}
}
Ok(args)
}
fn returns(&self) -> Result<Returning, eyre::Error> {
Returning::try_from(&self.func.sig.output)
}
pub fn new(attr: TokenStream2, item: TokenStream2) -> Result<Self, syn::Error> {
let attrs = syn::parse2::<PgxAttributes>(attr).ok();
let func = syn::parse2::<syn::ItemFn>(item)?;
Ok(Self { attrs, func })
}
}
impl ToTokens for PgExtern {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let ident = &self.func.sig.ident;
let name = self.name();
let schema = self.schema();
let schema_iter = schema.iter();
let extern_attrs = self.extern_attrs();
let search_path = self.search_path().into_iter();
let inputs = self.inputs().unwrap();
let returns = match self.returns() {
Ok(returns) => returns,
Err(e) => {
let msg = e.to_string();
tokens.append_all(quote! {
std::compile_error!(#msg);
});
return;
}
};
let operator = self.operator().into_iter();
let overridden = self.overridden().into_iter();
let sql_graph_entity_fn_name =
syn::Ident::new(&format!("__pgx_internals_fn_{}", ident), Span::call_site());
let inv = quote! {
#[no_mangle]
pub extern "C" fn #sql_graph_entity_fn_name() -> pgx::datum::sql_entity_graph::SqlGraphEntity {
use core::any::TypeId;
let submission = pgx::datum::sql_entity_graph::PgExternEntity {
name: #name,
unaliased_name: stringify!(#ident),
schema: None#( .unwrap_or(Some(#schema_iter)) )*,
file: file!(),
line: line!(),
module_path: core::module_path!(),
full_path: concat!(core::module_path!(), "::", stringify!(#ident)),
extern_attrs: #extern_attrs,
search_path: None#( .unwrap_or(Some(vec![#search_path])) )*,
fn_args: vec![#(#inputs),*],
fn_return: #returns,
operator: None#( .unwrap_or(Some(#operator)) )*,
overridden: None#( .unwrap_or(Some(#overridden)) )*,
};
pgx::datum::sql_entity_graph::SqlGraphEntity::Function(submission)
}
};
tokens.append_all(inv);
}
}
impl Parse for PgExtern {
fn parse(input: ParseStream) -> Result<Self, syn::Error> {
Ok(Self {
attrs: input.parse().ok(),
func: input.parse()?,
})
}
}
| 36.968254 | 114 | 0.488514 |
e2c4b8d7bba2103adf366abaae5b172d23754ddd | 1,541 | use indices::*;
use range::*;
use std::io::*;
use std::process::*;
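/// Runs an external command as the "interestingness" test: candidate input lines are
/// piped to the command's stdin and its exit status decides whether a reduction step
/// is kept.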
pub struct Searcher {
command: Command,
}
impl Searcher {
pub fn from_str(s: Vec<String>) -> Searcher {
let mut iter = s.into_iter();
let cmd = iter.next().unwrap();
let mut c = Command::new(cmd);
c.args(&iter.collect::<Vec<_>>());
c.stdin(Stdio::piped());
Searcher { command: c }
}
pub fn test(&mut self, test_lines: &[String]) -> bool {
let mut child = self.command.spawn().expect("could not spawn child process");
{
let mut child_stdin = child.stdin.take().unwrap();
for line in test_lines {
child_stdin
.write_all((line.clone() + "\n").as_bytes())
.expect("failed to write to process stdin");
}
}
child.wait().expect("process did not finish").success()
}
pub fn search<R>(&mut self, full: Vec<String>) -> Option<Vec<String>>
where
R: RangeStrategy,
{
let mut range = Range::<R>::new(full.len());
loop {
match range.next() {
Done(o) => return o.map(|indices| full.select_indices(&indices)),
RunTest(indices) => {
if self.test(&full.clone().select_indices(&indices)) {
range.test_passed(indices);
} else {
range.test_failed(indices);
}
}
}
}
}
}
| 28.537037 | 85 | 0.486048 |
9c1add51ce0e3391e74adfaac326bdc520fc5d69 | 2,133 | use crate::{
cmd,
data::{Nav, State},
};
use druid::{commands, platform_menus, LocalizedString, MenuDesc, MenuItem, SysMods};
#[allow(unused_mut)]
pub fn make_menu() -> MenuDesc<State> {
let mut menu = MenuDesc::empty();
#[cfg(target_os = "macos")]
{
menu = menu.append(make_mac_app_menu());
}
menu.append(make_edit_menu()).append(make_view_menu())
}
fn make_mac_app_menu() -> MenuDesc<State> {
MenuDesc::new(LocalizedString::new("macos-menu-application-menu"))
.append(platform_menus::mac::application::preferences())
.append_separator()
.append(
// TODO:
// This is just overriding `platform_menus::mac::application::quit()`
// because l10n is a bit stupid now.
MenuItem::new(
LocalizedString::new("macos-menu-quit").with_placeholder("Quit Psst"),
commands::QUIT_APP,
)
.hotkey(SysMods::Cmd, "q"),
)
}
fn make_edit_menu() -> MenuDesc<State> {
MenuDesc::new(LocalizedString::new("common-menu-edit-menu").with_placeholder("Edit"))
.append(platform_menus::common::cut())
.append(platform_menus::common::copy())
.append(platform_menus::common::paste())
}
fn make_view_menu() -> MenuDesc<State> {
MenuDesc::new(LocalizedString::new("menu-view-menu").with_placeholder("View"))
.append(
MenuItem::new(
LocalizedString::new("menu-item-home").with_placeholder("Home"),
cmd::NAVIGATE_TO.with(Nav::Home),
)
.hotkey(SysMods::Cmd, "1"),
)
.append(
MenuItem::new(
LocalizedString::new("menu-item-library").with_placeholder("Library"),
cmd::NAVIGATE_TO.with(Nav::Library),
)
.hotkey(SysMods::Cmd, "2"),
)
.append(
MenuItem::new(
LocalizedString::new("menu-item-search").with_placeholder("Search..."),
cmd::SET_FOCUS.to(cmd::WIDGET_SEARCH_INPUT),
)
.hotkey(SysMods::Cmd, "l"),
)
}
| 33.328125 | 89 | 0.566338 |
fe32f1de0c52053306ee6286d2b188a1432f7adc | 16,144 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! See docs in build/expr/mod.rs
use build::{BlockAnd, BlockAndExtension, Builder};
use build::expr::category::{Category, RvalueFunc};
use build::scope::LoopScope;
use hair::*;
use rustc::middle::region::CodeExtent;
use rustc::ty;
use rustc::mir::repr::*;
use syntax::codemap::Span;
impl<'a,'tcx> Builder<'a,'tcx> {
/// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized.
pub fn into_expr(&mut self,
destination: &Lvalue<'tcx>,
mut block: BasicBlock,
expr: Expr<'tcx>)
-> BlockAnd<()>
{
debug!("into_expr(destination={:?}, block={:?}, expr={:?})",
destination, block, expr);
// since we frequently have to reference `self` from within a
// closure, where `self` would be shadowed, it's easier to
// just use the name `this` uniformly
let this = self;
let expr_span = expr.span;
let scope_id = this.innermost_scope_id();
match expr.kind {
ExprKind::Scope { extent, value } => {
this.in_scope(extent, block, |this, _| this.into(destination, block, value))
}
ExprKind::Block { body: ast_block } => {
this.ast_block(destination, block, ast_block)
}
ExprKind::Match { discriminant, arms } => {
this.match_expr(destination, expr_span, block, discriminant, arms)
}
ExprKind::If { condition: cond_expr, then: then_expr, otherwise: else_expr } => {
let operand = unpack!(block = this.as_operand(block, cond_expr));
let mut then_block = this.cfg.start_new_block();
let mut else_block = this.cfg.start_new_block();
this.cfg.terminate(block, scope_id, expr_span, TerminatorKind::If {
cond: operand,
targets: (then_block, else_block)
});
unpack!(then_block = this.into(destination, then_block, then_expr));
else_block = if let Some(else_expr) = else_expr {
unpack!(this.into(destination, else_block, else_expr))
} else {
// Body of the `if` expression without an `else` clause must return `()`, thus
// we implicitly generate a `else {}` if it is not specified.
let scope_id = this.innermost_scope_id();
this.cfg.push_assign_unit(else_block, scope_id, expr_span, destination);
else_block
};
let join_block = this.cfg.start_new_block();
this.cfg.terminate(then_block,
scope_id,
expr_span,
TerminatorKind::Goto { target: join_block });
this.cfg.terminate(else_block,
scope_id,
expr_span,
TerminatorKind::Goto { target: join_block });
join_block.unit()
}
ExprKind::LogicalOp { op, lhs, rhs } => {
// And:
//
// [block: If(lhs)] -true-> [else_block: If(rhs)] -true-> [true_block]
// | | (false)
// +----------false-----------+------------------> [false_block]
//
// Or:
//
// [block: If(lhs)] -false-> [else_block: If(rhs)] -true-> [true_block]
// | | (false)
// +----------true------------+-------------------> [false_block]
let (true_block, false_block, mut else_block, join_block) =
(this.cfg.start_new_block(), this.cfg.start_new_block(),
this.cfg.start_new_block(), this.cfg.start_new_block());
let lhs = unpack!(block = this.as_operand(block, lhs));
let blocks = match op {
LogicalOp::And => (else_block, false_block),
LogicalOp::Or => (true_block, else_block),
};
this.cfg.terminate(block,
scope_id,
expr_span,
TerminatorKind::If { cond: lhs, targets: blocks });
let rhs = unpack!(else_block = this.as_operand(else_block, rhs));
this.cfg.terminate(else_block, scope_id, expr_span, TerminatorKind::If {
cond: rhs,
targets: (true_block, false_block)
});
this.cfg.push_assign_constant(
true_block, scope_id, expr_span, destination,
Constant {
span: expr_span,
ty: this.hir.bool_ty(),
literal: this.hir.true_literal(),
});
this.cfg.push_assign_constant(
false_block, scope_id, expr_span, destination,
Constant {
span: expr_span,
ty: this.hir.bool_ty(),
literal: this.hir.false_literal(),
});
this.cfg.terminate(true_block,
scope_id,
expr_span,
TerminatorKind::Goto { target: join_block });
this.cfg.terminate(false_block,
scope_id,
expr_span,
TerminatorKind::Goto { target: join_block });
join_block.unit()
}
ExprKind::Loop { condition: opt_cond_expr, body } => {
// [block] --> [loop_block] ~~> [loop_block_end] -1-> [exit_block]
// ^ |
// | 0
// | |
// | v
// [body_block_end] <~~~ [body_block]
//
// If `opt_cond_expr` is `None`, then the graph is somewhat simplified:
//
// [block] --> [loop_block / body_block ] ~~> [body_block_end] [exit_block]
// ^ |
// | |
// +--------------------------+
//
let loop_block = this.cfg.start_new_block();
let exit_block = this.cfg.start_new_block();
// start the loop
this.cfg.terminate(block,
scope_id,
expr_span,
TerminatorKind::Goto { target: loop_block });
let might_break = this.in_loop_scope(loop_block, exit_block, move |this| {
// conduct the test, if necessary
let body_block;
if let Some(cond_expr) = opt_cond_expr {
// This loop has a condition, ergo its exit_block is reachable.
this.find_loop_scope(expr_span, None).might_break = true;
let loop_block_end;
let cond = unpack!(loop_block_end = this.as_operand(loop_block, cond_expr));
body_block = this.cfg.start_new_block();
this.cfg.terminate(loop_block_end,
scope_id,
expr_span,
TerminatorKind::If {
cond: cond,
targets: (body_block, exit_block)
});
} else {
body_block = loop_block;
}
                // The “return” value of the loop body must always be a unit, but we cannot
                // reuse that as the “return” value of the whole loop expression, because some
// loops are diverging (e.g. `loop {}`). Thus, we introduce a unit temporary as
// the destination for the loop body and assign the loop’s own “return” value
// immediately after the iteration is finished.
let tmp = this.get_unit_temp();
// Execute the body, branching back to the test.
let body_block_end = unpack!(this.into(&tmp, body_block, body));
this.cfg.terminate(body_block_end,
scope_id,
expr_span,
TerminatorKind::Goto { target: loop_block });
});
// If the loop may reach its exit_block, we assign an empty tuple to the
// destination to keep the MIR well-formed.
if might_break {
this.cfg.push_assign_unit(exit_block, scope_id, expr_span, destination);
}
exit_block.unit()
}
ExprKind::Assign { lhs, rhs } => {
// Note: we evaluate assignments right-to-left. This
// is better for borrowck interaction with overloaded
// operators like x[j] = x[i].
let lhs = this.hir.mirror(lhs);
let lhs_span = lhs.span;
let rhs = unpack!(block = this.as_operand(block, rhs));
let lhs = unpack!(block = this.as_lvalue(block, lhs));
unpack!(block = this.build_drop(block, lhs_span, lhs.clone()));
this.cfg.push_assign(block, scope_id, expr_span, &lhs, Rvalue::Use(rhs));
block.unit()
}
ExprKind::AssignOp { op, lhs, rhs } => {
// FIXME(#28160) there is an interesting semantics
// question raised here -- should we "freeze" the
// value of the lhs here? I'm inclined to think not,
// since it seems closer to the semantics of the
// overloaded version, which takes `&mut self`. This
// only affects weird things like `x += {x += 1; x}`
// -- is that equal to `x + (x + 1)` or `2*(x+1)`?
// As above, RTL.
let rhs = unpack!(block = this.as_operand(block, rhs));
let lhs = unpack!(block = this.as_lvalue(block, lhs));
// we don't have to drop prior contents or anything
// because AssignOp is only legal for Copy types
// (overloaded ops should be desugared into a call).
this.cfg.push_assign(block, scope_id, expr_span, &lhs,
Rvalue::BinaryOp(op,
Operand::Consume(lhs.clone()),
rhs));
block.unit()
}
ExprKind::Continue { label } => {
this.break_or_continue(expr_span, label, block,
|loop_scope| loop_scope.continue_block)
}
ExprKind::Break { label } => {
this.break_or_continue(expr_span, label, block, |loop_scope| {
loop_scope.might_break = true;
loop_scope.break_block
})
}
ExprKind::Return { value } => {
block = match value {
Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)),
None => {
this.cfg.push_assign_unit(block, scope_id,
expr_span, &Lvalue::ReturnPointer);
block
}
};
let extent = this.extent_of_return_scope();
let return_block = this.return_block();
this.exit_scope(expr_span, extent, block, return_block);
this.cfg.start_new_block().unit()
}
ExprKind::Call { ty, fun, args } => {
let diverges = match ty.sty {
ty::TyFnDef(_, _, ref f) | ty::TyFnPtr(ref f) => {
f.sig.0.output.diverges()
}
_ => false
};
let fun = unpack!(block = this.as_operand(block, fun));
let args: Vec<_> =
args.into_iter()
.map(|arg| unpack!(block = this.as_operand(block, arg)))
.collect();
let success = this.cfg.start_new_block();
let cleanup = this.diverge_cleanup();
this.cfg.terminate(block, scope_id, expr_span, TerminatorKind::Call {
func: fun,
args: args,
cleanup: cleanup,
destination: if diverges {
None
} else {
Some ((destination.clone(), success))
}
});
success.unit()
}
// these are the cases that are more naturally handled by some other mode
ExprKind::Unary { .. } |
ExprKind::Binary { .. } |
ExprKind::Box { .. } |
ExprKind::Cast { .. } |
ExprKind::ReifyFnPointer { .. } |
ExprKind::UnsafeFnPointer { .. } |
ExprKind::Unsize { .. } |
ExprKind::Repeat { .. } |
ExprKind::Borrow { .. } |
ExprKind::VarRef { .. } |
ExprKind::SelfRef |
ExprKind::StaticRef { .. } |
ExprKind::Vec { .. } |
ExprKind::Tuple { .. } |
ExprKind::Adt { .. } |
ExprKind::Closure { .. } |
ExprKind::Index { .. } |
ExprKind::Deref { .. } |
ExprKind::Literal { .. } |
ExprKind::InlineAsm { .. } |
ExprKind::Field { .. } => {
debug_assert!(match Category::of(&expr.kind).unwrap() {
Category::Rvalue(RvalueFunc::Into) => false,
_ => true,
});
let rvalue = unpack!(block = this.as_rvalue(block, expr));
this.cfg.push_assign(block, scope_id, expr_span, destination, rvalue);
block.unit()
}
}
}
fn break_or_continue<F>(&mut self,
span: Span,
label: Option<CodeExtent>,
block: BasicBlock,
exit_selector: F)
-> BlockAnd<()>
where F: FnOnce(&mut LoopScope) -> BasicBlock
{
let (exit_block, extent) = {
let loop_scope = self.find_loop_scope(span, label);
(exit_selector(loop_scope), loop_scope.extent)
};
self.exit_scope(span, extent, block, exit_block);
self.cfg.start_new_block().unit()
}
}
| 46.524496 | 100 | 0.445181 |
ac2fc5b2fe562035d3387fc3b1bfaedf7be74bd5 | 12,731 | #![recursion_limit = "1024"]
// Copyright 2019. The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#![cfg_attr(not(debug_assertions), deny(unused_variables))]
#![cfg_attr(not(debug_assertions), deny(unused_imports))]
#![cfg_attr(not(debug_assertions), deny(dead_code))]
#![cfg_attr(not(debug_assertions), deny(unused_extern_crates))]
#![deny(unused_must_use)]
#![deny(unreachable_patterns)]
#![deny(unknown_lints)]
// Enable 'impl Trait' type aliases
#![feature(type_alias_impl_trait)]
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣠⣶⣿⣿⣿⣿⣶⣦⣀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣤⣾⣿⡿⠋⠀⠀⠀⠀⠉⠛⠿⣿⣿⣶⣤⣀⠀⠀⠀⠀⠀⠀⢰⣿⣾⣾⣾⣾⣾⣾⣾⣾⣾⣿⠀⠀⠀⣾⣾⣾⡀⠀⠀⠀⠀⢰⣾⣾⣾⣾⣿⣶⣶⡀⠀⠀⠀⢸⣾⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⣿⣿⣿⣶⣶⣤⣄⡀⠀⠀⠀⠀⠀⠉⠛⣿⣿⠀⠀⠀⠀⠀⠈⠉⠉⠉⠉⣿⣿⡏⠉⠉⠉⠉⠀⠀⣰⣿⣿⣿⣿⠀⠀⠀⠀⢸⣿⣿⠉⠉⠉⠛⣿⣿⡆⠀⠀⢸⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⠀⠀⠀⠈⠙⣿⡿⠿⣿⣿⣿⣶⣶⣤⣤⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⡇⠀⠀⠀⠀⠀⢠⣿⣿⠃⣿⣿⣷⠀⠀⠀⢸⣿⣿⣀⣀⣀⣴⣿⣿⠃⠀⠀⢸⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⣤⠀⠀⠀⢸⣿⡟⠀⠀⠀⠀⠀⠉⣽⣿⣿⠟⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⡇⠀⠀⠀⠀⠀⣿⣿⣿⣤⣬⣿⣿⣆⠀⠀⢸⣿⣿⣿⣿⣿⡿⠟⠉⠀⠀⠀⢸⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⣿⣿⣤⠀⢸⣿⡟⠀⠀⠀⣠⣾⣿⡿⠋⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⡇⠀⠀⠀⠀⣾⣿⣿⠿⠿⠿⢿⣿⣿⡀⠀⢸⣿⣿⠙⣿⣿⣿⣄⠀⠀⠀⠀⢸⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⣿⣿⣼⣿⡟⣀⣶⣿⡿⠋⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣿⡇⠀⠀⠀⣰⣿⣿⠃⠀⠀⠀⠀⣿⣿⣿⠀⢸⣿⣿⠀⠀⠙⣿⣿⣷⣄⠀⠀⢸⣿⣿⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⣿⣿⣿⣿⠛⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
/// ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀
///
/// # Tari Base Node
///
/// The Tari Base Node is a major application in the Tari Network
///
/// ## Running the Tari Base Node
///
/// Tor needs to be started first
/// ```
/// tor --allow-missing-torrc --ignore-missing-torrc \
/// --clientonly 1 --socksport 9050 --controlport 127.0.0.1:9051 \
/// --log "notice stdout" --clientuseipv6 1
/// ```
///
/// For the first run
/// ```cargo run tari_base_node -- --create-id```
///
/// Subsequent runs
/// ```cargo run tari_base_node```
///
/// ## Commands
///
/// `help` - Displays a list of commands
/// `get-balance` - Displays the balance of the wallet (available, pending incoming, pending outgoing)
/// `send-tari` - Sends Tari; the amount needs to be specified, followed by the destination (public key or emoji id) and an optional message
/// `get-chain-metadata` - Lists information about the blockchain of this Base Node
/// `list-peers` - Lists information about peers known by this base node
/// `ban-peer` - Bans a peer
/// `unban-peer` - Removes a ban for a peer
/// `list-connections` - Lists active connections to this Base Node
/// `list-headers` - Lists header information. Either the first and last header heights need to be specified, or the number of headers from the top
/// `check-db` - Checks the blockchain database for missing blocks and headers
/// `calc-timing` - Calculates the average time taken to mine a given range of blocks
/// `discover-peer` - Attempts to discover a peer on the network; a public key or emoji id needs to be specified
/// `get-block` - Retrieves a block; the height of the block needs to be specified
/// `get-mempool-stats` - Displays information about the mempool
/// `get-mempool-state` - Displays state information for the mempool
/// `whoami` - Displays identity information about this Base Node and its wallet
/// `quit` - Exits the Base Node
/// `exit` - Same as quit
/// Used to display tabulated data
#[macro_use]
mod table;
#[macro_use]
mod macros;
mod bootstrap;
mod builder;
mod cli;
mod command_handler;
mod grpc;
mod parser;
mod recovery;
mod utils;
use crate::command_handler::CommandHandler;
use futures::{pin_mut, FutureExt};
use log::*;
use parser::Parser;
use rustyline::{config::OutputStreamType, error::ReadlineError, CompletionType, Config, EditMode, Editor};
use std::{
net::SocketAddr,
sync::Arc,
time::{Duration, Instant},
};
use tari_app_utilities::{
identity_management::setup_node_identity,
initialization::init_configuration,
utilities::{setup_runtime, ExitCodes},
};
use tari_common::configuration::bootstrap::ApplicationType;
use tari_comms::peer_manager::PeerFeatures;
use tari_shutdown::{Shutdown, ShutdownSignal};
use tokio::{task, time};
use tonic::transport::Server;
pub const LOG_TARGET: &str = "base_node::app";
/// Application entry point
fn main() {
match main_inner() {
Ok(_) => std::process::exit(0),
Err(exit_code) => std::process::exit(exit_code.as_i32()),
}
}
/// Sets up the base node and runs the cli_loop
fn main_inner() -> Result<(), ExitCodes> {
let (bootstrap, node_config, _) = init_configuration(ApplicationType::BaseNode)?;
debug!(target: LOG_TARGET, "Using configuration: {:?}", node_config);
// Set up the Tokio runtime
let mut rt = setup_runtime(&node_config).map_err(|err| {
error!(target: LOG_TARGET, "{}", err);
ExitCodes::UnknownError
})?;
// Load or create the Node identity
let node_identity = setup_node_identity(
&node_config.base_node_identity_file,
&node_config.public_address,
bootstrap.create_id,
PeerFeatures::COMMUNICATION_NODE,
)?;
// Exit if create_id or init arguments were run
if bootstrap.create_id {
info!(
target: LOG_TARGET,
"Base node's node ID created at '{}'. Done.",
node_config.base_node_identity_file.to_string_lossy(),
);
return Ok(());
}
// This is the main and only shutdown trigger for the system.
let shutdown = Shutdown::new();
if bootstrap.rebuild_db {
info!(target: LOG_TARGET, "Node is in recovery mode, entering recovery");
recovery::initiate_recover_db(&node_config)?;
let _ = rt.block_on(recovery::run_recovery(&node_config));
return Ok(());
};
if bootstrap.init {
info!(target: LOG_TARGET, "Default configuration created. Done.");
return Ok(());
}
// Build, node, build!
let ctx = rt
.block_on(builder::configure_and_initialize_node(
&node_config,
node_identity,
shutdown.to_signal(),
bootstrap.clean_orphans_db,
))
.map_err(|err| {
error!(target: LOG_TARGET, "{}", err);
ExitCodes::UnknownError
})?;
if node_config.grpc_enabled {
        // Go, gRPC, go go
let grpc = crate::grpc::base_node_grpc_server::BaseNodeGrpcServer::new(
rt.handle().clone(),
ctx.local_node(),
ctx.local_mempool(),
node_config.clone(),
ctx.state_machine(),
ctx.base_node_comms().peer_manager(),
);
rt.spawn(run_grpc(grpc, node_config.grpc_base_node_address, shutdown.to_signal()));
}
// Run, node, run!
let base_node_handle;
let command_handler = Arc::new(CommandHandler::new(rt.handle().clone(), &ctx));
if !bootstrap.daemon_mode {
let parser = Parser::new(command_handler.clone());
cli::print_banner(parser.get_commands(), 3);
base_node_handle = rt.spawn(ctx.run());
info!(
target: LOG_TARGET,
"Node has been successfully configured and initialized. Starting CLI loop."
);
rt.spawn(cli_loop(parser, command_handler, shutdown));
} else {
println!("Node has been successfully configured and initialized in daemon mode.");
base_node_handle = rt.spawn(ctx.run());
}
match rt.block_on(base_node_handle) {
Ok(_) => info!(target: LOG_TARGET, "Node shutdown successfully."),
Err(e) => error!(target: LOG_TARGET, "Node has crashed: {}", e),
}
// Wait until tasks have shut down
drop(rt);
println!("Goodbye!");
Ok(())
}
/// Runs the gRPC server
async fn run_grpc(
grpc: crate::grpc::base_node_grpc_server::BaseNodeGrpcServer,
grpc_address: SocketAddr,
interrupt_signal: ShutdownSignal,
) -> Result<(), anyhow::Error>
{
info!(target: LOG_TARGET, "Starting GRPC on {}", grpc_address);
Server::builder()
.add_service(tari_app_grpc::tari_rpc::base_node_server::BaseNodeServer::new(grpc))
.serve_with_shutdown(grpc_address, interrupt_signal.map(|_| ()))
.await
.map_err(|err| {
error!(target: LOG_TARGET, "GRPC encountered an error:{}", err);
err
})?;
info!(target: LOG_TARGET, "Stopping GRPC");
Ok(())
}
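/// Reads a single command line from the user on a separate task, returning the line
/// together with the editor so its history is preserved across calls. Ctrl+C is
/// reported as an error and triggers shutdown of the node.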
async fn read_command(mut rustyline: Editor<Parser>) -> Result<(String, Editor<Parser>), String> {
task::spawn(async {
let readline = rustyline.readline(">> ");
match readline {
Ok(line) => {
rustyline.add_history_entry(line.as_str());
Ok((line, rustyline))
},
Err(ReadlineError::Interrupted) => {
// shutdown section. Will shutdown all interfaces when ctrl-c was pressed
println!("The node is shutting down because Ctrl+C was received...");
info!(
target: LOG_TARGET,
"Termination signal received from user. Shutting node down."
);
Err("Node is shutting down".to_string())
},
Err(err) => {
println!("Error: {:?}", err);
Err(err.to_string())
},
}
})
.await
.expect("Could not spawn rustyline task")
}
/// Runs the interactive CLI loop of the Base Node
/// ## Parameters
/// `parser` - The parser to process input commands
/// `command_handler` - The handler used to periodically report node status
/// `shutdown` - The trigger for shutting down
///
/// ## Returns
/// Doesn't return anything
async fn cli_loop(parser: Parser, command_handler: Arc<CommandHandler>, mut shutdown: Shutdown) {
let cli_config = Config::builder()
.history_ignore_space(true)
.completion_type(CompletionType::List)
.edit_mode(EditMode::Emacs)
.output_stream(OutputStreamType::Stdout)
.build();
let mut rustyline = Editor::with_config(cli_config);
rustyline.set_helper(Some(parser));
let read_command_fut = read_command(rustyline).fuse();
pin_mut!(read_command_fut);
let mut shutdown_signal = shutdown.to_signal();
let start_time = Instant::now();
loop {
let delay_time = if start_time.elapsed() < Duration::from_secs(120) {
Duration::from_secs(2)
} else if start_time.elapsed() < Duration::from_secs(300) {
Duration::from_secs(10)
} else {
Duration::from_secs(30)
};
let mut interval = time::delay_for(delay_time).fuse();
futures::select! {
res = read_command_fut => {
match res {
Ok((line, mut rustyline)) => {
if let Some(p) = rustyline.helper_mut().as_deref_mut() {
p.handle_command(line.as_str(), &mut shutdown)
}
read_command_fut.set(read_command(rustyline).fuse());
},
Err(err) => {
// This happens when the node is shutting down.
debug!(target: LOG_TARGET, "Could not read line from rustyline:{}", err);
break;
}
}
},
() = interval => {
command_handler.status();
},
_ = shutdown_signal => {
break;
}
}
}
}
| 37.889881 | 120 | 0.596732 |
f9a220f2113146751380df006e0a708805bb4047 | 6,573 | //! No-op trace impls
//!
//! This implementation is returned as the global tracer if no `Tracer`
//! has been set. It is also useful for testing purposes as it is intended
//! to have minimal resource utilization and runtime impact.
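//!
//! A minimal sketch of exercising the no-op tracer directly (with this crate's
//! `trace::Tracer` and `trace::Span` traits in scope; the exact import paths are
//! assumed):
//!
//! ```ignore
//! let tracer = NoopTracer::new();
//! let span = tracer.start_with_context("operation", Context::new());
//! assert!(!span.is_recording()); // the no-op span never records anything
//! ```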
use crate::trace::TraceResult;
use crate::{
sdk::export::trace::{ExportResult, SpanData, SpanExporter},
trace,
trace::{SpanBuilder, TraceContextExt, TraceFlags, TraceState},
Context, KeyValue,
};
use async_trait::async_trait;
use std::time::SystemTime;
/// A no-op instance of a `TracerProvider`.
#[derive(Clone, Debug, Default)]
pub struct NoopTracerProvider {
_private: (),
}
impl NoopTracerProvider {
/// Create a new no-op tracer provider
pub fn new() -> Self {
NoopTracerProvider { _private: () }
}
}
impl trace::TracerProvider for NoopTracerProvider {
type Tracer = NoopTracer;
/// Returns a new `NoopTracer` instance.
fn tracer(&self, _name: &'static str, _version: Option<&'static str>) -> Self::Tracer {
NoopTracer::new()
}
    /// Return an empty `Vec` as there are no span processors in `NoopTracerProvider`
fn force_flush(&self) -> Vec<TraceResult<()>> {
Vec::new()
}
}
/// A no-op instance of a `Span`.
#[derive(Clone, Debug)]
pub struct NoopSpan {
span_context: trace::SpanContext,
}
impl Default for NoopSpan {
fn default() -> Self {
NoopSpan::new()
}
}
impl NoopSpan {
/// Creates a new `NoopSpan` instance.
pub fn new() -> Self {
NoopSpan {
span_context: trace::SpanContext::new(
trace::TraceId::invalid(),
trace::SpanId::invalid(),
TraceFlags::default(),
false,
TraceState::default(),
),
}
}
}
impl trace::Span for NoopSpan {
/// Ignores all events
fn add_event(&mut self, _name: String, _attributes: Vec<KeyValue>) {
// Ignore
}
/// Ignores all events with timestamps
fn add_event_with_timestamp(
&mut self,
_name: String,
_timestamp: SystemTime,
_attributes: Vec<KeyValue>,
) {
// Ignored
}
/// Returns an invalid `SpanContext`.
fn span_context(&self) -> &trace::SpanContext {
&self.span_context
}
/// Returns false, signifying that this span is never recording.
fn is_recording(&self) -> bool {
false
}
/// Ignores all attributes
fn set_attribute(&mut self, _attribute: KeyValue) {
// Ignored
}
/// Ignores status
fn set_status(&mut self, _code: trace::StatusCode, _message: String) {
// Ignored
}
/// Ignores name updates
fn update_name(&mut self, _new_name: String) {
// Ignored
}
/// Ignores `Span` endings
fn end_with_timestamp(&mut self, _timestamp: SystemTime) {
// Ignored
}
}
/// A no-op instance of a `Tracer`.
#[derive(Clone, Debug, Default)]
pub struct NoopTracer {
_private: (),
}
impl NoopTracer {
/// Create a new no-op tracer
pub fn new() -> Self {
NoopTracer { _private: () }
}
}
impl trace::Tracer for NoopTracer {
type Span = NoopSpan;
/// Returns a `NoopSpan` as they are always invalid.
fn invalid(&self) -> Self::Span {
NoopSpan::new()
}
/// Starts a new `NoopSpan` with a given context.
///
    /// If the context contains a valid span, its span context is propagated.
fn start_with_context<T>(&self, name: T, cx: Context) -> Self::Span
where
T: Into<std::borrow::Cow<'static, str>>,
{
self.build(SpanBuilder::from_name_with_context(name, cx))
}
/// Starts a `SpanBuilder`.
fn span_builder<T>(&self, name: T) -> trace::SpanBuilder
where
T: Into<std::borrow::Cow<'static, str>>,
{
trace::SpanBuilder::from_name(name)
}
/// Builds a `NoopSpan` from a `SpanBuilder`.
///
/// If the span builder or the context's current span contains a valid span context, it is
/// propagated.
fn build(&self, builder: trace::SpanBuilder) -> Self::Span {
let cx = builder.parent_context;
if cx.has_active_span() {
NoopSpan {
span_context: cx.span().span_context().clone(),
}
} else {
self.invalid()
}
}
}
/// A no-op instance of an [`SpanExporter`].
///
/// [`SpanExporter`]: crate::sdk::export::trace::SpanExporter
#[derive(Debug, Default)]
pub struct NoopSpanExporter {
_private: (),
}
impl NoopSpanExporter {
/// Create a new noop span exporter
pub fn new() -> Self {
NoopSpanExporter { _private: () }
}
}
#[async_trait]
impl SpanExporter for NoopSpanExporter {
async fn export(&mut self, _batch: Vec<SpanData>) -> ExportResult {
Ok(())
}
}
#[cfg(all(test, feature = "testing", feature = "trace"))]
mod tests {
use super::*;
use crate::testing::trace::TestSpan;
use crate::trace::{self, Span, Tracer};
fn valid_span_context() -> trace::SpanContext {
trace::SpanContext::new(
trace::TraceId::from_u128(42),
trace::SpanId::from_u64(42),
trace::TraceFlags::default(),
true,
TraceState::default(),
)
}
#[test]
fn noop_tracer_defaults_to_invalid_span() {
let tracer = NoopTracer::new();
let span = tracer.start_with_context("foo", Context::new());
assert!(!span.span_context().is_valid());
}
#[test]
fn noop_tracer_propagates_valid_span_context_from_builder() {
let tracer = NoopTracer::new();
let builder = tracer
.span_builder("foo")
.with_parent_context(Context::new().with_span(TestSpan(valid_span_context())));
let span = tracer.build(builder);
assert!(span.span_context().is_valid());
}
#[test]
fn noop_tracer_propagates_valid_span_context_from_explicitly_specified_context() {
let tracer = NoopTracer::new();
let cx = Context::new().with_span(NoopSpan {
span_context: valid_span_context(),
});
let span = tracer.start_with_context("foo", cx);
assert!(span.span_context().is_valid());
}
#[test]
fn noop_tracer_propagates_valid_span_context_from_remote_span_context() {
let tracer = NoopTracer::new();
let cx = Context::new().with_remote_span_context(valid_span_context());
let span = tracer.start_with_context("foo", cx);
assert!(span.span_context().is_valid());
}
}
| 26.828571 | 94 | 0.602008 |
eba410ae2bfd10b5cd5a0e3b38d49c3af870486c | 3,788 | //! Non-interactive Zero Knowledge proof for correct decryption
//! share generation. We use the notation and scheme presented in
//! Figure 12 of the Treasury voting protocol spec.
//!
//! The proof is the following:
//!
//! `NIZK{(pk, C, D), (sk): D = e1^sk AND pk = g^sk}`
//!
//! where `C = (e1, e2)`.
//! This can be translated to the following proof:
//!
//! `NIZK{(g, pk, e1, D), (sk): D = e1^sk AND pk = g^sk}`
//!
//! which is a proof of discrete log equality. We can therefore prove
//! correct decryption using a proof of discrete log equality.
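//!
//! A minimal usage sketch, mirroring the unit tests at the bottom of this file
//! (the `Keypair`, ciphertext and RNG values come from outside this module):
//!
//! ```ignore
//! let share = &ciphertext.e1 * &keypair.secret_key.sk;
//! let proof = Zkp::generate(&ciphertext, &keypair.public_key, &share, &keypair.secret_key, &mut rng);
//! assert!(proof.verify(&ciphertext, &share, &keypair.public_key));
//! ```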
use super::super::dl_equality::DleqZkp;
use crate::cryptography::{Ciphertext, PublicKey, SecretKey};
use crate::GroupElement;
use rand::{CryptoRng, RngCore};
/// Proof of correct decryption.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Zkp {
vshare_proof: DleqZkp,
}
impl Zkp {
pub(crate) const PROOF_SIZE: usize = DleqZkp::BYTES_LEN;
/// Generate a valid share zero knowledge proof.
pub fn generate<R>(
c: &Ciphertext,
pk: &PublicKey,
share: &GroupElement,
sk: &SecretKey,
rng: &mut R,
) -> Self
where
R: CryptoRng + RngCore,
{
let vshare_proof = DleqZkp::generate(
&GroupElement::generator(),
&c.e1,
&pk.pk,
&share,
&sk.sk,
rng,
);
Zkp { vshare_proof }
}
/// Verify a valid share zero knowledge proof
pub fn verify(&self, c: &Ciphertext, share: &GroupElement, pk: &PublicKey) -> bool {
self.vshare_proof
.verify(&GroupElement::generator(), &c.e1, &pk.pk, &share)
}
pub fn to_bytes(&self) -> [u8; Self::PROOF_SIZE] {
self.vshare_proof.to_bytes()
}
#[allow(dead_code)]
pub fn write_to_bytes(&self, output: &mut [u8]) {
assert_eq!(output.len(), Self::PROOF_SIZE);
self.vshare_proof.write_to_bytes(output);
}
pub fn from_bytes(slice: &[u8]) -> Option<Self> {
if slice.len() != Self::PROOF_SIZE {
return None;
}
let vshare_proof = DleqZkp::from_bytes(slice)?;
let proof = Zkp { vshare_proof };
Some(proof)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::cryptography::Keypair;
use rand_chacha::ChaCha20Rng;
use rand_core::SeedableRng;
#[test]
pub fn it_works() {
let mut r = ChaCha20Rng::from_seed([0u8; 32]);
let keypair = Keypair::generate(&mut r);
let plaintext = GroupElement::from_hash(&[0u8]);
let ciphertext = keypair.public_key.encrypt_point(&plaintext, &mut r);
let share = &ciphertext.e1 * &keypair.secret_key.sk;
let proof = Zkp::generate(
&ciphertext,
&keypair.public_key,
&share,
&keypair.secret_key,
&mut r,
);
let verified = proof.verify(&ciphertext, &share, &keypair.public_key);
assert!(verified);
}
#[test]
fn serialisation() {
let mut r = ChaCha20Rng::from_seed([0u8; 32]);
let keypair = Keypair::generate(&mut r);
let plaintext = GroupElement::from_hash(&[0u8]);
let ciphertext = keypair.public_key.encrypt_point(&plaintext, &mut r);
let share = &ciphertext.e1 * &keypair.secret_key.sk;
let proof = Zkp::generate(
&ciphertext,
&keypair.public_key,
&share,
&keypair.secret_key,
&mut r,
);
let serialised_proof = proof.to_bytes();
let deseriliased_proof = Zkp::from_bytes(&serialised_proof);
assert!(deseriliased_proof.is_some());
assert!(deseriliased_proof
.unwrap()
.verify(&ciphertext, &share, &keypair.public_key));
}
}
| 28.059259 | 88 | 0.582629 |
146cad00085a8033b4ee79e02bacc963a42720b3 | 92 | #[derive(Debug, Eq, PartialEq, Clone, Copy)]
pub enum Network {
Mainnet,
Testnet,
}
| 15.333333 | 44 | 0.641304 |
50f2df4abaa32f61142ec92f7384678f5d1bf4d4 | 12,629 | use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
#[test]
fn vpternlogq_1() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM2)),
operand2: Some(Direct(XMM6)),
operand3: Some(Direct(XMM4)),
operand4: Some(Literal8(91)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K6),
broadcast: None,
},
&[98, 243, 205, 142, 37, 212, 91],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_2() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM1)),
operand2: Some(Direct(XMM4)),
operand3: Some(IndirectScaledIndexedDisplaced(
EAX,
EDX,
Four,
1150027217,
Some(OperandSize::Xmmword),
None,
)),
operand4: Some(Literal8(122)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K2),
broadcast: None,
},
&[98, 243, 221, 138, 37, 140, 144, 209, 5, 140, 68, 122],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_3() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM4)),
operand2: Some(Direct(XMM7)),
operand3: Some(IndirectScaledIndexedDisplaced(
EAX,
EBX,
Four,
1366727432,
Some(OperandSize::Qword),
None,
)),
operand4: Some(Literal8(76)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K7),
broadcast: Some(BroadcastMode::Broadcast1To2),
},
&[98, 243, 197, 159, 37, 164, 152, 8, 155, 118, 81, 76],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_4() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM23)),
operand2: Some(Direct(XMM0)),
operand3: Some(Direct(XMM25)),
operand4: Some(Literal8(77)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K7),
broadcast: None,
},
&[98, 131, 253, 143, 37, 249, 77],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_5() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM21)),
operand2: Some(Direct(XMM25)),
operand3: Some(IndirectScaledDisplaced(
RCX,
Eight,
1036717633,
Some(OperandSize::Xmmword),
None,
)),
operand4: Some(Literal8(30)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K7),
broadcast: None,
},
&[98, 227, 181, 135, 37, 44, 205, 65, 14, 203, 61, 30],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_6() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(XMM24)),
operand2: Some(Direct(XMM28)),
operand3: Some(IndirectScaledIndexedDisplaced(
RSI,
RSI,
Eight,
2076984848,
Some(OperandSize::Qword),
None,
)),
operand4: Some(Literal8(50)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K3),
broadcast: Some(BroadcastMode::Broadcast1To2),
},
&[98, 99, 157, 147, 37, 132, 246, 16, 70, 204, 123, 50],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_7() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM6)),
operand2: Some(Direct(YMM7)),
operand3: Some(Direct(YMM7)),
operand4: Some(Literal8(88)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K5),
broadcast: None,
},
&[98, 243, 197, 173, 37, 247, 88],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_8() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM4)),
operand2: Some(Direct(YMM3)),
operand3: Some(IndirectScaledDisplaced(
EDI,
Eight,
820498244,
Some(OperandSize::Ymmword),
None,
)),
operand4: Some(Literal8(37)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K4),
broadcast: None,
},
&[98, 243, 229, 172, 37, 36, 253, 68, 207, 231, 48, 37],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_9() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM3)),
operand2: Some(Direct(YMM1)),
operand3: Some(IndirectScaledIndexed(
EDX,
EAX,
Two,
Some(OperandSize::Qword),
None,
)),
operand4: Some(Literal8(52)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K5),
broadcast: Some(BroadcastMode::Broadcast1To4),
},
&[98, 243, 245, 189, 37, 28, 66, 52],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_10() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM28)),
operand2: Some(Direct(YMM28)),
operand3: Some(Direct(YMM20)),
operand4: Some(Literal8(124)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K2),
broadcast: None,
},
&[98, 35, 157, 162, 37, 228, 124],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_11() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM20)),
operand2: Some(Direct(YMM20)),
operand3: Some(IndirectScaledDisplaced(
RCX,
Four,
719779310,
Some(OperandSize::Ymmword),
None,
)),
operand4: Some(Literal8(97)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K1),
broadcast: None,
},
&[98, 227, 221, 161, 37, 36, 141, 238, 245, 230, 42, 97],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_12() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(YMM15)),
operand2: Some(Direct(YMM30)),
operand3: Some(IndirectScaledIndexedDisplaced(
RAX,
RSI,
Four,
410403112,
Some(OperandSize::Qword),
None,
)),
operand4: Some(Literal8(39)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K5),
broadcast: Some(BroadcastMode::Broadcast1To4),
},
&[98, 115, 141, 181, 37, 188, 176, 40, 65, 118, 24, 39],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_13() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM3)),
operand2: Some(Direct(ZMM7)),
operand3: Some(Direct(ZMM6)),
operand4: Some(Literal8(17)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K2),
broadcast: None,
},
&[98, 243, 197, 202, 37, 222, 17],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_14() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM4)),
operand2: Some(Direct(ZMM1)),
operand3: Some(IndirectScaledIndexed(
ECX,
ESI,
Eight,
Some(OperandSize::Zmmword),
None,
)),
operand4: Some(Literal8(126)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K4),
broadcast: None,
},
&[98, 243, 245, 204, 37, 36, 241, 126],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_15() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM4)),
operand2: Some(Direct(ZMM6)),
operand3: Some(Indirect(EAX, Some(OperandSize::Qword), None)),
operand4: Some(Literal8(108)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K2),
broadcast: Some(BroadcastMode::Broadcast1To8),
},
&[98, 243, 205, 218, 37, 32, 108],
OperandSize::Dword,
)
}
#[test]
fn vpternlogq_16() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM2)),
operand2: Some(Direct(ZMM24)),
operand3: Some(Direct(ZMM11)),
operand4: Some(Literal8(20)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K7),
broadcast: None,
},
&[98, 211, 189, 199, 37, 211, 20],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_17() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM6)),
operand2: Some(Direct(ZMM4)),
operand3: Some(IndirectScaledDisplaced(
RDI,
Two,
543214567,
Some(OperandSize::Zmmword),
None,
)),
operand4: Some(Literal8(31)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K4),
broadcast: None,
},
&[98, 243, 221, 204, 37, 52, 125, 231, 203, 96, 32, 31],
OperandSize::Qword,
)
}
#[test]
fn vpternlogq_18() {
run_test(
&Instruction {
mnemonic: Mnemonic::VPTERNLOGQ,
operand1: Some(Direct(ZMM19)),
operand2: Some(Direct(ZMM10)),
operand3: Some(IndirectDisplaced(
RAX,
952645621,
Some(OperandSize::Qword),
None,
)),
operand4: Some(Literal8(101)),
lock: false,
rounding_mode: None,
merge_mode: Some(MergeMode::Zero),
sae: false,
mask: Some(MaskReg::K7),
broadcast: Some(BroadcastMode::Broadcast1To8),
},
&[98, 227, 173, 223, 37, 152, 245, 55, 200, 56, 101],
OperandSize::Qword,
)
}
| 27.756044 | 95 | 0.479373 |
dd0234416130fa026fda6cb13a7b4551d76ef0be | 3,153 | use async_trait::async_trait;
use log::trace;
use nvim_rs::{Handler, Neovim};
use rmpv::Value;
use crate::bridge::clipboard::{get_remote_clipboard, set_remote_clipboard};
#[cfg(windows)]
use crate::bridge::ui_commands::{ParallelCommand, UiCommand};
use crate::{
bridge::{events::parse_redraw_event, TxWrapper},
editor::EditorCommand,
error_handling::ResultPanicExplanation,
event_aggregator::EVENT_AGGREGATOR,
running_tracker::*,
settings::SETTINGS,
};
#[derive(Clone)]
pub struct NeovimHandler {}
impl NeovimHandler {
pub fn new() -> Self {
Self {}
}
}
#[async_trait]
impl Handler for NeovimHandler {
type Writer = TxWrapper;
async fn handle_request(
&self,
event_name: String,
_arguments: Vec<Value>,
neovim: Neovim<TxWrapper>,
) -> Result<Value, Value> {
trace!("Neovim request: {:?}", &event_name);
match event_name.as_ref() {
"neovide.get_clipboard" => {
let endline_type = neovim
.command_output("set ff")
.await
.ok()
.and_then(|format| {
let mut s = format.split('=');
s.next();
s.next().map(String::from)
});
get_remote_clipboard(endline_type.as_deref())
.map_err(|_| Value::from("cannot get remote clipboard content"))
}
_ => Ok(Value::from("rpcrequest not handled")),
}
}
async fn handle_notify(
&self,
event_name: String,
arguments: Vec<Value>,
_neovim: Neovim<TxWrapper>,
) {
trace!("Neovim notification: {:?}", &event_name);
match event_name.as_ref() {
"redraw" => {
for events in arguments {
let parsed_events = parse_redraw_event(events)
.unwrap_or_explained_panic("Could not parse event from neovim");
for parsed_event in parsed_events {
EVENT_AGGREGATOR.send(EditorCommand::NeovimRedrawEvent(parsed_event));
}
}
}
"setting_changed" => {
SETTINGS.handle_changed_notification(arguments);
}
"neovide.quit" => {
let error_code = arguments[0]
.as_i64()
.expect("Could not parse error code from neovim");
RUNNING_TRACKER.quit_with_code(error_code as i32, "Quit from neovim");
}
#[cfg(windows)]
"neovide.register_right_click" => {
EVENT_AGGREGATOR.send(UiCommand::Parallel(ParallelCommand::RegisterRightClick));
}
#[cfg(windows)]
"neovide.unregister_right_click" => {
EVENT_AGGREGATOR.send(UiCommand::Parallel(ParallelCommand::UnregisterRightClick));
}
"neovide.set_clipboard" => {
set_remote_clipboard(arguments).ok();
}
_ => {}
}
}
}
| 31.217822 | 98 | 0.531874 |
0871bedc83d11b94d68e29618da384a365d99160 | 6,143 | // Copyright 2019 The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::handles::ServiceHandlesFuture;
use derive_error::Error;
use futures::{Future, FutureExt};
use std::pin::Pin;
use tari_shutdown::ShutdownSignal;
use tokio::runtime;
#[derive(Debug, Error)]
pub enum ServiceInitializationError {
// General error for failed initialization.
// Specialized errors should be added and used if appropriate.
#[error(msg_embedded, non_std, no_from)]
Failed(String),
}
/// Implementors of this trait will initialize a service.
/// The `StackBuilder` builds impls of this trait.
pub trait ServiceInitializer {
/// The future returned from the initialize function
type Future: Future<Output = Result<(), ServiceInitializationError>>;
/// Async initialization code for a service
fn initialize(
&mut self,
executor: runtime::Handle,
handles_fut: ServiceHandlesFuture,
shutdown: ShutdownSignal,
) -> Self::Future;
/// Create a boxed version of this ServiceInitializer.
fn boxed(self) -> BoxedServiceInitializer
where
Self: Sized + Send + 'static,
Self::Future: Send + 'static,
{
BoxedServiceInitializer::new(self)
}
}
/// Implementation of ServiceInitializer for any function matching the signature of `ServiceInitializer::initialize`
/// This allows the following "short-hand" syntax to be used:
///
/// ```edition2018
/// # use tari_service_framework::handles::ServiceHandlesFuture;
/// # use tari_shutdown::ShutdownSignal;
/// # use tokio::runtime;
/// let my_initializer = |executor: runtime::Handle,
///                       handles_fut: ServiceHandlesFuture,
///                       shutdown: ShutdownSignal| {
/// // initialization code
/// futures::future::ready(Result::<_, ()>::Ok(()))
/// };
/// ```
impl<TFunc, TFut> ServiceInitializer for TFunc
where
TFunc: FnMut(runtime::Handle, ServiceHandlesFuture, ShutdownSignal) -> TFut,
TFut: Future<Output = Result<(), ServiceInitializationError>>,
{
type Future = TFut;
fn initialize(
&mut self,
executor: runtime::Handle,
handles: ServiceHandlesFuture,
shutdown: ShutdownSignal,
) -> Self::Future
{
(self)(executor, handles, shutdown)
}
}
//---------------------------------- Boxed Service Initializer --------------------------------------------//
// The following code is essentially a substitute for async trait functions. Any initializer can
// be converted to the boxed form by using ServiceInitializer::boxed(). This is done for you when
// using `StackBuilder::add_initializer`.
/// A pinned, boxed form of the future resulting from a boxed ServiceInitializer
type ServiceInitializationFuture = Pin<Box<dyn Future<Output = Result<(), ServiceInitializationError>> + Send>>;
/// This trait mirrors the ServiceInitializer trait, with the exception
/// of always returning a boxed future (aliased ServiceInitializationFuture type),
/// therefore it does not need the `Future` associated type. This makes it
/// possible to store a boxed dyn `AbstractServiceInitializer<TName, TExec>`.
pub trait AbstractServiceInitializer {
fn initialize(
&mut self,
executor: runtime::Handle,
handles_fut: ServiceHandlesFuture,
shutdown: ShutdownSignal,
) -> ServiceInitializationFuture;
}
/// AbstractServiceInitializer impl for every T: ServiceInitializer.
impl<T> AbstractServiceInitializer for T
where
T: ServiceInitializer,
T::Future: Send + 'static,
{
fn initialize(
&mut self,
executor: runtime::Handle,
handles: ServiceHandlesFuture,
shutdown: ShutdownSignal,
) -> ServiceInitializationFuture
{
let initialization = self.initialize(executor, handles, shutdown);
initialization.boxed() as ServiceInitializationFuture
}
}
/// A concrete boxed version of a ServiceInitializer. This makes it possible
/// to have a collection of ServiceInitializers which return various boxed future types.
/// This type is used in StackBuilder's internal vec.
pub struct BoxedServiceInitializer {
inner: Box<dyn AbstractServiceInitializer + Send + 'static>,
}
impl BoxedServiceInitializer {
pub(super) fn new<T>(initializer: T) -> Self
where
T: ServiceInitializer + Send + 'static,
T::Future: Send + 'static,
{
Self {
inner: Box::new(initializer),
}
}
}
impl ServiceInitializer for BoxedServiceInitializer {
type Future = ServiceInitializationFuture;
fn initialize(
&mut self,
executor: runtime::Handle,
handles_fut: ServiceHandlesFuture,
shutdown: ShutdownSignal,
) -> Self::Future
{
self.inner.initialize(executor, handles_fut, shutdown)
}
}
| 37.919753 | 118 | 0.7091 |
1e9c059b9de4edb1a844d848854e41267899014d | 4,862 | #[cfg(not(feature = "library"))]
use cosmwasm_std::entry_point;
use cosmwasm_std::{to_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult};
use crate::error::ContractError;
use crate::msg::{CountResponse, ExecuteMsg, InstantiateMsg, QueryMsg};
use crate::state::{State, STATE};
#[cfg_attr(not(feature = "library"), entry_point)]
pub fn instantiate(
deps: DepsMut,
_env: Env,
info: MessageInfo,
msg: InstantiateMsg,
) -> Result<Response, ContractError> {
let state = State {
count: msg.count,
owner: info.sender.clone(),
};
STATE.save(deps.storage, &state)?;
Ok(Response::new()
.add_attribute("method", "instantiate")
.add_attribute("owner", info.sender)
.add_attribute("count", msg.count.to_string()))
}
#[cfg_attr(not(feature = "library"), entry_point)]
pub fn execute(
deps: DepsMut,
_env: Env,
info: MessageInfo,
msg: ExecuteMsg,
) -> Result<Response, ContractError> {
match msg {
ExecuteMsg::Borrow {} => try_increment(deps),
ExecuteMsg::Increment {} => try_increment(deps),
ExecuteMsg::Reset { count } => try_reset(deps, info, count),
}
}
pub fn try_increment(deps: DepsMut) -> Result<Response, ContractError> {
STATE.update(deps.storage, |mut state| -> Result<_, ContractError> {
state.count += 1;
Ok(state)
})?;
Ok(Response::new().add_attribute("method", "try_increment"))
}
pub fn try_reset(deps: DepsMut, info: MessageInfo, count: i32) -> Result<Response, ContractError> {
STATE.update(deps.storage, |mut state| -> Result<_, ContractError> {
if info.sender != state.owner {
return Err(ContractError::Unauthorized {});
}
state.count = count;
Ok(state)
})?;
Ok(Response::new().add_attribute("method", "reset"))
}
#[cfg_attr(not(feature = "library"), entry_point)]
pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {
match msg {
QueryMsg::GetCount {} => to_binary(&query_count(deps)?),
}
}
fn query_count(deps: Deps) -> StdResult<CountResponse> {
let state = STATE.load(deps.storage)?;
Ok(CountResponse { count: state.count })
}
#[cfg(test)]
mod tests {
use super::*;
use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info};
use cosmwasm_std::{coins, from_binary};
#[test]
fn proper_initialization() {
let mut deps = mock_dependencies(&[]);
let msg = InstantiateMsg { count: 17 };
let info = mock_info("creator", &coins(1000, "earth"));
// we can just call .unwrap() to assert this was a success
let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
assert_eq!(0, res.messages.len());
// it worked, let's query the state
let res = query(deps.as_ref(), mock_env(), QueryMsg::GetCount {}).unwrap();
let value: CountResponse = from_binary(&res).unwrap();
assert_eq!(17, value.count);
}
#[test]
fn increment() {
let mut deps = mock_dependencies(&coins(2, "token"));
let msg = InstantiateMsg { count: 17 };
let info = mock_info("creator", &coins(2, "token"));
let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
// beneficiary can release it
let info = mock_info("anyone", &coins(2, "token"));
let msg = ExecuteMsg::Increment {};
let _res = execute(deps.as_mut(), mock_env(), info, msg).unwrap();
// should increase counter by 1
let res = query(deps.as_ref(), mock_env(), QueryMsg::GetCount {}).unwrap();
let value: CountResponse = from_binary(&res).unwrap();
assert_eq!(18, value.count);
}
#[test]
fn reset() {
let mut deps = mock_dependencies(&coins(2, "token"));
let msg = InstantiateMsg { count: 17 };
let info = mock_info("creator", &coins(2, "token"));
let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
// beneficiary can release it
let unauth_info = mock_info("anyone", &coins(2, "token"));
let msg = ExecuteMsg::Reset { count: 5 };
let res = execute(deps.as_mut(), mock_env(), unauth_info, msg);
match res {
Err(ContractError::Unauthorized {}) => {}
_ => panic!("Must return unauthorized error"),
}
// only the original creator can reset the counter
let auth_info = mock_info("creator", &coins(2, "token"));
let msg = ExecuteMsg::Reset { count: 5 };
let _res = execute(deps.as_mut(), mock_env(), auth_info, msg).unwrap();
// should now be 5
let res = query(deps.as_ref(), mock_env(), QueryMsg::GetCount {}).unwrap();
let value: CountResponse = from_binary(&res).unwrap();
assert_eq!(5, value.count);
}
}
| 34 | 99 | 0.60942 |
8a105e4578c7fdc4d2a32699f2e2eb99a828a74e | 2,101 | //
// Copyright (c) 2019 RepliXio Ltd. All rights reserved.
// Use is subject to license terms.
//
#![doc(html_root_url = "https://docs.rs/pmdk-sys/0.9.2")]
use libc::{c_char, c_int, c_uint, c_void};
use libc::{mode_t, size_t};
mod base;
pub mod obj;
pub use base::PMEMoid;
#[link(name = "pmem", kind = "static")]
extern "C" {
// Most commonly used functions:
pub fn pmem_is_pmem(addr: *const c_void, len: size_t) -> c_int;
pub fn pmem_persist(addr: *const c_void, len: size_t);
pub fn pmem_msync(addr: *const c_void, len: size_t) -> c_int;
pub fn pmem_map_file(
path: *const c_char,
len: size_t,
flags: c_int,
mode: mode_t,
mapped_lenp: *mut size_t,
is_pmemp: *mut c_int,
) -> *mut c_void;
pub fn pmem_unmap(addr: *mut c_void, len: size_t) -> c_int;
// Partial flushing operations:
pub fn pmem_flush(addr: *const c_void, len: size_t);
pub fn pmem_drain();
pub fn pmem_has_hw_drain() -> c_int;
// Copying to persistent memory:
pub fn pmem_memmove_persist(
pmemdest: *mut c_void,
src: *const c_void,
len: size_t,
) -> *mut c_void;
pub fn pmem_memcpy_persist(
pmemdest: *mut c_void,
src: *const c_void,
len: size_t,
) -> *mut c_void;
pub fn pmem_memset_persist(pmemdest: *mut c_void, c: c_int, len: size_t) -> *mut c_void;
pub fn pmem_memmove_nodrain(
pmemdest: *mut c_void,
src: *const c_void,
len: size_t,
) -> *mut c_void;
pub fn pmem_memcpy_nodrain(
pmemdest: *mut c_void,
src: *const c_void,
len: size_t,
) -> *mut c_void;
pub fn pmem_memset_nodrain(pmemdest: *mut c_void, c: c_int, len: size_t) -> *mut c_void;
// Library API versioning:
pub fn pmem_check_version(major_required: c_uint, minor_required: c_uint) -> *const c_char;
// Error handling:
pub fn pmem_errormsg() -> *const c_char;
}
#[link(name = "pmempool", kind = "static")]
extern "C" {
// Utils
pub fn pmempool_rm(path: *const c_char, flags: c_int) -> c_int;
}
| 26.2625 | 95 | 0.616849 |
cc676f8a234e4ae878640da973505aafa0a0ae82 | 1,773 | use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::SeekFrom;
use pretty_hex::{config_hex, HexConfig};
use calypso_filety::ccff;
use ccff::{hl::*, ll::CcffHeader};
fn main() {
let mut args = env::args();
if let Some(file) = args.nth(1) {
let mut file = File::open(file).expect("Failed to open file");
let mut container =
ContainerFile::decode(CcffHeader::read(&mut file).expect("Failed to load CCFF file"));
file.seek(SeekFrom::Start(0)).expect("Failed to seek file");
container
.read_all(&mut file)
.expect("Failed to read CCFF section data");
dump_cc(container);
} else {
eprintln!("usage: readccff <FILE>");
return;
}
}
fn dump_cc(container: ContainerFile) {
println!("=== metadata ===");
println!("=> ABI version: {}", container.get_abi());
println!("=> File type: {}", container.get_filety());
println!("=== sections ===");
for (idx, section) in container.sections().enumerate() {
let config = HexConfig {
title: false,
ascii: true,
group: 0,
width: 16,
..HexConfig::simple()
};
println!(":: idx {}", idx);
println!(" => name: {}", section.get_name());
println!(" => type: 0x{:x}", section.get_stype());
println!(" => flags: 0x{:x}", section.get_flags());
println!(" => offset: 0x{:x}", section.get_offset().unwrap());
println!(
" => size: 0x{:x}",
section.get_data().unwrap().len()
);
println!(
" => hexdump:\n{}",
config_hex(§ion.get_data().unwrap(), config)
);
}
}
| 31.660714 | 98 | 0.513254 |
9bbd991671273ea35bf07001920ffe0e71e59234 | 2,343 | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use failure::Error;
use fuchsia_wayland_core as wl;
use wayland::*;
/// The set of pixel formats that will be announced to clients.
const SUPPORTED_PIXEL_FORMATS: &[wl_shm::Format] =
&[wl_shm::Format::Argb8888, wl_shm::Format::Xrgb8888];
pub struct Shm;
impl Shm {
pub fn new() -> Self {
Shm
}
/// Posts an event back to the client for each supported SHM pixel format.
pub fn post_formats(&self, this: wl::ObjectId, client: &wl::Client) -> Result<(), Error> {
for format in SUPPORTED_PIXEL_FORMATS.iter() {
client.post(this, WlShmEvent::Format { format: *format })?;
}
Ok(())
}
}
impl wl::RequestReceiver<WlShm> for Shm {
fn receive(
_this: wl::ObjectRef<Self>, request: WlShmRequest, client: &mut wl::Client,
) -> Result<(), Error> {
let WlShmRequest::CreatePool { id, size, .. } = request;
println!("wl_shm::create_pool(id: {}, fd, size: {})", id.id(), size);
id.implement(client, ShmPool::new())?;
Ok(())
}
}
pub struct ShmPool;
impl ShmPool {
pub fn new() -> Self {
ShmPool
}
}
impl wl::RequestReceiver<WlShmPool> for ShmPool {
fn receive(
_this: wl::ObjectRef<Self>, request: WlShmPoolRequest, _client: &mut wl::Client,
) -> Result<(), Error> {
match request {
WlShmPoolRequest::Destroy {} => {
println!("wl_shm_pool::destroy");
}
WlShmPoolRequest::CreateBuffer {
id,
offset,
width,
height,
stride,
format,
} => {
println!(
"wl_shm_pool::create_buffer(id: {}, offset: {}, width: {}, height: {}, \
stride: {}, format: {:?})",
id.id(),
offset,
width,
height,
stride,
format
);
}
WlShmPoolRequest::Resize { size } => {
println!("wl_shm_pool::resize(size: {})", size);
}
}
Ok(())
}
}
| 28.573171 | 94 | 0.509176 |
e9b31194219b7177568da2e521213391e2fa86db | 1,412 | // Copyright 2016 Alex Regueiro
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use libc::{self, c_char, c_void};
use std::ffi::CStr;
use std::ptr;
pub fn error_message(ptr: *const c_char) -> String {
let cstr = unsafe { CStr::from_ptr(ptr as *const _) };
let s = String::from_utf8_lossy(cstr.to_bytes()).into_owned();
unsafe {
libc::free(ptr as *mut c_void);
}
s
}
pub fn opt_bytes_to_ptr(opt: Option<&[u8]>) -> *const c_char {
match opt {
Some(v) => v.as_ptr() as *const c_char,
None => ptr::null(),
}
}
macro_rules! ffi_try {
( $($function:ident)::*( $( $arg:expr ),* ) ) => ({
let mut err: *mut ::libc::c_char = ::std::ptr::null_mut();
let result = $($function)::*($($arg),*, &mut err);
if !err.is_null() {
return Err(Error::new($crate::ffi_util::error_message(err)));
}
result
})
}
| 30.695652 | 75 | 0.628187 |
89079545f7016c4c62e76a2d97ee214bffbd863f | 1,306 | use crate::gui::WallpaperMessage;
use crate::style::{inactive_style, make_button};
use iced::{button, Row};
use std::collections::HashSet;
use wallapi::types::XYCombo;
#[derive(Debug, Clone)]
pub(crate) struct RatioMenu {
button_states: Vec<(XYCombo, button::State)>,
}
impl Default for RatioMenu {
fn default() -> Self {
let button_states = wallapi::types::ASPECT_RATIOS
.iter()
.map(|aspect_ratio| (aspect_ratio.clone(), button::State::new()))
.collect();
Self { button_states }
}
}
fn get_is_toggled(option: &XYCombo, selections: &Option<HashSet<XYCombo>>) -> bool {
match selections {
None => false,
Some(options) => options.contains(option),
}
}
impl RatioMenu {
pub(crate) fn build_ratio_row(
&mut self,
selected_ratios: &Option<HashSet<XYCombo>>,
) -> Row<WallpaperMessage> {
self.button_states
.iter_mut()
.fold(Row::new(), |row, (ratio, state)| {
row.push(
make_button(state, &ratio.to_string())
.style(inactive_style(get_is_toggled(ratio, selected_ratios)))
.on_press(WallpaperMessage::AspectRatioSelected(ratio.clone())),
)
})
}
}
| 29.022222 | 88 | 0.58193 |
2f69ab57db8fb81c34484629054385b2244a4414 | 11,365 | use std;
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::slice;
use std::vec;
use proc_macro2::{
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
};
#[cfg(feature = "printing")]
use quote::ToTokens;
#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
use crate::thread::ThreadBound;
/// The result of a Syn parser.
pub type Result<T> = std::result::Result<T, Error>;
/// Error returned when a Syn parser cannot parse the input tokens.
///
/// # Error reporting in proc macros
///
/// The correct way to report errors back to the compiler from a procedural
/// macro is by emitting an appropriately spanned invocation of
/// [`compile_error!`] in the generated code. This produces a better diagnostic
/// message than simply panicking the macro.
///
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
///
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
/// conversion to `compile_error!` automatically.
///
/// ```
/// extern crate proc_macro;
///
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
/// let args = parse_macro_input!(args as AttributeArgs);
/// let input = parse_macro_input!(input as ItemFn);
///
/// /* ... */
/// # TokenStream::new()
/// }
/// ```
///
/// For errors that arise later than the initial parsing stage, the
/// [`.to_compile_error()`] method can be used to perform an explicit conversion
/// to `compile_error!`.
///
/// [`.to_compile_error()`]: Error::to_compile_error
///
/// ```
/// # extern crate proc_macro;
/// #
/// # use proc_macro::TokenStream;
/// # use syn::{parse_macro_input, DeriveInput};
/// #
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_derive(MyDerive)]
/// # };
/// pub fn my_derive(input: TokenStream) -> TokenStream {
/// let input = parse_macro_input!(input as DeriveInput);
///
/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
/// expand::my_derive(input)
/// .unwrap_or_else(|err| err.to_compile_error())
/// .into()
/// }
/// #
/// # mod expand {
/// # use proc_macro2::TokenStream;
/// # use syn::{DeriveInput, Result};
/// #
/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
/// # unimplemented!()
/// # }
/// # }
/// ```
#[derive(Clone)]
pub struct Error {
messages: Vec<ErrorMessage>,
}
struct ErrorMessage {
// Span is implemented as an index into a thread-local interner to keep the
// size small. It is not safe to access from a different thread. We want
// errors to be Send and Sync to play nicely with the Failure crate, so pin
// the span we're given to its original thread and assume it is
// Span::call_site if accessed from any other thread.
start_span: ThreadBound<Span>,
end_span: ThreadBound<Span>,
message: String,
}
#[cfg(test)]
struct _Test
where
Error: Send + Sync;
impl Error {
/// Usually the [`ParseStream::error`] method will be used instead, which
/// automatically uses the correct span from the current position of the
/// parse stream.
///
/// Use `Error::new` when the error needs to be triggered on some span other
/// than where the parse stream is currently positioned.
///
/// [`ParseStream::error`]: crate::parse::ParseBuffer::error
///
/// # Example
///
/// ```
/// use syn::{Error, Ident, LitStr, Result, Token};
/// use syn::parse::ParseStream;
///
/// // Parses input that looks like `name = "string"` where the key must be
/// // the identifier `name` and the value may be any string literal.
/// // Returns the string literal.
/// fn parse_name(input: ParseStream) -> Result<LitStr> {
/// let name_token: Ident = input.parse()?;
/// if name_token != "name" {
/// // Trigger an error not on the current position of the stream,
/// // but on the position of the unexpected identifier.
/// return Err(Error::new(name_token.span(), "expected `name`"));
/// }
/// input.parse::<Token![=]>()?;
/// let s: LitStr = input.parse()?;
/// Ok(s)
/// }
/// ```
pub fn new<T: Display>(span: Span, message: T) -> Self {
Error {
messages: vec![ErrorMessage {
start_span: ThreadBound::new(span),
end_span: ThreadBound::new(span),
message: message.to_string(),
}],
}
}
/// Creates an error with the specified message spanning the given syntax
/// tree node.
///
/// Unlike the `Error::new` constructor, this constructor takes an argument
/// `tokens` which is a syntax tree node. This allows the resulting `Error`
/// to attempt to span all tokens inside of `tokens`. While you would
/// typically be able to use the `Spanned` trait with the above `Error::new`
/// constructor, implementation limitations today mean that
/// `Error::new_spanned` may provide a higher-quality error message on
/// stable Rust.
///
/// When in doubt it's recommended to stick to `Error::new` (or
/// `ParseStream::error`)!
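    ///
    /// # Example
    ///
    /// A minimal sketch; the `ItemFn` input and the "no async" rule here are
    /// hypothetical, chosen only to show an error spanning a whole syntax tree
    /// node (the `ItemFn` type requires the `full` feature):
    ///
    /// ```
    /// # use syn::{Error, ItemFn, Result};
    /// fn reject_async(input: &ItemFn) -> Result<()> {
    ///     if input.sig.asyncness.is_some() {
    ///         // The rendered error underlines the entire function signature.
    ///         return Err(Error::new_spanned(&input.sig, "async fn is not supported here"));
    ///     }
    ///     Ok(())
    /// }
    /// ```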
#[cfg(feature = "printing")]
pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
let mut iter = tokens.into_token_stream().into_iter();
let start = iter.next().map_or_else(Span::call_site, |t| t.span());
let end = iter.last().map_or(start, |t| t.span());
Error {
messages: vec![ErrorMessage {
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message: message.to_string(),
}],
}
}
/// The source location of the error.
///
/// Spans are not thread-safe so this function returns `Span::call_site()`
/// if called from a different thread than the one on which the `Error` was
/// originally created.
pub fn span(&self) -> Span {
let start = match self.messages[0].start_span.get() {
Some(span) => *span,
None => return Span::call_site(),
};
let end = match self.messages[0].end_span.get() {
Some(span) => *span,
None => return Span::call_site(),
};
start.join(end).unwrap_or(start)
}
/// Render the error as an invocation of [`compile_error!`].
///
/// The [`parse_macro_input!`] macro provides a convenient way to invoke
/// this method correctly in a procedural macro.
///
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
pub fn to_compile_error(&self) -> TokenStream {
self.messages
.iter()
.map(ErrorMessage::to_compile_error)
.collect()
}
/// Add another error message to self such that when `to_compile_error()` is
/// called, both errors will be emitted together.
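    ///
    /// # Example
    ///
    /// A minimal sketch; the two messages are placeholders:
    ///
    /// ```
    /// # use proc_macro2::Span;
    /// # use syn::Error;
    /// let mut error = Error::new(Span::call_site(), "first problem");
    /// error.combine(Error::new(Span::call_site(), "second problem"));
    /// // `error.to_compile_error()` now expands to two `compile_error!` invocations.
    /// ```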
pub fn combine(&mut self, another: Error) {
self.messages.extend(another.messages)
}
}
impl ErrorMessage {
fn to_compile_error(&self) -> TokenStream {
let start = self
.start_span
.get()
.cloned()
.unwrap_or_else(Span::call_site);
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
// compile_error!($message)
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("compile_error", start)),
TokenTree::Punct({
let mut punct = Punct::new('!', Spacing::Alone);
punct.set_span(start);
punct
}),
TokenTree::Group({
let mut group = Group::new(Delimiter::Brace, {
TokenStream::from_iter(vec![TokenTree::Literal({
let mut string = Literal::string(&self.message);
string.set_span(end);
string
})])
});
group.set_span(end);
group
}),
])
}
}
#[cfg(feature = "parsing")]
pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
if cursor.eof() {
Error::new(scope, format!("unexpected end of input, {}", message))
} else {
let span = crate::buffer::open_span_of_group(cursor);
Error::new(span, message)
}
}
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
Error {
messages: vec![ErrorMessage {
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message: message.to_string(),
}],
}
}
impl Debug for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.messages.len() == 1 {
formatter
.debug_tuple("Error")
.field(&self.messages[0])
.finish()
} else {
formatter
.debug_tuple("Error")
.field(&self.messages)
.finish()
}
}
}
impl Debug for ErrorMessage {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.message, formatter)
}
}
impl Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(&self.messages[0].message)
}
}
impl Clone for ErrorMessage {
fn clone(&self) -> Self {
let start = self
.start_span
.get()
.cloned()
.unwrap_or_else(Span::call_site);
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
ErrorMessage {
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message: self.message.clone(),
}
}
}
impl std::error::Error for Error {
fn description(&self) -> &str {
"parse error"
}
}
impl From<LexError> for Error {
fn from(err: LexError) -> Self {
Error::new(Span::call_site(), format!("{:?}", err))
}
}
impl IntoIterator for Error {
type Item = Error;
type IntoIter = IntoIter;
fn into_iter(self) -> Self::IntoIter {
IntoIter {
messages: self.messages.into_iter(),
}
}
}
pub struct IntoIter {
messages: vec::IntoIter<ErrorMessage>,
}
impl Iterator for IntoIter {
type Item = Error;
fn next(&mut self) -> Option<Self::Item> {
Some(Error {
messages: vec![self.messages.next()?],
})
}
}
impl<'a> IntoIterator for &'a Error {
type Item = Error;
type IntoIter = Iter<'a>;
fn into_iter(self) -> Self::IntoIter {
Iter {
messages: self.messages.iter(),
}
}
}
pub struct Iter<'a> {
messages: slice::Iter<'a, ErrorMessage>,
}
impl<'a> Iterator for Iter<'a> {
type Item = Error;
fn next(&mut self) -> Option<Self::Item> {
Some(Error {
messages: vec![self.messages.next()?.clone()],
})
}
}
| 30.799458 | 93 | 0.577475 |
9ce1d594262f65a1dd590a1bfb4f5198a2305330 | 230 | #![allow(unused_variables)]
use std::mem;
pub fn chars() {
println!("\n>> {}\n", "Chars");
let single_character = 'x';
println!("{} is char, size = {} bytes", single_character, mem::size_of_val(&single_character));
} | 25.555556 | 99 | 0.617391 |
39eba0825fc3c351c77c8e2d81471d1bda069497 | 37,956 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for 64-bit floats (`f64` type)
#![allow(missing_doc)]
use prelude::*;
use cast;
use default::Default;
use from_str::FromStr;
use libc::{c_int};
use num::{FPCategory, FPNaN, FPInfinite , FPZero, FPSubnormal, FPNormal};
use num::{Zero, One, Bounded, strconv};
use num;
use intrinsics;
#[allow(dead_code)]
mod cmath {
use libc::{c_double, c_int};
#[link_name = "m"]
extern {
pub fn acos(n: c_double) -> c_double;
pub fn asin(n: c_double) -> c_double;
pub fn atan(n: c_double) -> c_double;
pub fn atan2(a: c_double, b: c_double) -> c_double;
pub fn cbrt(n: c_double) -> c_double;
pub fn cosh(n: c_double) -> c_double;
pub fn erf(n: c_double) -> c_double;
pub fn erfc(n: c_double) -> c_double;
pub fn expm1(n: c_double) -> c_double;
pub fn fdim(a: c_double, b: c_double) -> c_double;
pub fn fmax(a: c_double, b: c_double) -> c_double;
pub fn fmin(a: c_double, b: c_double) -> c_double;
pub fn fmod(a: c_double, b: c_double) -> c_double;
pub fn nextafter(x: c_double, y: c_double) -> c_double;
pub fn frexp(n: c_double, value: &mut c_int) -> c_double;
pub fn hypot(x: c_double, y: c_double) -> c_double;
pub fn ldexp(x: c_double, n: c_int) -> c_double;
pub fn logb(n: c_double) -> c_double;
pub fn log1p(n: c_double) -> c_double;
pub fn ilogb(n: c_double) -> c_int;
pub fn modf(n: c_double, iptr: &mut c_double) -> c_double;
pub fn sinh(n: c_double) -> c_double;
pub fn tan(n: c_double) -> c_double;
pub fn tanh(n: c_double) -> c_double;
pub fn tgamma(n: c_double) -> c_double;
// These are commonly only available for doubles
pub fn j0(n: c_double) -> c_double;
pub fn j1(n: c_double) -> c_double;
pub fn jn(i: c_int, n: c_double) -> c_double;
pub fn y0(n: c_double) -> c_double;
pub fn y1(n: c_double) -> c_double;
pub fn yn(i: c_int, n: c_double) -> c_double;
#[cfg(unix)]
pub fn lgamma_r(n: c_double, sign: &mut c_int) -> c_double;
#[cfg(windows)]
#[link_name="__lgamma_r"]
pub fn lgamma_r(n: c_double, sign: &mut c_int) -> c_double;
}
}
// FIXME (#1433): obtain these in a different way
// FIXME(#11621): These constants should be deprecated once CTFE is implemented
// in favour of calling their respective functions in `Bounded` and `Float`.
pub static RADIX: uint = 2u;
pub static MANTISSA_DIGITS: uint = 53u;
pub static DIGITS: uint = 15u;
pub static EPSILON: f64 = 2.2204460492503131e-16_f64;
pub static MIN_VALUE: f64 = 2.2250738585072014e-308_f64;
pub static MAX_VALUE: f64 = 1.7976931348623157e+308_f64;
pub static MIN_EXP: int = -1021;
pub static MAX_EXP: int = 1024;
pub static MIN_10_EXP: int = -307;
pub static MAX_10_EXP: int = 308;
pub static NAN: f64 = 0.0_f64/0.0_f64;
pub static INFINITY: f64 = 1.0_f64/0.0_f64;
pub static NEG_INFINITY: f64 = -1.0_f64/0.0_f64;
// FIXME (#1999): add is_normal, is_subnormal, and fpclassify
/// Various useful constants.
pub mod consts {
// FIXME (requires Issue #1433 to fix): replace with mathematical
// constants from cmath.
// FIXME(#11621): These constants should be deprecated once CTFE is
// implemented in favour of calling their respective functions in `Float`.
/// Archimedes' constant
pub static PI: f64 = 3.14159265358979323846264338327950288_f64;
/// pi/2.0
pub static FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
/// pi/4.0
pub static FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
/// 1.0/pi
pub static FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
/// 2.0/pi
pub static FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
/// 2.0/sqrt(pi)
pub static FRAC_2_SQRTPI: f64 = 1.12837916709551257389615890312154517_f64;
/// sqrt(2.0)
pub static SQRT2: f64 = 1.41421356237309504880168872420969808_f64;
/// 1.0/sqrt(2.0)
pub static FRAC_1_SQRT2: f64 = 0.707106781186547524400844362104849039_f64;
/// Euler's number
pub static E: f64 = 2.71828182845904523536028747135266250_f64;
/// log2(e)
pub static LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
/// log10(e)
pub static LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
/// ln(2.0)
pub static LN_2: f64 = 0.693147180559945309417232121458176568_f64;
/// ln(10.0)
pub static LN_10: f64 = 2.30258509299404568401799145468436421_f64;
}
impl Num for f64 {}
#[cfg(not(test))]
impl Eq for f64 {
#[inline]
fn eq(&self, other: &f64) -> bool { (*self) == (*other) }
}
#[cfg(not(test))]
impl Ord for f64 {
#[inline]
fn lt(&self, other: &f64) -> bool { (*self) < (*other) }
#[inline]
fn le(&self, other: &f64) -> bool { (*self) <= (*other) }
#[inline]
fn ge(&self, other: &f64) -> bool { (*self) >= (*other) }
#[inline]
fn gt(&self, other: &f64) -> bool { (*self) > (*other) }
}
impl Default for f64 {
#[inline]
fn default() -> f64 { 0.0 }
}
impl Zero for f64 {
#[inline]
fn zero() -> f64 { 0.0 }
/// Returns true if the number is equal to either `0.0` or `-0.0`
#[inline]
fn is_zero(&self) -> bool { *self == 0.0 || *self == -0.0 }
}
impl One for f64 {
#[inline]
fn one() -> f64 { 1.0 }
}
#[cfg(not(test))]
impl Add<f64,f64> for f64 {
#[inline]
fn add(&self, other: &f64) -> f64 { *self + *other }
}
#[cfg(not(test))]
impl Sub<f64,f64> for f64 {
#[inline]
fn sub(&self, other: &f64) -> f64 { *self - *other }
}
#[cfg(not(test))]
impl Mul<f64,f64> for f64 {
#[inline]
fn mul(&self, other: &f64) -> f64 { *self * *other }
}
#[cfg(not(test))]
impl Div<f64,f64> for f64 {
#[inline]
fn div(&self, other: &f64) -> f64 { *self / *other }
}
#[cfg(not(test))]
impl Rem<f64,f64> for f64 {
#[inline]
fn rem(&self, other: &f64) -> f64 {
unsafe { cmath::fmod(*self, *other) }
}
}
#[cfg(not(test))]
impl Neg<f64> for f64 {
#[inline]
fn neg(&self) -> f64 { -*self }
}
impl Signed for f64 {
/// Computes the absolute value. Returns `NAN` if the number is `NAN`.
#[inline]
fn abs(&self) -> f64 {
unsafe { intrinsics::fabsf64(*self) }
}
/// The positive difference of two numbers. Returns `0.0` if the number is less than or
    /// equal to `other`, otherwise the difference between `self` and `other` is returned.
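    ///
    /// For example (a small sketch of both branches):
    ///
    /// ```rust
    /// assert_eq!(5.0f64.abs_sub(&3.0), 2.0);
    /// assert_eq!(3.0f64.abs_sub(&5.0), 0.0);
    /// ```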
#[inline]
fn abs_sub(&self, other: &f64) -> f64 {
unsafe { cmath::fdim(*self, *other) }
}
/// # Returns
///
/// - `1.0` if the number is positive, `+0.0` or `INFINITY`
/// - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
/// - `NAN` if the number is NaN
#[inline]
fn signum(&self) -> f64 {
if self.is_nan() { NAN } else {
unsafe { intrinsics::copysignf64(1.0, *self) }
}
}
/// Returns `true` if the number is positive, including `+0.0` and `INFINITY`
#[inline]
fn is_positive(&self) -> bool { *self > 0.0 || (1.0 / *self) == INFINITY }
/// Returns `true` if the number is negative, including `-0.0` and `NEG_INFINITY`
#[inline]
fn is_negative(&self) -> bool { *self < 0.0 || (1.0 / *self) == NEG_INFINITY }
}
impl Bounded for f64 {
#[inline]
fn min_value() -> f64 { 2.2250738585072014e-308 }
#[inline]
fn max_value() -> f64 { 1.7976931348623157e+308 }
}
impl Primitive for f64 {}
impl Float for f64 {
#[inline]
fn nan() -> f64 { 0.0 / 0.0 }
#[inline]
fn infinity() -> f64 { 1.0 / 0.0 }
#[inline]
fn neg_infinity() -> f64 { -1.0 / 0.0 }
#[inline]
fn neg_zero() -> f64 { -0.0 }
/// Returns `true` if the number is NaN
#[inline]
fn is_nan(self) -> bool { self != self }
/// Returns `true` if the number is infinite
#[inline]
fn is_infinite(self) -> bool {
self == Float::infinity() || self == Float::neg_infinity()
}
/// Returns `true` if the number is neither infinite or NaN
#[inline]
fn is_finite(self) -> bool {
!(self.is_nan() || self.is_infinite())
}
/// Returns `true` if the number is neither zero, infinite, subnormal or NaN
#[inline]
fn is_normal(self) -> bool {
self.classify() == FPNormal
}
/// Returns the floating point category of the number. If only one property
/// is going to be tested, it is generally faster to use the specific
/// predicate instead.
fn classify(self) -> FPCategory {
static EXP_MASK: u64 = 0x7ff0000000000000;
static MAN_MASK: u64 = 0x000fffffffffffff;
let bits: u64 = unsafe { cast::transmute(self) };
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => FPZero,
(_, 0) => FPSubnormal,
(0, EXP_MASK) => FPInfinite,
(_, EXP_MASK) => FPNaN,
_ => FPNormal,
}
}
#[inline]
fn mantissa_digits(_: Option<f64>) -> uint { 53 }
#[inline]
fn digits(_: Option<f64>) -> uint { 15 }
#[inline]
fn epsilon() -> f64 { 2.2204460492503131e-16 }
#[inline]
fn min_exp(_: Option<f64>) -> int { -1021 }
#[inline]
fn max_exp(_: Option<f64>) -> int { 1024 }
#[inline]
fn min_10_exp(_: Option<f64>) -> int { -307 }
#[inline]
fn max_10_exp(_: Option<f64>) -> int { 308 }
/// Constructs a floating point number by multiplying `x` by 2 raised to the
/// power of `exp`
#[inline]
fn ldexp(x: f64, exp: int) -> f64 {
unsafe { cmath::ldexp(x, exp as c_int) }
}
/// Breaks the number into a normalized fraction and a base-2 exponent,
/// satisfying:
///
/// - `self = x * pow(2, exp)`
/// - `0.5 <= abs(x) < 1.0`
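    ///
    /// For example (a small sketch): `8.0` decomposes into `0.5 * 2^4`:
    ///
    /// ```rust
    /// let (x, exp) = 8.0f64.frexp();
    /// assert_eq!((x, exp), (0.5, 4));
    /// ```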
#[inline]
fn frexp(self) -> (f64, int) {
unsafe {
let mut exp = 0;
let x = cmath::frexp(self, &mut exp);
(x, exp as int)
}
}
/// Returns the mantissa, exponent and sign as integers.
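    ///
    /// For example (a small sketch; the mantissa carries the implicit leading bit,
    /// so `2.0` decodes to `2^52 * 2^-51` with a positive sign):
    ///
    /// ```rust
    /// let (mantissa, exponent, sign) = 2.0f64.integer_decode();
    /// assert_eq!(mantissa, 4503599627370496);
    /// assert_eq!(exponent, -51);
    /// assert_eq!(sign, 1);
    /// ```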
fn integer_decode(self) -> (u64, i16, i8) {
let bits: u64 = unsafe { cast::transmute(self) };
let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
let mantissa = if exponent == 0 {
(bits & 0xfffffffffffff) << 1
} else {
(bits & 0xfffffffffffff) | 0x10000000000000
};
// Exponent bias + mantissa shift
exponent -= 1023 + 52;
(mantissa, exponent, sign)
}
/// Returns the next representable floating-point value in the direction of
/// `other`.
#[inline]
fn next_after(self, other: f64) -> f64 {
unsafe { cmath::nextafter(self, other) }
}
/// Round half-way cases toward `NEG_INFINITY`
#[inline]
fn floor(self) -> f64 {
unsafe { intrinsics::floorf64(self) }
}
/// Round half-way cases toward `INFINITY`
#[inline]
fn ceil(self) -> f64 {
unsafe { intrinsics::ceilf64(self) }
}
/// Round half-way cases away from `0.0`
#[inline]
fn round(self) -> f64 {
unsafe { intrinsics::roundf64(self) }
}
/// The integer part of the number (rounds towards `0.0`)
#[inline]
fn trunc(self) -> f64 {
unsafe { intrinsics::truncf64(self) }
}
/// The fractional part of the number, satisfying:
///
/// ```rust
/// let x = 1.65f64;
/// assert!(x == x.trunc() + x.fract())
/// ```
#[inline]
fn fract(self) -> f64 { self - self.trunc() }
#[inline]
fn max(self, other: f64) -> f64 {
unsafe { cmath::fmax(self, other) }
}
#[inline]
fn min(self, other: f64) -> f64 {
unsafe { cmath::fmin(self, other) }
}
/// Fused multiply-add. Computes `(self * a) + b` with only one rounding
/// error. This produces a more accurate result with better performance than
/// a separate multiplication operation followed by an add.
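    ///
    /// For example (a small sketch): `(2.0 * 3.0) + 1.0` computed with a single rounding:
    ///
    /// ```rust
    /// assert_eq!(2.0f64.mul_add(3.0, 1.0), 7.0);
    /// ```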
#[inline]
fn mul_add(self, a: f64, b: f64) -> f64 {
unsafe { intrinsics::fmaf64(self, a, b) }
}
/// The reciprocal (multiplicative inverse) of the number
#[inline]
fn recip(self) -> f64 { 1.0 / self }
#[inline]
fn powf(self, n: f64) -> f64 {
unsafe { intrinsics::powf64(self, n) }
}
#[inline]
fn powi(self, n: i32) -> f64 {
unsafe { intrinsics::powif64(self, n) }
}
/// sqrt(2.0)
#[inline]
fn sqrt2() -> f64 { 1.41421356237309504880168872420969808 }
/// 1.0 / sqrt(2.0)
#[inline]
fn frac_1_sqrt2() -> f64 { 0.707106781186547524400844362104849039 }
#[inline]
fn sqrt(self) -> f64 {
unsafe { intrinsics::sqrtf64(self) }
}
#[inline]
fn rsqrt(self) -> f64 { self.sqrt().recip() }
#[inline]
fn cbrt(self) -> f64 {
unsafe { cmath::cbrt(self) }
}
#[inline]
fn hypot(self, other: f64) -> f64 {
unsafe { cmath::hypot(self, other) }
}
/// Archimedes' constant
#[inline]
fn pi() -> f64 { 3.14159265358979323846264338327950288 }
/// 2.0 * pi
#[inline]
fn two_pi() -> f64 { 6.28318530717958647692528676655900576 }
/// pi / 2.0
#[inline]
fn frac_pi_2() -> f64 { 1.57079632679489661923132169163975144 }
/// pi / 3.0
#[inline]
fn frac_pi_3() -> f64 { 1.04719755119659774615421446109316763 }
/// pi / 4.0
#[inline]
fn frac_pi_4() -> f64 { 0.785398163397448309615660845819875721 }
/// pi / 6.0
#[inline]
fn frac_pi_6() -> f64 { 0.52359877559829887307710723054658381 }
/// pi / 8.0
#[inline]
fn frac_pi_8() -> f64 { 0.39269908169872415480783042290993786 }
/// 1.0 / pi
#[inline]
fn frac_1_pi() -> f64 { 0.318309886183790671537767526745028724 }
/// 2.0 / pi
#[inline]
fn frac_2_pi() -> f64 { 0.636619772367581343075535053490057448 }
/// 2.0 / sqrt(pi)
#[inline]
fn frac_2_sqrtpi() -> f64 { 1.12837916709551257389615890312154517 }
#[inline]
fn sin(self) -> f64 {
unsafe { intrinsics::sinf64(self) }
}
#[inline]
fn cos(self) -> f64 {
unsafe { intrinsics::cosf64(self) }
}
#[inline]
fn tan(self) -> f64 {
unsafe { cmath::tan(self) }
}
#[inline]
fn asin(self) -> f64 {
unsafe { cmath::asin(self) }
}
#[inline]
fn acos(self) -> f64 {
unsafe { cmath::acos(self) }
}
#[inline]
fn atan(self) -> f64 {
unsafe { cmath::atan(self) }
}
#[inline]
fn atan2(self, other: f64) -> f64 {
unsafe { cmath::atan2(self, other) }
}
/// Simultaneously computes the sine and cosine of the number
#[inline]
fn sin_cos(self) -> (f64, f64) {
(self.sin(), self.cos())
}
/// Euler's number
#[inline]
fn e() -> f64 { 2.71828182845904523536028747135266250 }
/// log2(e)
#[inline]
fn log2_e() -> f64 { 1.44269504088896340735992468100189214 }
/// log10(e)
#[inline]
fn log10_e() -> f64 { 0.434294481903251827651128918916605082 }
/// ln(2.0)
#[inline]
fn ln_2() -> f64 { 0.693147180559945309417232121458176568 }
/// ln(10.0)
#[inline]
fn ln_10() -> f64 { 2.30258509299404568401799145468436421 }
/// Returns the exponential of the number
#[inline]
fn exp(self) -> f64 {
unsafe { intrinsics::expf64(self) }
}
/// Returns 2 raised to the power of the number
#[inline]
fn exp2(self) -> f64 {
unsafe { intrinsics::exp2f64(self) }
}
/// Returns the exponential of the number, minus `1`, in a way that is
/// accurate even if the number is close to zero
#[inline]
fn exp_m1(self) -> f64 {
unsafe { cmath::expm1(self) }
}
/// Returns the natural logarithm of the number
#[inline]
fn ln(self) -> f64 {
unsafe { intrinsics::logf64(self) }
}
/// Returns the logarithm of the number with respect to an arbitrary base
#[inline]
fn log(self, base: f64) -> f64 { self.ln() / base.ln() }
/// Returns the base 2 logarithm of the number
#[inline]
fn log2(self) -> f64 {
unsafe { intrinsics::log2f64(self) }
}
/// Returns the base 10 logarithm of the number
#[inline]
fn log10(self) -> f64 {
unsafe { intrinsics::log10f64(self) }
}
/// Returns the natural logarithm of the number plus `1` (`ln(1+n)`) more
/// accurately than if the operations were performed separately
#[inline]
fn ln_1p(self) -> f64 {
unsafe { cmath::log1p(self) }
}
#[inline]
fn sinh(self) -> f64 {
unsafe { cmath::sinh(self) }
}
#[inline]
fn cosh(self) -> f64 {
unsafe { cmath::cosh(self) }
}
#[inline]
fn tanh(self) -> f64 {
unsafe { cmath::tanh(self) }
}
/// Inverse hyperbolic sine
///
/// # Returns
///
/// - on success, the inverse hyperbolic sine of `self` will be returned
/// - `self` if `self` is `0.0`, `-0.0`, `INFINITY`, or `NEG_INFINITY`
/// - `NAN` if `self` is `NAN`
#[inline]
fn asinh(self) -> f64 {
match self {
NEG_INFINITY => NEG_INFINITY,
x => (x + ((x * x) + 1.0).sqrt()).ln(),
}
}
/// Inverse hyperbolic cosine
///
/// # Returns
///
/// - on success, the inverse hyperbolic cosine of `self` will be returned
/// - `INFINITY` if `self` is `INFINITY`
/// - `NAN` if `self` is `NAN` or `self < 1.0` (including `NEG_INFINITY`)
#[inline]
fn acosh(self) -> f64 {
match self {
x if x < 1.0 => Float::nan(),
x => (x + ((x * x) - 1.0).sqrt()).ln(),
}
}
/// Inverse hyperbolic tangent
///
/// # Returns
///
/// - on success, the inverse hyperbolic tangent of `self` will be returned
/// - `self` if `self` is `0.0` or `-0.0`
/// - `INFINITY` if `self` is `1.0`
/// - `NEG_INFINITY` if `self` is `-1.0`
/// - `NAN` if the `self` is `NAN` or outside the domain of `-1.0 <= self <= 1.0`
/// (including `INFINITY` and `NEG_INFINITY`)
#[inline]
fn atanh(self) -> f64 {
0.5 * ((2.0 * self) / (1.0 - self)).ln_1p()
}
/// Converts to degrees, assuming the number is in radians
#[inline]
fn to_degrees(self) -> f64 { self * (180.0f64 / Float::pi()) }
/// Converts to radians, assuming the number is in degrees
#[inline]
fn to_radians(self) -> f64 {
let value: f64 = Float::pi();
self * (value / 180.0)
}
}
//
// Section: String Conversions
//
/// Converts a float to a string
///
/// # Arguments
///
/// * num - The float value
#[inline]
pub fn to_str(num: f64) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false);
r
}
/// Converts a float to a string in hexadecimal format
///
/// # Arguments
///
/// * num - The float value
#[inline]
pub fn to_str_hex(num: f64) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 16u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false);
r
}
/// Converts a float to a string in a given radix, and a flag indicating
/// whether it's a special value
///
/// # Arguments
///
/// * num - The float value
/// * radix - The base to use
#[inline]
pub fn to_str_radix_special(num: f64, rdx: uint) -> (~str, bool) {
strconv::float_to_str_common(num, rdx, true,
strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false)
}
/// Converts a float to a string with exactly the number of
/// provided significant digits
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of significant digits
#[inline]
pub fn to_str_exact(num: f64, dig: uint) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpNone, false);
r
}
/// Converts a float to a string with a maximum number of
/// significant digits
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of significant digits
#[inline]
pub fn to_str_digits(num: f64, dig: uint) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigMax(dig), strconv::ExpNone, false);
r
}
/// Converts a float to a string using the exponential notation with exactly the number of
/// provided digits after the decimal point in the significand
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of digits after the decimal point
/// * upper - Use `E` instead of `e` for the exponent sign
#[inline]
pub fn to_str_exp_exact(num: f64, dig: uint, upper: bool) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpDec, upper);
r
}
/// Converts a float to a string using the exponential notation with the maximum number of
/// digits after the decimal point in the significand
///
/// # Arguments
///
/// * num - The float value
/// * digits - The number of digits after the decimal point
/// * upper - Use `E` instead of `e` for the exponent sign
#[inline]
pub fn to_str_exp_digits(num: f64, dig: uint, upper: bool) -> ~str {
let (r, _) = strconv::float_to_str_common(
num, 10u, true, strconv::SignNeg, strconv::DigMax(dig), strconv::ExpDec, upper);
r
}
impl num::ToStrRadix for f64 {
/// Converts a float to a string in a given radix
///
/// # Arguments
///
/// * num - The float value
/// * radix - The base to use
///
/// # Failure
///
/// Fails if called on a special value like `inf`, `-inf` or `NAN` due to
/// possible misinterpretation of the result at higher bases. If those values
/// are expected, use `to_str_radix_special()` instead.
#[inline]
fn to_str_radix(&self, rdx: uint) -> ~str {
let (r, special) = strconv::float_to_str_common(
*self, rdx, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false);
if special { fail!("number has a special value, \
try to_str_radix_special() if those are expected") }
r
}
}
/// Convert a string in base 16 to a float.
/// Accepts an optional binary exponent.
///
/// This function accepts strings such as
///
/// * 'a4.fe'
/// * '+a4.fe', equivalent to 'a4.fe'
/// * '-a4.fe'
/// * '2b.aP128', or equivalently, '2b.ap128'
/// * '2b.aP-128'
/// * '.' (understood as 0)
/// * 'c.'
/// * '.c', or, equivalently, '0.c'
/// * '+inf', 'inf', '-inf', 'NaN'
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
///
/// # Return value
///
/// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
#[inline]
pub fn from_str_hex(num: &str) -> Option<f64> {
strconv::from_str_common(num, 16u, true, true, true,
strconv::ExpBin, false, false)
}
impl FromStr for f64 {
/// Convert a string in base 10 to a float.
/// Accepts an optional decimal exponent.
///
/// This function accepts strings such as
///
/// * '3.14'
/// * '+3.14', equivalent to '3.14'
/// * '-3.14'
/// * '2.5E10', or equivalently, '2.5e10'
/// * '2.5E-10'
/// * '.' (understood as 0)
/// * '5.'
/// * '.5', or, equivalently, '0.5'
/// * '+inf', 'inf', '-inf', 'NaN'
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
///
/// # Return value
///
    /// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
#[inline]
fn from_str(val: &str) -> Option<f64> {
strconv::from_str_common(val, 10u, true, true, true,
strconv::ExpDec, false, false)
}
}
impl num::FromStrRadix for f64 {
/// Convert a string in a given base to a float.
///
/// Due to possible conflicts, this function does **not** accept
/// the special values `inf`, `-inf`, `+inf` and `NaN`, **nor**
/// does it recognize exponents of any kind.
///
/// Leading and trailing whitespace represent an error.
///
/// # Arguments
///
/// * num - A string
/// * radix - The base to use. Must lie in the range [2 .. 36]
///
/// # Return value
///
/// `None` if the string did not represent a valid number. Otherwise,
/// `Some(n)` where `n` is the floating-point number represented by `num`.
#[inline]
fn from_str_radix(val: &str, rdx: uint) -> Option<f64> {
strconv::from_str_common(val, rdx, true, true, false,
strconv::ExpNone, false, false)
}
}
#[cfg(test)]
mod tests {
use f64::*;
use num::*;
use num;
#[test]
fn test_min_nan() {
assert_eq!(NAN.min(2.0), 2.0);
assert_eq!(2.0f64.min(NAN), 2.0);
}
#[test]
fn test_max_nan() {
assert_eq!(NAN.max(2.0), 2.0);
assert_eq!(2.0f64.max(NAN), 2.0);
}
#[test]
fn test_num() {
num::test_num(10f64, 2f64);
}
#[test]
fn test_floor() {
assert_approx_eq!(1.0f64.floor(), 1.0f64);
assert_approx_eq!(1.3f64.floor(), 1.0f64);
assert_approx_eq!(1.5f64.floor(), 1.0f64);
assert_approx_eq!(1.7f64.floor(), 1.0f64);
assert_approx_eq!(0.0f64.floor(), 0.0f64);
assert_approx_eq!((-0.0f64).floor(), -0.0f64);
assert_approx_eq!((-1.0f64).floor(), -1.0f64);
assert_approx_eq!((-1.3f64).floor(), -2.0f64);
assert_approx_eq!((-1.5f64).floor(), -2.0f64);
assert_approx_eq!((-1.7f64).floor(), -2.0f64);
}
#[test]
fn test_ceil() {
assert_approx_eq!(1.0f64.ceil(), 1.0f64);
assert_approx_eq!(1.3f64.ceil(), 2.0f64);
assert_approx_eq!(1.5f64.ceil(), 2.0f64);
assert_approx_eq!(1.7f64.ceil(), 2.0f64);
assert_approx_eq!(0.0f64.ceil(), 0.0f64);
assert_approx_eq!((-0.0f64).ceil(), -0.0f64);
assert_approx_eq!((-1.0f64).ceil(), -1.0f64);
assert_approx_eq!((-1.3f64).ceil(), -1.0f64);
assert_approx_eq!((-1.5f64).ceil(), -1.0f64);
assert_approx_eq!((-1.7f64).ceil(), -1.0f64);
}
#[test]
fn test_round() {
assert_approx_eq!(1.0f64.round(), 1.0f64);
assert_approx_eq!(1.3f64.round(), 1.0f64);
assert_approx_eq!(1.5f64.round(), 2.0f64);
assert_approx_eq!(1.7f64.round(), 2.0f64);
assert_approx_eq!(0.0f64.round(), 0.0f64);
assert_approx_eq!((-0.0f64).round(), -0.0f64);
assert_approx_eq!((-1.0f64).round(), -1.0f64);
assert_approx_eq!((-1.3f64).round(), -1.0f64);
assert_approx_eq!((-1.5f64).round(), -2.0f64);
assert_approx_eq!((-1.7f64).round(), -2.0f64);
}
#[test]
fn test_trunc() {
assert_approx_eq!(1.0f64.trunc(), 1.0f64);
assert_approx_eq!(1.3f64.trunc(), 1.0f64);
assert_approx_eq!(1.5f64.trunc(), 1.0f64);
assert_approx_eq!(1.7f64.trunc(), 1.0f64);
assert_approx_eq!(0.0f64.trunc(), 0.0f64);
assert_approx_eq!((-0.0f64).trunc(), -0.0f64);
assert_approx_eq!((-1.0f64).trunc(), -1.0f64);
assert_approx_eq!((-1.3f64).trunc(), -1.0f64);
assert_approx_eq!((-1.5f64).trunc(), -1.0f64);
assert_approx_eq!((-1.7f64).trunc(), -1.0f64);
}
#[test]
fn test_fract() {
assert_approx_eq!(1.0f64.fract(), 0.0f64);
assert_approx_eq!(1.3f64.fract(), 0.3f64);
assert_approx_eq!(1.5f64.fract(), 0.5f64);
assert_approx_eq!(1.7f64.fract(), 0.7f64);
assert_approx_eq!(0.0f64.fract(), 0.0f64);
assert_approx_eq!((-0.0f64).fract(), -0.0f64);
assert_approx_eq!((-1.0f64).fract(), -0.0f64);
assert_approx_eq!((-1.3f64).fract(), -0.3f64);
assert_approx_eq!((-1.5f64).fract(), -0.5f64);
assert_approx_eq!((-1.7f64).fract(), -0.7f64);
}
#[test]
fn test_asinh() {
assert_eq!(0.0f64.asinh(), 0.0f64);
assert_eq!((-0.0f64).asinh(), -0.0f64);
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let nan: f64 = Float::nan();
assert_eq!(inf.asinh(), inf);
assert_eq!(neg_inf.asinh(), neg_inf);
assert!(nan.asinh().is_nan());
assert_approx_eq!(2.0f64.asinh(), 1.443635475178810342493276740273105f64);
assert_approx_eq!((-2.0f64).asinh(), -1.443635475178810342493276740273105f64);
}
#[test]
fn test_acosh() {
assert_eq!(1.0f64.acosh(), 0.0f64);
assert!(0.999f64.acosh().is_nan());
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let nan: f64 = Float::nan();
assert_eq!(inf.acosh(), inf);
assert!(neg_inf.acosh().is_nan());
assert!(nan.acosh().is_nan());
assert_approx_eq!(2.0f64.acosh(), 1.31695789692481670862504634730796844f64);
assert_approx_eq!(3.0f64.acosh(), 1.76274717403908605046521864995958461f64);
}
#[test]
fn test_atanh() {
assert_eq!(0.0f64.atanh(), 0.0f64);
assert_eq!((-0.0f64).atanh(), -0.0f64);
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let nan: f64 = Float::nan();
assert_eq!(1.0f64.atanh(), inf);
assert_eq!((-1.0f64).atanh(), neg_inf);
assert!(2f64.atanh().atanh().is_nan());
assert!((-2f64).atanh().atanh().is_nan());
assert!(inf.atanh().is_nan());
assert!(neg_inf.atanh().is_nan());
assert!(nan.atanh().is_nan());
assert_approx_eq!(0.5f64.atanh(), 0.54930614433405484569762261846126285f64);
assert_approx_eq!((-0.5f64).atanh(), -0.54930614433405484569762261846126285f64);
}
#[test]
fn test_real_consts() {
let pi: f64 = Float::pi();
let two_pi: f64 = Float::two_pi();
let frac_pi_2: f64 = Float::frac_pi_2();
let frac_pi_3: f64 = Float::frac_pi_3();
let frac_pi_4: f64 = Float::frac_pi_4();
let frac_pi_6: f64 = Float::frac_pi_6();
let frac_pi_8: f64 = Float::frac_pi_8();
let frac_1_pi: f64 = Float::frac_1_pi();
let frac_2_pi: f64 = Float::frac_2_pi();
let frac_2_sqrtpi: f64 = Float::frac_2_sqrtpi();
let sqrt2: f64 = Float::sqrt2();
let frac_1_sqrt2: f64 = Float::frac_1_sqrt2();
let e: f64 = Float::e();
let log2_e: f64 = Float::log2_e();
let log10_e: f64 = Float::log10_e();
let ln_2: f64 = Float::ln_2();
let ln_10: f64 = Float::ln_10();
assert_approx_eq!(two_pi, 2.0 * pi);
assert_approx_eq!(frac_pi_2, pi / 2f64);
assert_approx_eq!(frac_pi_3, pi / 3f64);
assert_approx_eq!(frac_pi_4, pi / 4f64);
assert_approx_eq!(frac_pi_6, pi / 6f64);
assert_approx_eq!(frac_pi_8, pi / 8f64);
assert_approx_eq!(frac_1_pi, 1f64 / pi);
assert_approx_eq!(frac_2_pi, 2f64 / pi);
assert_approx_eq!(frac_2_sqrtpi, 2f64 / pi.sqrt());
assert_approx_eq!(sqrt2, 2f64.sqrt());
assert_approx_eq!(frac_1_sqrt2, 1f64 / 2f64.sqrt());
assert_approx_eq!(log2_e, e.log2());
assert_approx_eq!(log10_e, e.log10());
assert_approx_eq!(ln_2, 2f64.ln());
assert_approx_eq!(ln_10, 10f64.ln());
}
#[test]
pub fn test_abs() {
assert_eq!(INFINITY.abs(), INFINITY);
assert_eq!(1f64.abs(), 1f64);
assert_eq!(0f64.abs(), 0f64);
assert_eq!((-0f64).abs(), 0f64);
assert_eq!((-1f64).abs(), 1f64);
assert_eq!(NEG_INFINITY.abs(), INFINITY);
assert_eq!((1f64/NEG_INFINITY).abs(), 0f64);
assert!(NAN.abs().is_nan());
}
#[test]
fn test_abs_sub() {
assert_eq!((-1f64).abs_sub(&1f64), 0f64);
assert_eq!(1f64.abs_sub(&1f64), 0f64);
assert_eq!(1f64.abs_sub(&0f64), 1f64);
assert_eq!(1f64.abs_sub(&-1f64), 2f64);
assert_eq!(NEG_INFINITY.abs_sub(&0f64), 0f64);
assert_eq!(INFINITY.abs_sub(&1f64), INFINITY);
assert_eq!(0f64.abs_sub(&NEG_INFINITY), INFINITY);
assert_eq!(0f64.abs_sub(&INFINITY), 0f64);
}
#[test]
fn test_abs_sub_nowin() {
assert!(NAN.abs_sub(&-1f64).is_nan());
assert!(1f64.abs_sub(&NAN).is_nan());
}
#[test]
fn test_signum() {
assert_eq!(INFINITY.signum(), 1f64);
assert_eq!(1f64.signum(), 1f64);
assert_eq!(0f64.signum(), 1f64);
assert_eq!((-0f64).signum(), -1f64);
assert_eq!((-1f64).signum(), -1f64);
assert_eq!(NEG_INFINITY.signum(), -1f64);
assert_eq!((1f64/NEG_INFINITY).signum(), -1f64);
assert!(NAN.signum().is_nan());
}
#[test]
fn test_is_positive() {
assert!(INFINITY.is_positive());
assert!(1f64.is_positive());
assert!(0f64.is_positive());
assert!(!(-0f64).is_positive());
assert!(!(-1f64).is_positive());
assert!(!NEG_INFINITY.is_positive());
assert!(!(1f64/NEG_INFINITY).is_positive());
assert!(!NAN.is_positive());
}
#[test]
fn test_is_negative() {
assert!(!INFINITY.is_negative());
assert!(!1f64.is_negative());
assert!(!0f64.is_negative());
assert!((-0f64).is_negative());
assert!((-1f64).is_negative());
assert!(NEG_INFINITY.is_negative());
assert!((1f64/NEG_INFINITY).is_negative());
assert!(!NAN.is_negative());
}
#[test]
fn test_is_normal() {
let nan: f64 = Float::nan();
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let zero: f64 = Zero::zero();
let neg_zero: f64 = Float::neg_zero();
assert!(!nan.is_normal());
assert!(!inf.is_normal());
assert!(!neg_inf.is_normal());
assert!(!zero.is_normal());
assert!(!neg_zero.is_normal());
assert!(1f64.is_normal());
assert!(1e-307f64.is_normal());
assert!(!1e-308f64.is_normal());
}
#[test]
fn test_classify() {
let nan: f64 = Float::nan();
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let zero: f64 = Zero::zero();
let neg_zero: f64 = Float::neg_zero();
assert_eq!(nan.classify(), FPNaN);
assert_eq!(inf.classify(), FPInfinite);
assert_eq!(neg_inf.classify(), FPInfinite);
assert_eq!(zero.classify(), FPZero);
assert_eq!(neg_zero.classify(), FPZero);
assert_eq!(1e-307f64.classify(), FPNormal);
assert_eq!(1e-308f64.classify(), FPSubnormal);
}
#[test]
fn test_ldexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f64 = from_str_hex("1p-123").unwrap();
let f2: f64 = from_str_hex("1p-111").unwrap();
assert_eq!(Float::ldexp(1f64, -123), f1);
assert_eq!(Float::ldexp(1f64, -111), f2);
assert_eq!(Float::ldexp(0f64, -123), 0f64);
assert_eq!(Float::ldexp(-0f64, -123), -0f64);
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let nan: f64 = Float::nan();
assert_eq!(Float::ldexp(inf, -123), inf);
assert_eq!(Float::ldexp(neg_inf, -123), neg_inf);
assert!(Float::ldexp(nan, -123).is_nan());
}
#[test]
fn test_frexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f64 = from_str_hex("1p-123").unwrap();
let f2: f64 = from_str_hex("1p-111").unwrap();
let (x1, exp1) = f1.frexp();
let (x2, exp2) = f2.frexp();
assert_eq!((x1, exp1), (0.5f64, -122));
assert_eq!((x2, exp2), (0.5f64, -110));
assert_eq!(Float::ldexp(x1, exp1), f1);
assert_eq!(Float::ldexp(x2, exp2), f2);
assert_eq!(0f64.frexp(), (0f64, 0));
assert_eq!((-0f64).frexp(), (-0f64, 0));
}
#[test] #[ignore(cfg(windows))] // FIXME #8755
fn test_frexp_nowin() {
let inf: f64 = Float::infinity();
let neg_inf: f64 = Float::neg_infinity();
let nan: f64 = Float::nan();
        assert_eq!(match inf.frexp() { (x, _) => x }, inf);
        assert_eq!(match neg_inf.frexp() { (x, _) => x }, neg_inf);
        assert!(match nan.frexp() { (x, _) => x.is_nan() });
}
#[test]
fn test_integer_decode() {
assert_eq!(3.14159265359f64.integer_decode(), (7074237752028906u64, -51i16, 1i8));
assert_eq!((-8573.5918555f64).integer_decode(), (4713381968463931u64, -39i16, -1i8));
assert_eq!(2f64.powf(100.0).integer_decode(), (4503599627370496u64, 48i16, 1i8));
assert_eq!(0f64.integer_decode(), (0u64, -1075i16, 1i8));
assert_eq!((-0f64).integer_decode(), (0u64, -1075i16, -1i8));
assert_eq!(INFINITY.integer_decode(), (4503599627370496u64, 972i16, 1i8));
assert_eq!(NEG_INFINITY.integer_decode(), (4503599627370496, 972, -1));
assert_eq!(NAN.integer_decode(), (6755399441055744u64, 972i16, 1i8));
}
}
| 30.462279 | 93 | 0.574639 |
ef7e0fe6c51668b8b3ce87f0ebc2fe4e4541bd41 | 925 | fn numbers_to_string(numbers: &[u8]) -> String {
numbers.iter().map(u8::to_string).collect()
}
#[allow(dead_code)]
fn create_phone_number(numbers: &[u8]) -> String {
format!(
"({}) {}-{}",
numbers_to_string(&numbers[0..3]),
numbers_to_string(&numbers[3..6]),
numbers_to_string(&numbers[6..10])
)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_number_conversion() {
assert_eq!(numbers_to_string(&[1, 2, 3]), "123");
}
#[test]
fn returns_expected() {
assert_eq!(
create_phone_number(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 0]),
"(123) 456-7890"
);
assert_eq!(
create_phone_number(&[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
"(111) 111-1111"
);
assert_eq!(
create_phone_number(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 9]),
"(123) 456-7899"
);
}
}
| 23.125 | 65 | 0.486486 |
215819f3020c2521741617c6aab28822d8e67a05 | 9,036 | /// This list represents the default file types that ripgrep ships with. In
/// general, any file format is fair game, although it should generally be
/// limited to reasonably popular open formats. For other cases, you can add
/// types to each invocation of ripgrep with the '--type-add' flag.
///
/// If you would like to add or improve this list, please file a PR:
/// https://github.com/BurntSushi/ripgrep
///
/// Please try to keep this list sorted lexicographically and wrapped to 79
/// columns (inclusive).
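///
/// As a brief illustration (hedged; the type name `foo` here is hypothetical and
/// not an entry in this list), a one-off type can be added and then selected on
/// the command line like so:
///
/// ```text
/// rg --type-add 'foo:*.foo' -tfoo PATTERN
/// ```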
#[rustfmt::skip]
pub const DEFAULT_TYPES: &[(&str, &[&str])] = &[
("agda", &["*.agda", "*.lagda"]),
("aidl", &["*.aidl"]),
("amake", &["*.mk", "*.bp"]),
("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]),
("asm", &["*.asm", "*.s", "*.S"]),
("asp", &[
"*.aspx", "*.aspx.cs", "*.aspx.cs", "*.ascx", "*.ascx.cs", "*.ascx.vb",
]),
("ats", &["*.ats", "*.dats", "*.sats", "*.hats"]),
("avro", &["*.avdl", "*.avpr", "*.avsc"]),
("awk", &["*.awk"]),
("bazel", &["*.bazel", "*.bzl", "BUILD", "WORKSPACE"]),
("bitbake", &["*.bb", "*.bbappend", "*.bbclass", "*.conf", "*.inc"]),
("brotli", &["*.br"]),
("buildstream", &["*.bst"]),
("bzip2", &["*.bz2", "*.tbz2"]),
("c", &["*.[chH]", "*.[chH].in", "*.cats"]),
("cabal", &["*.cabal"]),
("cbor", &["*.cbor"]),
("ceylon", &["*.ceylon"]),
("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]),
("cmake", &["*.cmake", "CMakeLists.txt"]),
("coffeescript", &["*.coffee"]),
("config", &["*.cfg", "*.conf", "*.config", "*.ini"]),
("coq", &["*.v"]),
("cpp", &[
"*.[ChH]", "*.cc", "*.[ch]pp", "*.[ch]xx", "*.hh", "*.inl",
"*.[ChH].in", "*.cc.in", "*.[ch]pp.in", "*.[ch]xx.in", "*.hh.in",
]),
("creole", &["*.creole"]),
("crystal", &["Projectfile", "*.cr"]),
("cs", &["*.cs"]),
("csharp", &["*.cs"]),
("cshtml", &["*.cshtml"]),
("css", &["*.css", "*.scss"]),
("csv", &["*.csv"]),
("cython", &["*.pyx", "*.pxi", "*.pxd"]),
("d", &["*.d"]),
("dart", &["*.dart"]),
("dhall", &["*.dhall"]),
("diff", &["*.patch", "*.diff"]),
("docker", &["*Dockerfile*"]),
("dvc", &["Dvcfile", "*.dvc"]),
("ebuild", &["*.ebuild"]),
("edn", &["*.edn"]),
("elisp", &["*.el"]),
("elixir", &["*.ex", "*.eex", "*.exs"]),
("elm", &["*.elm"]),
("erb", &["*.erb"]),
("erlang", &["*.erl", "*.hrl"]),
("fidl", &["*.fidl"]),
("fish", &["*.fish"]),
("flatbuffers", &["*.fbs"]),
("fortran", &[
"*.f", "*.F", "*.f77", "*.F77", "*.pfo",
"*.f90", "*.F90", "*.f95", "*.F95",
]),
("fsharp", &["*.fs", "*.fsx", "*.fsi"]),
("fut", &[".fut"]),
("gap", &["*.g", "*.gap", "*.gi", "*.gd", "*.tst"]),
("gn", &["*.gn", "*.gni"]),
("go", &["*.go"]),
("gradle", &["*.gradle"]),
("groovy", &["*.groovy", "*.gradle"]),
("gzip", &["*.gz", "*.tgz"]),
("h", &["*.h", "*.hpp"]),
("haml", &["*.haml"]),
("haskell", &["*.hs", "*.lhs", "*.cpphs", "*.c2hs", "*.hsc"]),
("hbs", &["*.hbs"]),
("hs", &["*.hs", "*.lhs"]),
("html", &["*.htm", "*.html", "*.ejs"]),
("idris", &["*.idr", "*.lidr"]),
("java", &["*.java", "*.jsp", "*.jspx", "*.properties"]),
("jinja", &["*.j2", "*.jinja", "*.jinja2"]),
("jl", &["*.jl"]),
("js", &["*.js", "*.jsx", "*.vue"]),
("json", &["*.json", "composer.lock"]),
("jsonl", &["*.jsonl"]),
("julia", &["*.jl"]),
("jupyter", &["*.ipynb", "*.jpynb"]),
("k", &["*.k"]),
("kotlin", &["*.kt", "*.kts"]),
("less", &["*.less"]),
("license", &[
// General
"COPYING", "COPYING[.-]*",
"COPYRIGHT", "COPYRIGHT[.-]*",
"EULA", "EULA[.-]*",
"licen[cs]e", "licen[cs]e.*",
"LICEN[CS]E", "LICEN[CS]E[.-]*", "*[.-]LICEN[CS]E*",
"NOTICE", "NOTICE[.-]*",
"PATENTS", "PATENTS[.-]*",
"UNLICEN[CS]E", "UNLICEN[CS]E[.-]*",
// GPL (gpl.txt, etc.)
"agpl[.-]*",
"gpl[.-]*",
"lgpl[.-]*",
// Other license-specific (APACHE-2.0.txt, etc.)
"AGPL-*[0-9]*",
"APACHE-*[0-9]*",
"BSD-*[0-9]*",
"CC-BY-*",
"GFDL-*[0-9]*",
"GNU-*[0-9]*",
"GPL-*[0-9]*",
"LGPL-*[0-9]*",
"MIT-*[0-9]*",
"MPL-*[0-9]*",
"OFL-*[0-9]*",
]),
("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]),
("lock", &["*.lock", "package-lock.json"]),
("log", &["*.log"]),
("lua", &["*.lua"]),
("lz4", &["*.lz4"]),
("lzma", &["*.lzma"]),
("m4", &["*.ac", "*.m4"]),
("make", &[
"[Gg][Nn][Uu]makefile", "[Mm]akefile",
"[Gg][Nn][Uu]makefile.am", "[Mm]akefile.am",
"[Gg][Nn][Uu]makefile.in", "[Mm]akefile.in",
"*.mk", "*.mak"
]),
("mako", &["*.mako", "*.mao"]),
("man", &["*.[0-9lnpx]", "*.[0-9][cEFMmpSx]"]),
("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]),
("matlab", &["*.m"]),
("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]),
("meson", &["meson.build", "meson_options.txt"]),
("minified", &["*.min.html", "*.min.css", "*.min.js"]),
("mk", &["mkfile"]),
("ml", &["*.ml"]),
("msbuild", &[
"*.csproj", "*.fsproj", "*.vcxproj", "*.proj", "*.props", "*.targets",
]),
("nim", &["*.nim", "*.nimf", "*.nimble", "*.nims"]),
("nix", &["*.nix"]),
("objc", &["*.h", "*.m"]),
("objcpp", &["*.h", "*.mm"]),
("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]),
("org", &["*.org", "*.org_archive"]),
("pascal", &["*.pas", "*.dpr", "*.lpr", "*.pp", "*.inc"]),
("pdf", &["*.pdf"]),
("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]),
("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]),
("pod", &["*.pod"]),
("postscript", &["*.eps", "*.ps"]),
("protobuf", &["*.proto"]),
("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]),
("puppet", &["*.erb", "*.pp", "*.rb"]),
("purs", &["*.purs"]),
("py", &["*.py"]),
("qmake", &["*.pro", "*.pri", "*.prf"]),
("qml", &["*.qml"]),
("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]),
("racket", &["*.rkt"]),
("rdoc", &["*.rdoc"]),
("readme", &["README*", "*README"]),
("robot", &["*.robot"]),
("rst", &["*.rst"]),
("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]),
("rust", &["*.rs"]),
("sass", &["*.sass", "*.scss"]),
("scala", &["*.scala", "*.sbt"]),
("sh", &[
// Portable/misc. init files
".login", ".logout", ".profile", "profile",
// bash-specific init files
".bash_login", "bash_login",
".bash_logout", "bash_logout",
".bash_profile", "bash_profile",
".bashrc", "bashrc", "*.bashrc",
// csh-specific init files
".cshrc", "*.cshrc",
// ksh-specific init files
".kshrc", "*.kshrc",
// tcsh-specific init files
".tcshrc",
// zsh-specific init files
".zshenv", "zshenv",
".zlogin", "zlogin",
".zlogout", "zlogout",
".zprofile", "zprofile",
".zshrc", "zshrc",
// Extensions
"*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh", "*.zsh",
]),
("slim", &["*.skim", "*.slim", "*.slime"]),
("smarty", &["*.tpl"]),
("sml", &["*.sml", "*.sig"]),
("soy", &["*.soy"]),
("spark", &["*.spark"]),
("spec", &["*.spec"]),
("sql", &["*.sql", "*.psql"]),
("stylus", &["*.styl"]),
("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]),
("svg", &["*.svg"]),
("swift", &["*.swift"]),
("swig", &["*.def", "*.i"]),
("systemd", &[
"*.automount", "*.conf", "*.device", "*.link", "*.mount", "*.path",
"*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target",
"*.timer",
]),
("taskpaper", &["*.taskpaper"]),
("tcl", &["*.tcl"]),
("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib", "*.dtx", "*.ins"]),
("textile", &["*.textile"]),
("tf", &["*.tf"]),
("thrift", &["*.thrift"]),
("toml", &["*.toml", "Cargo.lock"]),
("ts", &["*.ts", "*.tsx"]),
("twig", &["*.twig"]),
("txt", &["*.txt"]),
("typoscript", &["*.typoscript", "*.ts"]),
("vala", &["*.vala"]),
("vb", &["*.vb"]),
("vcl", &["*.vcl"]),
("verilog", &["*.v", "*.vh", "*.sv", "*.svh"]),
("vhdl", &["*.vhd", "*.vhdl"]),
("vim", &["*.vim"]),
("vimscript", &["*.vim"]),
("webidl", &["*.idl", "*.webidl", "*.widl"]),
("wiki", &["*.mediawiki", "*.wiki"]),
("xml", &[
"*.xml", "*.xml.dist", "*.dtd", "*.xsl", "*.xslt", "*.xsd", "*.xjb",
"*.rng", "*.sch", "*.xhtml",
]),
("xz", &["*.xz", "*.txz"]),
("yacc", &["*.y"]),
("yaml", &["*.yaml", "*.yml"]),
("z", &["*.Z"]),
("zig", &["*.zig"]),
("zsh", &[
".zshenv", "zshenv",
".zlogin", "zlogin",
".zlogout", "zlogout",
".zprofile", "zprofile",
".zshrc", "zshrc",
"*.zsh",
]),
("zstd", &["*.zst", "*.zstd"]),
];
| 35.296875 | 79 | 0.365649 |
d5f66c2e3d339cff36ee702bca4311ba2fe88523 | 1,256 | #[macro_use(err_obj)]
#[macro_use(err)]
extern crate dprint_core;
#[cfg(test)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
mod environment;
use dprint_core::types::ErrBox;
use std::sync::Arc;
use environment::RealEnvironment;
mod cache;
mod cli;
mod configuration;
mod plugins;
mod utils;
fn main() -> Result<(), ErrBox> {
match run() {
Ok(_) => {},
Err(err) => {
eprintln!("{}", err.to_string());
std::process::exit(1);
}
}
Ok(())
}
fn run() -> Result<(), ErrBox> {
let stdin_reader = cli::RealStdInReader::new();
let args = cli::parse_args(wild::args().collect(), &stdin_reader)?;
let environment = RealEnvironment::new(args.verbose, args.is_silent_output())?;
let cache = Arc::new(cache::Cache::new(environment.clone()));
let plugin_cache = Arc::new(plugins::PluginCache::new(environment.clone()));
let plugin_pools = Arc::new(plugins::PluginPools::new(environment.clone()));
let _plugins_dropper = plugins::PluginsDropper::new(plugin_pools.clone());
let plugin_resolver = plugins::PluginResolver::new(environment.clone(), plugin_cache, plugin_pools.clone());
cli::run_cli(args, &environment, &cache, &plugin_resolver, plugin_pools.clone())
}
| 28.545455 | 112 | 0.660032 |
4847f03298bbb3ade636ac83cf220fb8e48df5e8 | 470 | use std::io::Write;
use tempfile::NamedTempFile;
pub trait Client {
fn get_data(&self) -> Result<(Option<Vec<u8>>, Vec<u8>), String>;
}
pub fn download(content: Vec<u8>) {
let path = write_tmp_file(content);
open::that(path).unwrap();
}
fn write_tmp_file(content: Vec<u8>) -> std::path::PathBuf {
let mut tmp_file = NamedTempFile::new().unwrap();
tmp_file.write_all(&content).unwrap();
let (_file, path) = tmp_file.keep().unwrap();
path
}
| 24.736842 | 69 | 0.651064 |
2f1b2bd649a9d38e83e90ba00f27de63a564fdae | 26,028 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn serialize_structure_crate_input_batch_check_layer_availability_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::BatchCheckLayerAvailabilityInput,
) {
if let Some(var_1) = &input.registry_id {
object.key("registryId").string(var_1);
}
if let Some(var_2) = &input.repository_name {
object.key("repositoryName").string(var_2);
}
if let Some(var_3) = &input.layer_digests {
let mut array_4 = object.key("layerDigests").start_array();
for item_5 in var_3 {
{
array_4.value().string(item_5);
}
}
array_4.finish();
}
}
pub fn serialize_structure_crate_input_batch_delete_image_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::BatchDeleteImageInput,
) {
if let Some(var_6) = &input.registry_id {
object.key("registryId").string(var_6);
}
if let Some(var_7) = &input.repository_name {
object.key("repositoryName").string(var_7);
}
if let Some(var_8) = &input.image_ids {
let mut array_9 = object.key("imageIds").start_array();
for item_10 in var_8 {
{
let mut object_11 = array_9.value().start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(
&mut object_11,
item_10,
);
object_11.finish();
}
}
array_9.finish();
}
}
pub fn serialize_structure_crate_input_batch_get_image_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::BatchGetImageInput,
) {
if let Some(var_12) = &input.registry_id {
object.key("registryId").string(var_12);
}
if let Some(var_13) = &input.repository_name {
object.key("repositoryName").string(var_13);
}
if let Some(var_14) = &input.image_ids {
let mut array_15 = object.key("imageIds").start_array();
for item_16 in var_14 {
{
let mut object_17 = array_15.value().start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(
&mut object_17,
item_16,
);
object_17.finish();
}
}
array_15.finish();
}
if let Some(var_18) = &input.accepted_media_types {
let mut array_19 = object.key("acceptedMediaTypes").start_array();
for item_20 in var_18 {
{
array_19.value().string(item_20);
}
}
array_19.finish();
}
}
pub fn serialize_structure_crate_input_complete_layer_upload_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::CompleteLayerUploadInput,
) {
if let Some(var_21) = &input.registry_id {
object.key("registryId").string(var_21);
}
if let Some(var_22) = &input.repository_name {
object.key("repositoryName").string(var_22);
}
if let Some(var_23) = &input.upload_id {
object.key("uploadId").string(var_23);
}
if let Some(var_24) = &input.layer_digests {
let mut array_25 = object.key("layerDigests").start_array();
for item_26 in var_24 {
{
array_25.value().string(item_26);
}
}
array_25.finish();
}
}
pub fn serialize_structure_crate_input_create_repository_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::CreateRepositoryInput,
) {
if let Some(var_27) = &input.repository_name {
object.key("repositoryName").string(var_27);
}
if let Some(var_28) = &input.tags {
let mut array_29 = object.key("tags").start_array();
for item_30 in var_28 {
{
let mut object_31 = array_29.value().start_object();
crate::json_ser::serialize_structure_crate_model_tag(&mut object_31, item_30);
object_31.finish();
}
}
array_29.finish();
}
if let Some(var_32) = &input.image_tag_mutability {
object.key("imageTagMutability").string(var_32.as_str());
}
if let Some(var_33) = &input.image_scanning_configuration {
let mut object_34 = object.key("imageScanningConfiguration").start_object();
crate::json_ser::serialize_structure_crate_model_image_scanning_configuration(
&mut object_34,
var_33,
);
object_34.finish();
}
if let Some(var_35) = &input.encryption_configuration {
let mut object_36 = object.key("encryptionConfiguration").start_object();
crate::json_ser::serialize_structure_crate_model_encryption_configuration(
&mut object_36,
var_35,
);
object_36.finish();
}
}
pub fn serialize_structure_crate_input_delete_lifecycle_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DeleteLifecyclePolicyInput,
) {
if let Some(var_37) = &input.registry_id {
object.key("registryId").string(var_37);
}
if let Some(var_38) = &input.repository_name {
object.key("repositoryName").string(var_38);
}
}
pub fn serialize_structure_crate_input_delete_repository_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DeleteRepositoryInput,
) {
if let Some(var_39) = &input.registry_id {
object.key("registryId").string(var_39);
}
if let Some(var_40) = &input.repository_name {
object.key("repositoryName").string(var_40);
}
if input.force {
object.key("force").boolean(input.force);
}
}
pub fn serialize_structure_crate_input_delete_repository_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DeleteRepositoryPolicyInput,
) {
if let Some(var_41) = &input.registry_id {
object.key("registryId").string(var_41);
}
if let Some(var_42) = &input.repository_name {
object.key("repositoryName").string(var_42);
}
}
pub fn serialize_structure_crate_input_describe_images_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DescribeImagesInput,
) {
if let Some(var_43) = &input.registry_id {
object.key("registryId").string(var_43);
}
if let Some(var_44) = &input.repository_name {
object.key("repositoryName").string(var_44);
}
if let Some(var_45) = &input.image_ids {
let mut array_46 = object.key("imageIds").start_array();
for item_47 in var_45 {
{
let mut object_48 = array_46.value().start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(
&mut object_48,
item_47,
);
object_48.finish();
}
}
array_46.finish();
}
if let Some(var_49) = &input.next_token {
object.key("nextToken").string(var_49);
}
if let Some(var_50) = &input.max_results {
object.key("maxResults").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_50).into()),
);
}
if let Some(var_51) = &input.filter {
let mut object_52 = object.key("filter").start_object();
crate::json_ser::serialize_structure_crate_model_describe_images_filter(
&mut object_52,
var_51,
);
object_52.finish();
}
}
pub fn serialize_structure_crate_input_describe_image_scan_findings_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DescribeImageScanFindingsInput,
) {
if let Some(var_53) = &input.registry_id {
object.key("registryId").string(var_53);
}
if let Some(var_54) = &input.repository_name {
object.key("repositoryName").string(var_54);
}
if let Some(var_55) = &input.image_id {
let mut object_56 = object.key("imageId").start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(&mut object_56, var_55);
object_56.finish();
}
if let Some(var_57) = &input.next_token {
object.key("nextToken").string(var_57);
}
if let Some(var_58) = &input.max_results {
object.key("maxResults").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_58).into()),
);
}
}
pub fn serialize_structure_crate_input_describe_repositories_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::DescribeRepositoriesInput,
) {
if let Some(var_59) = &input.registry_id {
object.key("registryId").string(var_59);
}
if let Some(var_60) = &input.repository_names {
let mut array_61 = object.key("repositoryNames").start_array();
for item_62 in var_60 {
{
array_61.value().string(item_62);
}
}
array_61.finish();
}
if let Some(var_63) = &input.next_token {
object.key("nextToken").string(var_63);
}
if let Some(var_64) = &input.max_results {
object.key("maxResults").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_64).into()),
);
}
}
pub fn serialize_structure_crate_input_get_authorization_token_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::GetAuthorizationTokenInput,
) {
if let Some(var_65) = &input.registry_ids {
let mut array_66 = object.key("registryIds").start_array();
for item_67 in var_65 {
{
array_66.value().string(item_67);
}
}
array_66.finish();
}
}
pub fn serialize_structure_crate_input_get_download_url_for_layer_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::GetDownloadUrlForLayerInput,
) {
if let Some(var_68) = &input.registry_id {
object.key("registryId").string(var_68);
}
if let Some(var_69) = &input.repository_name {
object.key("repositoryName").string(var_69);
}
if let Some(var_70) = &input.layer_digest {
object.key("layerDigest").string(var_70);
}
}
pub fn serialize_structure_crate_input_get_lifecycle_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::GetLifecyclePolicyInput,
) {
if let Some(var_71) = &input.registry_id {
object.key("registryId").string(var_71);
}
if let Some(var_72) = &input.repository_name {
object.key("repositoryName").string(var_72);
}
}
pub fn serialize_structure_crate_input_get_lifecycle_policy_preview_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::GetLifecyclePolicyPreviewInput,
) {
if let Some(var_73) = &input.registry_id {
object.key("registryId").string(var_73);
}
if let Some(var_74) = &input.repository_name {
object.key("repositoryName").string(var_74);
}
if let Some(var_75) = &input.image_ids {
let mut array_76 = object.key("imageIds").start_array();
for item_77 in var_75 {
{
let mut object_78 = array_76.value().start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(
&mut object_78,
item_77,
);
object_78.finish();
}
}
array_76.finish();
}
if let Some(var_79) = &input.next_token {
object.key("nextToken").string(var_79);
}
if let Some(var_80) = &input.max_results {
object.key("maxResults").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_80).into()),
);
}
if let Some(var_81) = &input.filter {
let mut object_82 = object.key("filter").start_object();
crate::json_ser::serialize_structure_crate_model_lifecycle_policy_preview_filter(
&mut object_82,
var_81,
);
object_82.finish();
}
}
pub fn serialize_structure_crate_input_get_repository_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::GetRepositoryPolicyInput,
) {
if let Some(var_83) = &input.registry_id {
object.key("registryId").string(var_83);
}
if let Some(var_84) = &input.repository_name {
object.key("repositoryName").string(var_84);
}
}
pub fn serialize_structure_crate_input_initiate_layer_upload_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::InitiateLayerUploadInput,
) {
if let Some(var_85) = &input.registry_id {
object.key("registryId").string(var_85);
}
if let Some(var_86) = &input.repository_name {
object.key("repositoryName").string(var_86);
}
}
pub fn serialize_structure_crate_input_list_images_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::ListImagesInput,
) {
if let Some(var_87) = &input.registry_id {
object.key("registryId").string(var_87);
}
if let Some(var_88) = &input.repository_name {
object.key("repositoryName").string(var_88);
}
if let Some(var_89) = &input.next_token {
object.key("nextToken").string(var_89);
}
if let Some(var_90) = &input.max_results {
object.key("maxResults").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_90).into()),
);
}
if let Some(var_91) = &input.filter {
let mut object_92 = object.key("filter").start_object();
crate::json_ser::serialize_structure_crate_model_list_images_filter(&mut object_92, var_91);
object_92.finish();
}
}
pub fn serialize_structure_crate_input_list_tags_for_resource_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::ListTagsForResourceInput,
) {
if let Some(var_93) = &input.resource_arn {
object.key("resourceArn").string(var_93);
}
}
pub fn serialize_structure_crate_input_put_image_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutImageInput,
) {
if let Some(var_94) = &input.registry_id {
object.key("registryId").string(var_94);
}
if let Some(var_95) = &input.repository_name {
object.key("repositoryName").string(var_95);
}
if let Some(var_96) = &input.image_manifest {
object.key("imageManifest").string(var_96);
}
if let Some(var_97) = &input.image_manifest_media_type {
object.key("imageManifestMediaType").string(var_97);
}
if let Some(var_98) = &input.image_tag {
object.key("imageTag").string(var_98);
}
if let Some(var_99) = &input.image_digest {
object.key("imageDigest").string(var_99);
}
}
pub fn serialize_structure_crate_input_put_image_scanning_configuration_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutImageScanningConfigurationInput,
) {
if let Some(var_100) = &input.registry_id {
object.key("registryId").string(var_100);
}
if let Some(var_101) = &input.repository_name {
object.key("repositoryName").string(var_101);
}
if let Some(var_102) = &input.image_scanning_configuration {
let mut object_103 = object.key("imageScanningConfiguration").start_object();
crate::json_ser::serialize_structure_crate_model_image_scanning_configuration(
&mut object_103,
var_102,
);
object_103.finish();
}
}
pub fn serialize_structure_crate_input_put_image_tag_mutability_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutImageTagMutabilityInput,
) {
if let Some(var_104) = &input.registry_id {
object.key("registryId").string(var_104);
}
if let Some(var_105) = &input.repository_name {
object.key("repositoryName").string(var_105);
}
if let Some(var_106) = &input.image_tag_mutability {
object.key("imageTagMutability").string(var_106.as_str());
}
}
pub fn serialize_structure_crate_input_put_lifecycle_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutLifecyclePolicyInput,
) {
if let Some(var_107) = &input.registry_id {
object.key("registryId").string(var_107);
}
if let Some(var_108) = &input.repository_name {
object.key("repositoryName").string(var_108);
}
if let Some(var_109) = &input.lifecycle_policy_text {
object.key("lifecyclePolicyText").string(var_109);
}
}
pub fn serialize_structure_crate_input_put_registry_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutRegistryPolicyInput,
) {
if let Some(var_110) = &input.policy_text {
object.key("policyText").string(var_110);
}
}
pub fn serialize_structure_crate_input_put_replication_configuration_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::PutReplicationConfigurationInput,
) {
if let Some(var_111) = &input.replication_configuration {
let mut object_112 = object.key("replicationConfiguration").start_object();
crate::json_ser::serialize_structure_crate_model_replication_configuration(
&mut object_112,
var_111,
);
object_112.finish();
}
}
pub fn serialize_structure_crate_input_set_repository_policy_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::SetRepositoryPolicyInput,
) {
if let Some(var_113) = &input.registry_id {
object.key("registryId").string(var_113);
}
if let Some(var_114) = &input.repository_name {
object.key("repositoryName").string(var_114);
}
if let Some(var_115) = &input.policy_text {
object.key("policyText").string(var_115);
}
if input.force {
object.key("force").boolean(input.force);
}
}
pub fn serialize_structure_crate_input_start_image_scan_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::StartImageScanInput,
) {
if let Some(var_116) = &input.registry_id {
object.key("registryId").string(var_116);
}
if let Some(var_117) = &input.repository_name {
object.key("repositoryName").string(var_117);
}
if let Some(var_118) = &input.image_id {
let mut object_119 = object.key("imageId").start_object();
crate::json_ser::serialize_structure_crate_model_image_identifier(&mut object_119, var_118);
object_119.finish();
}
}
pub fn serialize_structure_crate_input_start_lifecycle_policy_preview_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::StartLifecyclePolicyPreviewInput,
) {
if let Some(var_120) = &input.registry_id {
object.key("registryId").string(var_120);
}
if let Some(var_121) = &input.repository_name {
object.key("repositoryName").string(var_121);
}
if let Some(var_122) = &input.lifecycle_policy_text {
object.key("lifecyclePolicyText").string(var_122);
}
}
pub fn serialize_structure_crate_input_tag_resource_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::TagResourceInput,
) {
if let Some(var_123) = &input.resource_arn {
object.key("resourceArn").string(var_123);
}
if let Some(var_124) = &input.tags {
let mut array_125 = object.key("tags").start_array();
for item_126 in var_124 {
{
let mut object_127 = array_125.value().start_object();
crate::json_ser::serialize_structure_crate_model_tag(&mut object_127, item_126);
object_127.finish();
}
}
array_125.finish();
}
}
pub fn serialize_structure_crate_input_untag_resource_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::UntagResourceInput,
) {
if let Some(var_128) = &input.resource_arn {
object.key("resourceArn").string(var_128);
}
if let Some(var_129) = &input.tag_keys {
let mut array_130 = object.key("tagKeys").start_array();
for item_131 in var_129 {
{
array_130.value().string(item_131);
}
}
array_130.finish();
}
}
pub fn serialize_structure_crate_input_upload_layer_part_input(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::input::UploadLayerPartInput,
) {
if let Some(var_132) = &input.registry_id {
object.key("registryId").string(var_132);
}
if let Some(var_133) = &input.repository_name {
object.key("repositoryName").string(var_133);
}
if let Some(var_134) = &input.upload_id {
object.key("uploadId").string(var_134);
}
if let Some(var_135) = &input.part_first_byte {
object.key("partFirstByte").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_135).into()),
);
}
if let Some(var_136) = &input.part_last_byte {
object.key("partLastByte").number(
#[allow(clippy::useless_conversion)]
smithy_types::Number::NegInt((*var_136).into()),
);
}
if let Some(var_137) = &input.layer_part_blob {
object
.key("layerPartBlob")
.string_unchecked(&smithy_types::base64::encode(var_137));
}
}
pub fn serialize_structure_crate_model_image_identifier(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ImageIdentifier,
) {
if let Some(var_138) = &input.image_digest {
object.key("imageDigest").string(var_138);
}
if let Some(var_139) = &input.image_tag {
object.key("imageTag").string(var_139);
}
}
pub fn serialize_structure_crate_model_tag(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::Tag,
) {
if let Some(var_140) = &input.key {
object.key("Key").string(var_140);
}
if let Some(var_141) = &input.value {
object.key("Value").string(var_141);
}
}
pub fn serialize_structure_crate_model_image_scanning_configuration(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ImageScanningConfiguration,
) {
if input.scan_on_push {
object.key("scanOnPush").boolean(input.scan_on_push);
}
}
pub fn serialize_structure_crate_model_encryption_configuration(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::EncryptionConfiguration,
) {
if let Some(var_142) = &input.encryption_type {
object.key("encryptionType").string(var_142.as_str());
}
if let Some(var_143) = &input.kms_key {
object.key("kmsKey").string(var_143);
}
}
pub fn serialize_structure_crate_model_describe_images_filter(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::DescribeImagesFilter,
) {
if let Some(var_144) = &input.tag_status {
object.key("tagStatus").string(var_144.as_str());
}
}
pub fn serialize_structure_crate_model_lifecycle_policy_preview_filter(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::LifecyclePolicyPreviewFilter,
) {
if let Some(var_145) = &input.tag_status {
object.key("tagStatus").string(var_145.as_str());
}
}
pub fn serialize_structure_crate_model_list_images_filter(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ListImagesFilter,
) {
if let Some(var_146) = &input.tag_status {
object.key("tagStatus").string(var_146.as_str());
}
}
pub fn serialize_structure_crate_model_replication_configuration(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ReplicationConfiguration,
) {
if let Some(var_147) = &input.rules {
let mut array_148 = object.key("rules").start_array();
for item_149 in var_147 {
{
let mut object_150 = array_148.value().start_object();
crate::json_ser::serialize_structure_crate_model_replication_rule(
&mut object_150,
item_149,
);
object_150.finish();
}
}
array_148.finish();
}
}
pub fn serialize_structure_crate_model_replication_rule(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ReplicationRule,
) {
if let Some(var_151) = &input.destinations {
let mut array_152 = object.key("destinations").start_array();
for item_153 in var_151 {
{
let mut object_154 = array_152.value().start_object();
crate::json_ser::serialize_structure_crate_model_replication_destination(
&mut object_154,
item_153,
);
object_154.finish();
}
}
array_152.finish();
}
}
pub fn serialize_structure_crate_model_replication_destination(
object: &mut smithy_json::serialize::JsonObjectWriter,
input: &crate::model::ReplicationDestination,
) {
if let Some(var_155) = &input.region {
object.key("region").string(var_155);
}
if let Some(var_156) = &input.registry_id {
object.key("registryId").string(var_156);
}
}
| 33.979112 | 100 | 0.646266 |
e67eaa5e6a8401eb56f96b4377657d48913e7f21 | 31,395 | macro_rules! curve_impl {
(
$name:expr,
$projective:ident,
$affine:ident,
$prepared:ident,
$basefield:ident,
$scalarfield:ident,
$uncompressed:ident,
$compressed:ident,
$pairing:ident,
$iso_1:expr,
$iso_2:expr
) => {
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct $affine {
pub(crate) x: $basefield,
pub(crate) y: $basefield,
pub(crate) infinity: bool,
}
impl ::std::fmt::Display for $affine {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
if self.infinity {
write!(f, "{}(Infinity)", $name)
} else {
write!(f, "{}(x={}, y={})", $name, self.x, self.y)
}
}
}
fn y2_from_x(x: $basefield) -> $basefield {
let mut y2 = x.clone();
y2.square();
y2.mul_assign(&x);
y2.add_assign(&$affine::get_coeff_b());
y2
}
#[derive(Copy, Clone, Debug, Eq)]
pub struct $projective {
pub(crate) x: $basefield,
pub(crate) y: $basefield,
pub(crate) z: $basefield,
}
impl ::std::fmt::Display for $projective {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
write!(f, "{}", self.into_affine())
}
}
impl PartialEq for $projective {
fn eq(&self, other: &$projective) -> bool {
if self.is_zero() {
return other.is_zero();
}
if other.is_zero() {
return false;
}
if self.is_normalized() {
if other.is_normalized() {
return self.into_affine() == other.into_affine();
}
}
            // The points (X, Y, Z) and (X', Y', Z')
            // are equal when (X * Z'^2) = (X' * Z^2)
            // and (Y * Z'^3) = (Y' * Z^3).
let mut z1 = self.z;
z1.square();
let mut z2 = other.z;
z2.square();
let mut tmp1 = self.x;
tmp1.mul_assign(&z2);
let mut tmp2 = other.x;
tmp2.mul_assign(&z1);
if tmp1 != tmp2 {
return false;
}
z1.mul_assign(&self.z);
z2.mul_assign(&other.z);
z2.mul_assign(&self.y);
z1.mul_assign(&other.y);
if z1 != z2 {
return false;
}
true
}
}
impl $projective {
/// Generic isogeny evaluation function.
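        /// The four coefficient slices are, in order, the x numerator, x
        /// denominator, y numerator and y denominator of the isogeny map
        /// (this ordering is inferred from how `mapvals` is combined below,
        /// not stated explicitly by the original code).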
pub(crate) fn eval_iso(&mut self, coeffs: [&[$basefield]; 4]) {
// Rust (still) can't handle generic array sizes (issue #43408)
let mut tmp = [$basefield::zero(); $iso_1];
let mut mapvals = [$basefield::zero(); 4];
// scope for pt borrow
{
// unpack input point
let x = &self.x;
let y = &self.y;
let z = &self.z;
// precompute powers of z
let zpows = {
let mut zpows = [$basefield::zero(); $iso_2];
zpows[0] = *z;
zpows[0].square(); // z^2
zpows[1] = zpows[0];
zpows[1].square(); // z^4
{
let (z_squared, rest) = zpows.split_at_mut(1);
for idx in 1..coeffs[2].len() - 2 {
if idx % 2 == 0 {
rest[idx] = rest[idx / 2 - 1];
rest[idx].square();
} else {
rest[idx] = rest[idx - 1];
rest[idx].mul_assign(&z_squared[0]);
}
}
}
zpows
};
for idx in 0..4 {
let clen = coeffs[idx].len() - 1;
// multiply coeffs by powers of Z
for jdx in 0..clen {
tmp[jdx] = coeffs[idx][clen - 1 - jdx];
tmp[jdx].mul_assign(&zpows[jdx]);
}
// compute map value by Horner's rule
mapvals[idx] = coeffs[idx][clen];
for tmpval in &tmp[..clen] {
mapvals[idx].mul_assign(x);
mapvals[idx].add_assign(tmpval);
}
}
// x denominator is order 1 less than x numerator, so we need an extra factor of Z^2
mapvals[1].mul_assign(&zpows[0]);
// multiply result of Y map by the y-coord, y / z^3
mapvals[2].mul_assign(y);
mapvals[3].mul_assign(z);
mapvals[3].mul_assign(&zpows[0]);
} // pt is no longer borrowed here
// hack to simultaneously access elements of tmp
let (xx, yy, zz) = {
let (xx, rest) = tmp.split_at_mut(1);
let (yy, rest) = rest.split_at_mut(1);
(&mut xx[0], &mut yy[0], &mut rest[0])
};
// compute Jacobian coordinates of resulting point
*zz = mapvals[1];
zz.mul_assign(&mapvals[3]); // Zout = xden * yden
*xx = mapvals[0];
xx.mul_assign(&mapvals[3]); // xnum * yden
xx.mul_assign(zz); // xnum * xden * yden^2
*yy = *zz;
yy.square(); // xden^2 * yden^2
yy.mul_assign(&mapvals[2]); // ynum * xden^2 * yden^2
yy.mul_assign(&mapvals[1]); // ynum * xden^3 * yden^2
self.x = *xx;
self.y = *yy;
self.z = *zz;
}
}
impl $affine {
fn mul_bits<S: AsRef<[u64]>>(&self, bits: BitIterator<S>) -> $projective {
let mut res = $projective::zero();
for i in bits {
res.double();
if i {
res.add_assign_mixed(self)
}
}
res
}
/// Attempts to construct an affine point given an x-coordinate. The
/// point is not guaranteed to be in the prime order subgroup.
///
/// If and only if `greatest` is set will the lexicographically
/// largest y-coordinate be selected.
fn get_point_from_x(x: $basefield, greatest: bool) -> Option<$affine> {
// Compute x^3 + b
let mut x3b = x;
x3b.square();
x3b.mul_assign(&x);
x3b.add_assign(&$affine::get_coeff_b());
x3b.sqrt().map(|y| {
let mut negy = y;
negy.negate();
$affine {
x: x,
y: if (y < negy) ^ greatest { y } else { negy },
infinity: false,
}
})
}
fn is_on_curve(&self) -> bool {
if self.is_zero() {
true
} else {
// Check that the point is on the curve
let mut y2 = self.y;
y2.square();
y2 == y2_from_x(self.x)
}
}
fn is_in_correct_subgroup_assuming_on_curve(&self) -> bool {
self.mul($scalarfield::char()).is_zero()
}
/// Implements the Shallue–van de Woestijne encoding described in
/// Section 3, "Indifferentiable Hashing to Barreto–Naehrig Curves"
        /// from Fouque-Tibouchi: <https://www.di.ens.fr/~fouque/pub/latincrypt12.pdf>.
///
/// The encoding is adapted for BLS12-381.
///
/// This encoding produces a point in E/E'. It does not reach every
/// point. The resulting point may not be in the prime order subgroup,
/// but it will be on the curve. It could be the point at infinity.
///
/// ## Description
///
/// Lemma 3 gives us three points:
///
/// x_1 = (-1 + sqrt(-3))/2 - (sqrt(-3) * t^2)/(1 + b + t^2)
/// x_2 = (-1 - sqrt(-3))/2 + (sqrt(-3) * t^2)/(1 + b + t^2)
/// x_3 = 1 - (1 + b + t^2)^2/(3 * t^2)
///
/// Given t != 0 and t != 1 + b + t^2 != 0, at least one of
/// these three points (x1, x2, x3) is valid on the curve.
///
/// In the paper, 1 + b + t^2 != 0 has no solutions, but for
/// E(Fq) in our construction, it does have two solutions.
/// We follow the convention of the paper by mapping these
/// to some arbitrary points; in our case, the positive/negative
/// fixed generator (with the parity of the y-coordinate
/// corresponding to the t value).
///
/// Unlike the paper, which maps t = 0 to an arbitrary point,
/// we map it to the point at infinity. This arrangement allows
/// us to preserve sw_encode(t) = sw_encode(-t) for all t.
///
/// We choose the smallest i such that x_i is on the curve.
/// We choose the corresponding y-coordinate with the same
/// parity, defined as the point being lexicographically larger
/// than its negative.
fn sw_encode(t: $basefield) -> Self {
// Handle the case t == 0
if t.is_zero() {
return Self::zero();
}
// We choose the corresponding y-coordinate with the same parity as t.
let parity = t.parity();
// w = (t^2 + b + 1)^(-1) * sqrt(-3) * t
let mut w = t;
w.square();
w.add_assign(&$affine::get_coeff_b());
w.add_assign(&$basefield::one());
// Handle the case t^2 + b + 1 == 0
if w.is_zero() {
let mut ret = Self::one();
if parity {
ret.negate()
}
return ret;
}
w = w.inverse().unwrap();
w.mul_assign(&$basefield::get_swenc_sqrt_neg_three());
w.mul_assign(&t);
// x1 = - wt + (sqrt(-3) - 1) / 2
let mut x1 = w;
x1.mul_assign(&t);
x1.negate();
x1.add_assign(&$basefield::get_swenc_sqrt_neg_three_minus_one_div_two());
if let Some(p) = Self::get_point_from_x(x1, parity) {
return p;
}
// x2 = -1 - x1
let mut x2 = x1;
x2.negate();
x2.sub_assign(&$basefield::one());
if let Some(p) = Self::get_point_from_x(x2, parity) {
return p;
}
// x3 = 1/w^2 + 1
let mut x3 = w;
x3.square();
x3 = x3.inverse().unwrap();
x3.add_assign(&$basefield::one());
Self::get_point_from_x(x3, parity)
.expect("this point must be valid if the other two are not")
}
}
impl CurveAffine for $affine {
type Engine = Bls12;
type Scalar = $scalarfield;
type Base = $basefield;
type Projective = $projective;
type Uncompressed = $uncompressed;
type Compressed = $compressed;
fn zero() -> Self {
$affine {
x: $basefield::zero(),
y: $basefield::one(),
infinity: true,
}
}
fn one() -> Self {
Self::get_generator()
}
fn is_zero(&self) -> bool {
self.infinity
}
fn mul<S: Into<<Self::Scalar as PrimeField>::Repr>>(&self, by: S) -> $projective {
let bits = BitIterator::new(by.into());
self.mul_bits(bits)
}
fn negate(&mut self) {
if !self.is_zero() {
self.y.negate();
}
}
fn into_projective(&self) -> $projective {
(*self).into()
}
}
impl PairingCurveAffine for $affine {
type Prepared = $prepared;
type Pair = $pairing;
type PairingResult = Fq12;
fn prepare(&self) -> Self::Prepared {
$prepared::from_affine(*self)
}
fn pairing_with(&self, other: &Self::Pair) -> Self::PairingResult {
self.perform_pairing(other)
}
}
impl CurveProjective for $projective {
type Engine = Bls12;
type Scalar = $scalarfield;
type Base = $basefield;
type Affine = $affine;
fn random<R: RngCore>(rng: &mut R) -> Self {
loop {
let x = $basefield::random(rng);
let greatest = rng.next_u32() % 2 != 0;
if let Some(p) = $affine::get_point_from_x(x, greatest) {
let p = p.scale_by_cofactor();
if !p.is_zero() {
return p;
}
}
}
}
// The point at infinity is always represented by
// Z = 0.
fn zero() -> Self {
$projective {
x: $basefield::zero(),
y: $basefield::one(),
z: $basefield::zero(),
}
}
fn one() -> Self {
$affine::one().into()
}
// The point at infinity is always represented by
// Z = 0.
fn is_zero(&self) -> bool {
self.z.is_zero()
}
fn is_normalized(&self) -> bool {
self.is_zero() || self.z == $basefield::one()
}
fn batch_normalization<S: std::borrow::BorrowMut<Self>>(v: &mut [S]) {
// Montgomery’s Trick and Fast Implementation of Masked AES
// Genelle, Prouff and Quisquater
// Section 3.2
// First pass: compute [a, ab, abc, ...]
let mut prod = Vec::with_capacity(v.len());
let mut tmp = $basefield::one();
for g in v
.iter_mut()
.map(|g| g.borrow_mut())
// Ignore normalized elements
.filter(|g| !g.is_normalized())
{
tmp.mul_assign(&g.z);
prod.push(tmp);
}
// Invert `tmp`.
tmp = tmp.inverse().unwrap(); // Guaranteed to be nonzero.
// Second pass: iterate backwards to compute inverses
for (g, s) in v
.iter_mut()
.map(|g| g.borrow_mut())
// Backwards
.rev()
// Ignore normalized elements
.filter(|g| !g.is_normalized())
// Backwards, skip last element, fill in one for last term.
.zip(
prod.into_iter()
.rev()
.skip(1)
.chain(Some($basefield::one())),
)
{
// tmp := tmp * g.z; g.z := tmp * s = 1/z
let mut newtmp = tmp;
newtmp.mul_assign(&g.z);
g.z = tmp;
g.z.mul_assign(&s);
tmp = newtmp;
}
// Perform affine transformations
for g in v
.iter_mut()
.map(|g| g.borrow_mut())
.filter(|g| !g.is_normalized())
{
let mut z = g.z; // 1/z
z.square(); // 1/z^2
g.x.mul_assign(&z); // x/z^2
z.mul_assign(&g.z); // 1/z^3
g.y.mul_assign(&z); // y/z^3
g.z = $basefield::one(); // z = 1
}
}
fn double(&mut self) {
if self.is_zero() {
return;
}
// Other than the point at infinity, no points on E or E'
// can double to equal the point at infinity, as y=0 is
// never true for points on the curve. (-4 and -4u-4
            // are not cubic residues in their respective fields.)
// http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l
// A = X1^2
let mut a = self.x;
a.square();
// B = Y1^2
let mut b = self.y;
b.square();
// C = B^2
let mut c = b;
c.square();
// D = 2*((X1+B)2-A-C)
let mut d = self.x;
d.add_assign(&b);
d.square();
d.sub_assign(&a);
d.sub_assign(&c);
d.double();
// E = 3*A
let mut e = a;
e.double();
e.add_assign(&a);
// F = E^2
let mut f = e;
f.square();
// Z3 = 2*Y1*Z1
self.z.mul_assign(&self.y);
self.z.double();
// X3 = F-2*D
self.x = f;
self.x.sub_assign(&d);
self.x.sub_assign(&d);
// Y3 = E*(D-X3)-8*C
self.y = d;
self.y.sub_assign(&self.x);
self.y.mul_assign(&e);
c.double();
c.double();
c.double();
self.y.sub_assign(&c);
}
fn add_assign(&mut self, other: &Self) {
if self.is_zero() {
*self = *other;
return;
}
if other.is_zero() {
return;
}
// http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl
// Z1Z1 = Z1^2
let mut z1z1 = self.z;
z1z1.square();
// Z2Z2 = Z2^2
let mut z2z2 = other.z;
z2z2.square();
// U1 = X1*Z2Z2
let mut u1 = self.x;
u1.mul_assign(&z2z2);
// U2 = X2*Z1Z1
let mut u2 = other.x;
u2.mul_assign(&z1z1);
// S1 = Y1*Z2*Z2Z2
let mut s1 = self.y;
s1.mul_assign(&other.z);
s1.mul_assign(&z2z2);
// S2 = Y2*Z1*Z1Z1
let mut s2 = other.y;
s2.mul_assign(&self.z);
s2.mul_assign(&z1z1);
if u1 == u2 && s1 == s2 {
// The two points are equal, so we double.
self.double();
} else {
// If we're adding -a and a together, self.z becomes zero as H becomes zero.
// H = U2-U1
let mut h = u2;
h.sub_assign(&u1);
// I = (2*H)^2
let mut i = h;
i.double();
i.square();
// J = H*I
let mut j = h;
j.mul_assign(&i);
// r = 2*(S2-S1)
let mut r = s2;
r.sub_assign(&s1);
r.double();
// V = U1*I
let mut v = u1;
v.mul_assign(&i);
// X3 = r^2 - J - 2*V
self.x = r;
self.x.square();
self.x.sub_assign(&j);
self.x.sub_assign(&v);
self.x.sub_assign(&v);
// Y3 = r*(V - X3) - 2*S1*J
self.y = v;
self.y.sub_assign(&self.x);
self.y.mul_assign(&r);
s1.mul_assign(&j); // S1 = S1 * J * 2
s1.double();
self.y.sub_assign(&s1);
// Z3 = ((Z1+Z2)^2 - Z1Z1 - Z2Z2)*H
self.z.add_assign(&other.z);
self.z.square();
self.z.sub_assign(&z1z1);
self.z.sub_assign(&z2z2);
self.z.mul_assign(&h);
}
}
fn add_assign_mixed(&mut self, other: &Self::Affine) {
if other.is_zero() {
return;
}
if self.is_zero() {
self.x = other.x;
self.y = other.y;
self.z = $basefield::one();
return;
}
// http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-madd-2007-bl
// Z1Z1 = Z1^2
let mut z1z1 = self.z;
z1z1.square();
// U2 = X2*Z1Z1
let mut u2 = other.x;
u2.mul_assign(&z1z1);
// S2 = Y2*Z1*Z1Z1
let mut s2 = other.y;
s2.mul_assign(&self.z);
s2.mul_assign(&z1z1);
if self.x == u2 && self.y == s2 {
// The two points are equal, so we double.
self.double();
} else {
// If we're adding -a and a together, self.z becomes zero as H becomes zero.
// H = U2-X1
let mut h = u2;
h.sub_assign(&self.x);
// HH = H^2
let mut hh = h;
hh.square();
// I = 4*HH
let mut i = hh;
i.double();
i.double();
// J = H*I
let mut j = h;
j.mul_assign(&i);
// r = 2*(S2-Y1)
let mut r = s2;
r.sub_assign(&self.y);
r.double();
// V = X1*I
let mut v = self.x;
v.mul_assign(&i);
// X3 = r^2 - J - 2*V
self.x = r;
self.x.square();
self.x.sub_assign(&j);
self.x.sub_assign(&v);
self.x.sub_assign(&v);
// Y3 = r*(V-X3)-2*Y1*J
j.mul_assign(&self.y); // J = 2*Y1*J
j.double();
self.y = v;
self.y.sub_assign(&self.x);
self.y.mul_assign(&r);
self.y.sub_assign(&j);
// Z3 = (Z1+H)^2-Z1Z1-HH
self.z.add_assign(&h);
self.z.square();
self.z.sub_assign(&z1z1);
self.z.sub_assign(&hh);
}
}
fn negate(&mut self) {
if !self.is_zero() {
self.y.negate()
}
}
fn mul_assign<S: Into<<Self::Scalar as PrimeField>::Repr>>(&mut self, other: S) {
let mut res = Self::zero();
let mut found_one = false;
for i in BitIterator::new(other.into()) {
if found_one {
res.double();
} else {
found_one = i;
}
if i {
res.add_assign(self);
}
}
*self = res;
}
fn into_affine(&self) -> $affine {
(*self).into()
}
fn recommended_wnaf_for_scalar(scalar: <Self::Scalar as PrimeField>::Repr) -> usize {
Self::empirical_recommended_wnaf_for_scalar(scalar)
}
fn recommended_wnaf_for_num_scalars(num_scalars: usize) -> usize {
Self::empirical_recommended_wnaf_for_num_scalars(num_scalars)
}
/// Implements "Indifferentiable Hashing to Barreto–Naehrig Curves" from Foque-Tibouchi.
/// <https://www.di.ens.fr/~fouque/pub/latincrypt12.pdf>
fn hash(msg: &[u8]) -> Self {
            // The construction of Fouque et al. requires us to construct two
// "random oracles" in the field, encode their image with `sw_encode`,
// and finally add them.
// We construct them appending to the message the string
// $name_$oracle
// For instance, the first oracle in group G1 appends: "G1_0".
let mut hasher_0 = blake2b_simd::State::new();
hasher_0.update(msg);
#[allow(clippy::string_lit_as_bytes)]
hasher_0.update($name.as_bytes());
let mut hasher_1 = hasher_0.clone();
hasher_0.update(b"_0");
let t0 = Self::Base::hash(hasher_0);
let t0 = Self::Affine::sw_encode(t0);
hasher_1.update(b"_1");
let t1 = Self::Base::hash(hasher_1);
let t1 = Self::Affine::sw_encode(t1);
let mut res = t0.into_projective();
res.add_assign_mixed(&t1);
res.into_affine().scale_by_cofactor()
}
}
// The affine point X, Y is represented in the jacobian
// coordinates with Z = 1.
impl From<$affine> for $projective {
fn from(p: $affine) -> $projective {
if p.is_zero() {
$projective::zero()
} else {
$projective {
x: p.x,
y: p.y,
z: $basefield::one(),
}
}
}
}
// The projective point X, Y, Z is represented in the affine
// coordinates as X/Z^2, Y/Z^3.
impl From<$projective> for $affine {
fn from(p: $projective) -> $affine {
if p.is_zero() {
$affine::zero()
} else if p.z == $basefield::one() {
// If Z is one, the point is already normalized.
$affine {
x: p.x,
y: p.y,
infinity: false,
}
} else {
// Z is nonzero, so it must have an inverse in a field.
let zinv = p.z.inverse().unwrap();
let mut zinv_powered = zinv;
zinv_powered.square();
// X/Z^2
let mut x = p.x;
x.mul_assign(&zinv_powered);
// Y/Z^3
let mut y = p.y;
zinv_powered.mul_assign(&zinv);
y.mul_assign(&zinv_powered);
$affine {
x: x,
y: y,
infinity: false,
}
}
}
}
#[cfg(test)]
use rand_core::SeedableRng;
#[cfg(test)]
use rand_xorshift::XorShiftRng;
#[test]
fn test_hash() {
let mut rng = XorShiftRng::from_seed([
0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06,
0xbc, 0xe5,
]);
let mut seed: [u8; 32] = [0u8; 32];
for _ in 0..100 {
rng.fill_bytes(&mut seed);
let p = $projective::hash(&seed).into_affine();
assert!(!p.is_zero());
assert!(p.is_on_curve());
assert!(p.is_in_correct_subgroup_assuming_on_curve());
}
}
#[test]
fn test_sw_encode() {
let mut rng = XorShiftRng::from_seed([
0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06,
0xbc, 0xe5,
]);
for _ in 0..100 {
let mut t = $basefield::random(&mut rng);
let p = $affine::sw_encode(t);
assert!(p.is_on_curve());
assert!(!p.is_zero());
t.negate();
let mut minus_p = $affine::sw_encode(t).into_projective();
minus_p.add_assign_mixed(&p);
assert!(minus_p.is_zero());
}
}
};
}
macro_rules! encoded_point_delegations {
($t:ident) => {
impl AsRef<[u8]> for $t {
fn as_ref(&self) -> &[u8] {
&self.0
}
}
impl AsMut<[u8]> for $t {
fn as_mut(&mut self) -> &mut [u8] {
&mut self.0
}
}
impl PartialEq for $t {
fn eq(&self, other: &$t) -> bool {
PartialEq::eq(&self.0[..], &other.0[..])
}
}
impl Eq for $t {}
impl PartialOrd for $t {
fn partial_cmp(&self, other: &$t) -> Option<::std::cmp::Ordering> {
PartialOrd::partial_cmp(&self.0[..], &other.0[..])
}
}
impl Ord for $t {
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
Ord::cmp(&self.0[..], &other.0[..])
}
}
impl ::std::hash::Hash for $t {
fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
self.0[..].hash(state);
}
}
};
} // encoded_point_delegations
mod chain;
mod g1;
mod g2;
mod util;
pub use self::g1::*;
pub use self::g2::*;
use crate::config;
use crate::db::{Db, Dir};
use crate::util;
use anyhow::{bail, Context, Result};
use structopt::StructOpt;
use std::fs;
use std::path::{Path, PathBuf};
/// Import from z database
#[derive(Debug, StructOpt)]
#[structopt()]
pub struct Import {
path: PathBuf,
/// Merge entries into existing database
#[structopt(long)]
merge: bool,
}
impl Import {
pub fn run(&self) -> Result<()> {
import(&self.path, self.merge)
}
}
fn import<P: AsRef<Path>>(path: P, merge: bool) -> Result<()> {
let path = path.as_ref();
let mut db = util::get_db()?;
if !db.dirs.is_empty() && !merge {
bail!(
"To prevent conflicts, you can only import from z with an empty zoxide database!\n\
If you wish to merge the two, specify the `--merge` flag."
);
}
let buffer = fs::read_to_string(&path)
.with_context(|| format!("could not read z database: {}", path.display()))?;
for (idx, line) in buffer.lines().enumerate() {
if let Err(e) = import_line(&mut db, line, config::zo_resolve_symlinks()) {
let line_num = idx + 1;
eprintln!("Error on line {}: {}", line_num, e);
}
}
db.modified = true;
println!("Completed import.");
Ok(())
}
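// Each line of a z data file is expected to look like `path|rank|epoch`, for
// example (illustrative values only): `/home/user/projects|12.5|1556006562`.
// Fields are split from the right so that paths containing '|' still parse.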
fn import_line(db: &mut Db, line: &str, resolve_symlinks: bool) -> Result<()> {
let mut split_line = line.rsplitn(3, '|');
let (path, epoch_str, rank_str) = (|| {
let epoch_str = split_line.next()?;
let rank_str = split_line.next()?;
let path = split_line.next()?;
Some((path, epoch_str, rank_str))
})()
.with_context(|| format!("invalid entry: {}", line))?;
let epoch = epoch_str
.parse::<i64>()
.with_context(|| format!("invalid epoch: {}", epoch_str))?;
let rank = rank_str
.parse::<f64>()
.with_context(|| format!("invalid rank: {}", rank_str))?;
let path = if resolve_symlinks {
util::canonicalize(&path)?
} else {
util::resolve_path(&path)?
};
let path = util::path_to_str(&path)?;
// If the path exists in the database, add the ranks and set the epoch to
// the more recent of the parsed epoch and the already present epoch.
if let Some(dir) = db.dirs.iter_mut().find(|dir| dir.path == path) {
dir.rank += rank;
dir.last_accessed = epoch.max(dir.last_accessed);
} else {
db.dirs.push(Dir {
path: path.to_string(),
rank,
last_accessed: epoch,
});
}
Ok(())
}
use std::{
env, fs,
io::{self, Write},
};
use noodles::{
bcf,
vcf::{
self,
record::info::field::{Key, Value},
},
};
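/// Reads the `AN` (total allele count) INFO field from a BCF record, returning
/// `None` when the field is absent or does not hold an integer value.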
fn get_allele_count(
record: &bcf::Record,
header: &vcf::Header,
string_map: &bcf::header::StringMap,
) -> io::Result<Option<i32>> {
Ok(record
.info()
.get(header, string_map, &Key::TotalAlleleCount)
.transpose()?
.and_then(|field| match field.value() {
Some(Value::Integer(allele_count)) => Some(*allele_count),
_ => None,
}))
}
fn main() -> io::Result<()> {
let path = env::args().nth(1).expect("missing BCF path");
let mut reader = fs::File::open(path)
.map(io::BufReader::new)
.map(bcf::Reader::new)?;
reader.read_file_format()?;
let raw_header = reader.read_header()?;
let header = raw_header.parse().expect("error parsing header");
let string_map = raw_header.parse().expect("error parsing header");
let mut record = bcf::Record::default();
let mut allele_counts = Vec::new();
while reader.read_record(&mut record)? != 0 {
let allele_count = get_allele_count(&record, &header, &string_map)?
.expect("missing or unexpected AN field");
allele_counts.push(allele_count);
}
let total = allele_counts.iter().sum::<i32>();
let n = allele_counts.len();
writeln!(io::stderr(), "{}", total as f64 / n as f64)
}
use std::collections::HashMap;
use std::fmt::Write;
use cast::u64;
use quote::Tokens;
use svd::Peripheral;
use syn::Ident;
use errors::*;
use util::{self, ToSanitizedUpperCase};
use Target;
/// Generates code for `src/interrupt.rs`
pub fn render(
target: &Target,
peripherals: &[Peripheral],
device_x: &mut String,
) -> Result<Vec<Tokens>> {
let interrupts = peripherals
.iter()
.flat_map(|p| p.interrupt.iter())
.map(|i| (i.value, i))
.collect::<HashMap<_, _>>();
let mut interrupts = interrupts.into_iter().map(|(_, v)| v).collect::<Vec<_>>();
interrupts.sort_by_key(|i| i.value);
let mut root = vec![];
let mut arms = vec![];
let mut from_arms = vec![];
let mut elements = vec![];
let mut names = vec![];
let mut variants = vec![];
// Current position in the vector table
let mut pos = 0;
let mut mod_items = vec![];
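    // For each interrupt (sorted by vector position): pad any gap in the vector
    // table with reserved entries, then emit its enum variant, its `Nr` match arm,
    // its `try_from` arm, and its vector-table element.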
for interrupt in &interrupts {
while pos < interrupt.value {
elements.push(quote!(Vector { _reserved: 0 }));
pos += 1;
}
pos += 1;
let name_uc = Ident::new(interrupt.name.to_sanitized_upper_case());
let description = format!(
"{} - {}",
interrupt.value,
interrupt
.description
.as_ref()
.map(|s| util::respace(s))
.as_ref()
.map(|s| util::escape_brackets(s))
.unwrap_or_else(|| interrupt.name.clone())
);
let value = util::unsuffixed(u64(interrupt.value));
variants.push(quote! {
#[doc = #description]
#name_uc,
});
arms.push(quote! {
Interrupt::#name_uc => #value,
});
from_arms.push(quote! {
#value => Ok(Interrupt::#name_uc),
});
elements.push(quote!(Vector { _handler: #name_uc }));
names.push(name_uc);
}
let n = util::unsuffixed(u64(pos));
match *target {
Target::CortexM => {
for name in &names {
                writeln!(device_x, "PROVIDE({} = DefaultHandler);", name).unwrap();
}
root.push(quote! {
#[cfg(feature = "rt")]
extern "C" {
#(fn #names();)*
}
#[doc(hidden)]
pub union Vector {
_handler: unsafe extern "C" fn(),
_reserved: u32,
}
#[cfg(feature = "rt")]
#[doc(hidden)]
#[link_section = ".vector_table.interrupts"]
#[no_mangle]
pub static __INTERRUPTS: [Vector; #n] = [
#(#elements,)*
];
});
}
Target::Msp430 => {
let aliases = names
.iter()
.map(|n| {
format!(
"
.weak {0}
{0} = DH_TRAMPOLINE",
n
)
})
.collect::<Vec<_>>()
.concat();
mod_items.push(quote! {
#[cfg(feature = "rt")]
global_asm!("
DH_TRAMPOLINE:
br #DEFAULT_HANDLER
");
#[cfg(feature = "rt")]
global_asm!(#aliases);
#[cfg(feature = "rt")]
extern "msp430-interrupt" {
#(fn #names();)*
}
#[doc(hidden)]
pub union Vector {
_handler: unsafe extern "msp430-interrupt" fn(),
_reserved: u32,
}
#[allow(renamed_and_removed_lints)]
// This currently breaks on nightly, to be removed with the line above once 1.31 is stable
#[allow(private_no_mangle_statics)]
#[cfg(feature = "rt")]
#[doc(hidden)]
#[link_section = ".vector_table.interrupts"]
#[no_mangle]
#[used]
pub static INTERRUPTS:
[Vector; #n] = [
#(#elements,)*
];
});
}
Target::RISCV => {}
Target::None => {}
}
let interrupt_enum = quote! {
/// Enumeration of all the interrupts
pub enum Interrupt {
#(#variants)*
}
unsafe impl ::bare_metal::Nr for Interrupt {
#[inline]
fn nr(&self) -> u8 {
match *self {
#(#arms)*
}
}
}
};
if *target == Target::CortexM {
root.push(interrupt_enum);
} else {
mod_items.push(quote! {
#interrupt_enum
#[derive(Debug, Copy, Clone)]
pub struct TryFromInterruptError(());
impl Interrupt {
#[inline]
pub fn try_from(value: u8) -> Result<Self, TryFromInterruptError> {
match value {
#(#from_arms)*
_ => Err(TryFromInterruptError(())),
}
}
}
});
}
if *target != Target::None {
let abi = match *target {
Target::Msp430 => "msp430-interrupt",
_ => "C",
};
if *target != Target::CortexM {
mod_items.push(quote! {
#[cfg(feature = "rt")]
#[macro_export]
macro_rules! interrupt {
($NAME:ident, $path:path, locals: {
$($lvar:ident:$lty:ty = $lval:expr;)*
}) => {
#[allow(non_snake_case)]
mod $NAME {
pub struct Locals {
$(
pub $lvar: $lty,
)*
}
}
#[allow(non_snake_case)]
#[no_mangle]
pub extern #abi fn $NAME() {
// check that the handler exists
let _ = $crate::interrupt::Interrupt::$NAME;
static mut LOCALS: self::$NAME::Locals =
self::$NAME::Locals {
$(
$lvar: $lval,
)*
};
// type checking
let f: fn(&mut self::$NAME::Locals) = $path;
f(unsafe { &mut LOCALS });
}
};
($NAME:ident, $path:path) => {
#[allow(non_snake_case)]
#[no_mangle]
pub extern #abi fn $NAME() {
// check that the handler exists
let _ = $crate::interrupt::Interrupt::$NAME;
// type checking
let f: fn() = $path;
f();
}
}
}
});
}
}
if !interrupts.is_empty() {
if *target != Target::CortexM {
root.push(quote! {
#[doc(hidden)]
pub mod interrupt {
#(#mod_items)*
}
});
root.push(quote! {
pub use self::interrupt::Interrupt;
});
}
}
Ok(root)
}
#![allow(bad_style)]
#![allow(dead_code)]
#![allow(improper_ctypes)]
#![no_std]
use core::ptr::null_mut;
use libc::FILE;
use libc::{c_char, c_double, c_float, c_int, c_long, c_uchar, c_uint, c_void};
use va_list::VaList;
pub type CSOUND_STATUS = c_int;
pub const CSOUND_SIGNAL: CSOUND_STATUS = -5;
pub const CSOUND_MEMORY: CSOUND_STATUS = -4;
pub const CSOUND_PERFORMANCE: CSOUND_STATUS = -3;
pub const CSOUND_INITIALIZATION: CSOUND_STATUS = -2;
pub const CSOUND_ERROR: CSOUND_STATUS = -1;
pub const CSOUND_SUCCESS: CSOUND_STATUS = 0;
pub type controlChannelType = c_uint;
pub const CSOUND_CONTROL_CHANNEL: controlChannelType = 1;
pub const CSOUND_AUDIO_CHANNEL: controlChannelType = 2;
pub const CSOUND_STRING_CHANNEL: controlChannelType = 3;
pub const CSOUND_PVS_CHANNEL: controlChannelType = 4;
pub const CSOUND_VAR_CHANNEL: controlChannelType = 5;
pub const CSOUND_CHANNEL_TYPE_MASK: controlChannelType = 15;
pub const CSOUND_INPUT_CHANNEL: controlChannelType = 16;
pub const CSOUND_OUTPUT_CHANNEL: controlChannelType = 32;
pub type controlChannelBehavior = c_uint;
pub const CSOUND_CONTROL_CHANNEL_NO_HINTS: controlChannelBehavior = 0;
pub const CSOUND_CONTROL_CHANNEL_INT: controlChannelBehavior = 1;
pub const CSOUND_CONTROL_CHANNEL_LIN: controlChannelBehavior = 2;
pub const CSOUND_CONTROL_CHANNEL_EXP: controlChannelBehavior = 3;
pub type csound_file_open_callback = extern "C" fn(*mut CSOUND, *const c_char, c_int, c_int, c_int);
pub type csound_open_callback = Option<extern "C" fn(*mut CSOUND, *const csRtAudioParams) -> c_int>;
pub type csound_rt_play_callback = Option<extern "C" fn(*mut CSOUND, *const c_double, c_int)>;
pub type csound_rt_rec_callback = Option<extern "C" fn(*mut CSOUND, *mut c_double, c_int) -> c_int>;
pub type csound_rt_close_callback = Option<extern "C" fn(*mut CSOUND)>;
pub type cscore_callback_type = Option<extern "C" fn(*mut CSOUND)>;
pub type csound_dev_list_callback =
Option<extern "C" fn(*mut CSOUND, *mut CS_AUDIODEVICE, c_int) -> c_int>;
pub type csound_midi_dev_list_callback =
Option<extern "C" fn(*mut CSOUND, *mut CS_MIDIDEVICE, c_int) -> c_int>;
pub type csound_ext_midi_open_callback =
Option<extern "C" fn(*mut CSOUND, *mut *mut c_void, *const c_char) -> c_int>;
pub type csound_ext_midi_close_callback =
Option<extern "C" fn(arg1: *mut CSOUND, userData: *mut c_void) -> c_int>;
pub type csound_ext_midi_read_data_callback =
Option<extern "C" fn(*mut CSOUND, *mut c_void, *mut c_uchar, c_int) -> c_int>;
pub type csound_ext_midi_write_data_callback =
Option<extern "C" fn(*mut CSOUND, *mut c_void, *const c_uchar, c_int) -> c_int>;
pub type csound_ext_midi_error_callback = Option<extern "C" fn(c_int) -> *const c_char>;
pub type csound_message_callback = extern "C" fn(*mut CSOUND, c_int, *const c_char);
pub type default_message_callback = extern "C" fn(*mut CSOUND, c_int, *const c_char, VaList);
pub type csound_channel_callback =
extern "C" fn(*mut CSOUND, *const c_char, *mut c_void, *const c_void);
pub const CSOUND_EXITJMP_SUCCESS: u32 = 256;
pub const CSOUNDINIT_NO_SIGNAL_HANDLER: u32 = 1;
pub const CSOUNDINIT_NO_ATEXIT: u32 = 2;
pub const CSOUND_CALLBACK_KBD_EVENT: u32 = 1;
pub const CSOUND_CALLBACK_KBD_TEXT: u32 = 2;
pub const CSOUNDCFG_INTEGER: u32 = 1;
pub const CSOUNDCFG_BOOLEAN: u32 = 2;
pub const CSOUNDCFG_FLOAT: u32 = 3;
pub const CSOUNDCFG_DOUBLE: u32 = 4;
pub const CSOUNDCFG_MYFLT: u32 = 5;
pub const CSOUNDCFG_STRING: u32 = 6;
pub const CSOUNDCFG_POWOFTWO: u32 = 1;
pub const CSOUNDCFG_SUCCESS: u32 = 0;
pub const CSOUNDCFG_INVALID_NAME: i32 = -1;
pub const CSOUNDCFG_INVALID_TYPE: i32 = -2;
pub const CSOUNDCFG_INVALID_FLAG: i32 = -3;
pub const CSOUNDCFG_NULL_POINTER: i32 = -4;
pub const CSOUNDCFG_TOO_HIGH: i32 = -5;
pub const CSOUNDCFG_TOO_LOW: i32 = -6;
pub const CSOUNDCFG_NOT_POWOFTWO: i32 = -7;
pub const CSOUNDCFG_INVALID_BOOLEAN: i32 = -8;
pub const CSOUNDCFG_MEMORY: i32 = -9;
pub const CSOUNDCFG_STRING_LENGTH: i32 = -10;
pub const CSOUNDCFG_LASTERROR: i32 = -10;
pub const CSOUNDMSG_DEFAULT: u32 = 0;
pub const CSOUNDMSG_ERROR: u32 = 4096;
pub const CSOUNDMSG_ORCH: u32 = 8192;
pub const CSOUNDMSG_REALTIME: u32 = 12288;
pub const CSOUNDMSG_WARNING: u32 = 16384;
pub const CSOUNDMSG_STDOUT: u32 = 20480;
pub const CSOUNDMSG_FG_BLACK: u32 = 256;
pub const CSOUNDMSG_FG_RED: u32 = 257;
pub const CSOUNDMSG_FG_GREEN: u32 = 258;
pub const CSOUNDMSG_FG_YELLOW: u32 = 259;
pub const CSOUNDMSG_FG_BLUE: u32 = 260;
pub const CSOUNDMSG_FG_MAGENTA: u32 = 261;
pub const CSOUNDMSG_FG_CYAN: u32 = 262;
pub const CSOUNDMSG_FG_WHITE: u32 = 263;
pub const CSOUNDMSG_FG_BOLD: u32 = 8;
pub const CSOUNDMSG_FG_UNDERLINE: u32 = 128;
pub const CSOUNDMSG_BG_BLACK: u32 = 512;
pub const CSOUNDMSG_BG_RED: u32 = 528;
pub const CSOUNDMSG_BG_GREEN: u32 = 544;
pub const CSOUNDMSG_BG_ORANGE: u32 = 560;
pub const CSOUNDMSG_BG_BLUE: u32 = 576;
pub const CSOUNDMSG_BG_MAGENTA: u32 = 592;
pub const CSOUNDMSG_BG_CYAN: u32 = 608;
pub const CSOUNDMSG_BG_GREY: u32 = 624;
pub const CSOUNDMSG_TYPE_MASK: u32 = 28672;
pub const CSOUNDMSG_FG_COLOR_MASK: u32 = 263;
pub const CSOUNDMSG_FG_ATTR_MASK: u32 = 136;
pub const CSOUNDMSG_BG_COLOR_MASK: u32 = 624;
pub type csLenguage_t = u32;
pub const CSLANGUAGE_DEFAULT: csLenguage_t = 0;
pub const CSLANGUAGE_AFRIKAANS: csLenguage_t = 1;
pub const CSLANGUAGE_ALBANIAN: csLenguage_t = 2;
pub const CSLANGUAGE_ARABIC: csLenguage_t = 3;
pub const CSLANGUAGE_ARMENIAN: csLenguage_t = 4;
pub const CSLANGUAGE_ASSAMESE: csLenguage_t = 5;
pub const CSLANGUAGE_AZERI: csLenguage_t = 6;
pub const CSLANGUAGE_BASQUE: csLenguage_t = 7;
pub const CSLANGUAGE_BELARUSIAN: csLenguage_t = 8;
pub const CSLANGUAGE_BENGALI: csLenguage_t = 9;
pub const CSLANGUAGE_BULGARIAN: csLenguage_t = 10;
pub const CSLANGUAGE_CATALAN: csLenguage_t = 11;
pub const CSLANGUAGE_CHINESE: csLenguage_t = 12;
pub const CSLANGUAGE_CROATIAN: csLenguage_t = 13;
pub const CSLANGUAGE_CZECH: csLenguage_t = 14;
pub const CSLANGUAGE_DANISH: csLenguage_t = 15;
pub const CSLANGUAGE_DUTCH: csLenguage_t = 16;
pub const CSLANGUAGE_ENGLISH_UK: csLenguage_t = 17;
pub const CSLANGUAGE_ENGLISH_US: csLenguage_t = 18;
pub const CSLANGUAGE_ESTONIAN: csLenguage_t = 19;
pub const CSLANGUAGE_FAEROESE: csLenguage_t = 20;
pub const CSLANGUAGE_FARSI: csLenguage_t = 21;
pub const CSLANGUAGE_FINNISH: csLenguage_t = 22;
pub const CSLANGUAGE_FRENCH: csLenguage_t = 23;
pub const CSLANGUAGE_GEORGIAN: csLenguage_t = 24;
pub const CSLANGUAGE_GERMAN: csLenguage_t = 25;
pub const CSLANGUAGE_GREEK: csLenguage_t = 26;
pub const CSLANGUAGE_GUJARATI: csLenguage_t = 27;
pub const CSLANGUAGE_HEBREW: csLenguage_t = 28;
pub const CSLANGUAGE_HINDI: csLenguage_t = 29;
pub const CSLANGUAGE_HUNGARIAN: csLenguage_t = 30;
pub const CSLANGUAGE_ICELANDIC: csLenguage_t = 31;
pub const CSLANGUAGE_INDONESIAN: csLenguage_t = 32;
pub const CSLANGUAGE_ITALIAN: csLenguage_t = 33;
pub const CSLANGUAGE_JAPANESE: csLenguage_t = 34;
pub const CSLANGUAGE_KANNADA: csLenguage_t = 35;
pub const CSLANGUAGE_KASHMIRI: csLenguage_t = 36;
pub const CSLANGUAGE_KAZAK: csLenguage_t = 37;
pub const CSLANGUAGE_KONKANI: csLenguage_t = 38;
pub const CSLANGUAGE_KOREAN: csLenguage_t = 39;
pub const CSLANGUAGE_LATVIAN: csLenguage_t = 40;
pub const CSLANGUAGE_LITHUANIAN: csLenguage_t = 41;
pub const CSLANGUAGE_MACEDONIAN: csLenguage_t = 42;
pub const CSLANGUAGE_MALAY: csLenguage_t = 43;
pub const CSLANGUAGE_MALAYALAM: csLenguage_t = 44;
pub const CSLANGUAGE_MANIPURI: csLenguage_t = 45;
pub const CSLANGUAGE_MARATHI: csLenguage_t = 46;
pub const CSLANGUAGE_NEPALI: csLenguage_t = 47;
pub const CSLANGUAGE_NORWEGIAN: csLenguage_t = 48;
pub const CSLANGUAGE_ORIYA: csLenguage_t = 49;
pub const CSLANGUAGE_POLISH: csLenguage_t = 50;
pub const CSLANGUAGE_PORTUGUESE: csLenguage_t = 51;
pub const CSLANGUAGE_PUNJABI: csLenguage_t = 52;
pub const CSLANGUAGE_ROMANIAN: csLenguage_t = 53;
pub const CSLANGUAGE_RUSSIAN: csLenguage_t = 54;
pub const CSLANGUAGE_SANSKRIT: csLenguage_t = 55;
pub const CSLANGUAGE_SERBIAN: csLenguage_t = 56;
pub const CSLANGUAGE_SINDHI: csLenguage_t = 57;
pub const CSLANGUAGE_SLOVAK: csLenguage_t = 58;
pub const CSLANGUAGE_SLOVENIAN: csLenguage_t = 59;
pub const CSLANGUAGE_SPANISH: csLenguage_t = 60;
pub const CSLANGUAGE_SWAHILI: csLenguage_t = 61;
pub const CSLANGUAGE_SWEDISH: csLenguage_t = 62;
pub const CSLANGUAGE_TAMIL: csLenguage_t = 63;
pub const CSLANGUAGE_TATAR: csLenguage_t = 64;
pub const CSLANGUAGE_TELUGU: csLenguage_t = 65;
pub const CSLANGUAGE_THAI: csLenguage_t = 66;
pub const CSLANGUAGE_TURKISH: csLenguage_t = 67;
pub const CSLANGUAGE_UKRAINIAN: csLenguage_t = 68;
pub const CSLANGUAGE_URDU: csLenguage_t = 69;
pub const CSLANGUAGE_UZBEK: csLenguage_t = 70;
pub const CSLANGUAGE_VIETNAMESE: csLenguage_t = 71;
pub const CSLANGUAGE_COLUMBIAN: csLenguage_t = 72;
/**
* The following constants are used with csound->FileOpen2() and
* csound->ldmemfile2() to specify the format of a file that is being
* opened. This information is passed by Csound to a host's FileOpen
* callback and does not influence the opening operation in any other
* way. Conversion from Csound's TYP_XXX macros for audio formats to
* CSOUND_FILETYPES values can be done with csound->type2csfiletype().
*/
pub type CSOUND_FILETYPES_t = u32;
pub const CSFTYPE_UNIFIED_CSD: CSOUND_FILETYPES_t = 1; /* Unified Csound document */
pub const CSFTYPE_ORCHESTRA: CSOUND_FILETYPES_t = 2; /* the primary orc file (may be temporary) */
pub const CSFTYPE_SCORE: CSOUND_FILETYPES_t = 3; /* the primary sco file (may be temporary)*/
/*or any additional score opened by Cscore */
pub const CSFTYPE_ORC_INCLUDE: CSOUND_FILETYPES_t = 4; /* a file #included by the orchestra */
pub const CSFTYPE_SCO_INCLUDE: CSOUND_FILETYPES_t = 5; /* a file #included by the score */
pub const CSFTYPE_SCORE_OUT: CSOUND_FILETYPES_t = 6; /* used for score.srt, score.xtr, cscore.out */
pub const CSFTYPE_SCOT: CSOUND_FILETYPES_t = 7; /* Scot score input format */
pub const CSFTYPE_OPTIONS: CSOUND_FILETYPES_t = 8; /* for .csoundrc and -@ flag */
pub const CSFTYPE_EXTRACT_PARMS: CSOUND_FILETYPES_t = 9; /* extraction file specified by -x */
/* audio file types that Csound can write (10-19) or read */
pub const CSFTYPE_RAW_AUDIO: CSOUND_FILETYPES_t = 9;
pub const CSFTYPE_IRCAM: CSOUND_FILETYPES_t = 10;
pub const CSFTYPE_AIFF: CSOUND_FILETYPES_t = 11;
pub const CSFTYPE_AIFC: CSOUND_FILETYPES_t = 12;
pub const CSFTYPE_WAVE: CSOUND_FILETYPES_t = 13;
pub const CSFTYPE_AU: CSOUND_FILETYPES_t = 14;
pub const CSFTYPE_SD2: CSOUND_FILETYPES_t = 15;
pub const CSFTYPE_W64: CSOUND_FILETYPES_t = 16;
pub const CSFTYPE_WAVEX: CSOUND_FILETYPES_t = 17;
pub const CSFTYPE_FLAC: CSOUND_FILETYPES_t = 18;
pub const CSFTYPE_CAF: CSOUND_FILETYPES_t = 19;
pub const CSFTYPE_WVE: CSOUND_FILETYPES_t = 20;
pub const CSFTYPE_OGG: CSOUND_FILETYPES_t = 21;
pub const CSFTYPE_MPC2K: CSOUND_FILETYPES_t = 22;
pub const CSFTYPE_RF64: CSOUND_FILETYPES_t = 23;
pub const CSFTYPE_AVR: CSOUND_FILETYPES_t = 24;
pub const CSFTYPE_HTK: CSOUND_FILETYPES_t = 25;
pub const CSFTYPE_MAT4: CSOUND_FILETYPES_t = 26;
pub const CSFTYPE_MAT5: CSOUND_FILETYPES_t = 27;
pub const CSFTYPE_NIST: CSOUND_FILETYPES_t = 28;
pub const CSFTYPE_PAF: CSOUND_FILETYPES_t = 29;
pub const CSFTYPE_PVF: CSOUND_FILETYPES_t = 30;
pub const CSFTYPE_SDS: CSOUND_FILETYPES_t = 31;
pub const CSFTYPE_SVX: CSOUND_FILETYPES_t = 32;
pub const CSFTYPE_VOC: CSOUND_FILETYPES_t = 33;
pub const CSFTYPE_XI: CSOUND_FILETYPES_t = 34;
pub const CSFTYPE_UNKNOWN_AUDIO: CSOUND_FILETYPES_t = 35; /* used when opening audio file for reading
or temp file written with <CsSampleB> */
/* miscellaneous music formats */
pub const CSFTYPE_SOUNDFONT: CSOUND_FILETYPES_t = 36;
pub const CSFTYPE_STD_MIDI: CSOUND_FILETYPES_t = 37; /* Standard MIDI file */
pub const CSFTYPE_MIDI_SYSEX: CSOUND_FILETYPES_t = 38; /* Raw MIDI codes, eg. SysEx dump */
/* analysis formats */
pub const CSFTYPE_HETRO: CSOUND_FILETYPES_t = 39;
pub const CSFTYPE_HETROT: CSOUND_FILETYPES_t = 40;
pub const CSFTYPE_PVC: CSOUND_FILETYPES_t = 41; /* original PVOC format */
pub const CSFTYPE_PVCEX: CSOUND_FILETYPES_t = 42; /* PVOC-EX format */
pub const CSFTYPE_CVANAL: CSOUND_FILETYPES_t = 43;
pub const CSFTYPE_LPC: CSOUND_FILETYPES_t = 44;
pub const CSFTYPE_ATS: CSOUND_FILETYPES_t = 45;
pub const CSFTYPE_LORIS: CSOUND_FILETYPES_t = 46;
pub const CSFTYPE_SDIF: CSOUND_FILETYPES_t = 47;
pub const CSFTYPE_HRTF: CSOUND_FILETYPES_t = 48;
/* Types for plugins and the files they read/write */
pub const CSFTYPE_UNUSED: CSOUND_FILETYPES_t = 49;
pub const CSFTYPE_LADSPA_PLUGIN: CSOUND_FILETYPES_t = 50;
pub const CSFTYPE_SNAPSHOT: CSOUND_FILETYPES_t = 51;
/* Special formats for Csound ftables or scanned synthesis
matrices with header info */
pub const CSFTYPE_FTABLES_TEXT: CSOUND_FILETYPES_t = 52; /* for ftsave and ftload */
pub const CSFTYPE_FTABLES_BINARY: CSOUND_FILETYPES_t = 53; /* for ftsave and ftload */
pub const CSFTYPE_XSCANU_MATRIX: CSOUND_FILETYPES_t = 54; /* for xscanu opcode */
/* These are for raw lists of numbers without header info */
pub const CSFTYPE_FLOATS_TEXT: CSOUND_FILETYPES_t = 55; /* used by GEN23, GEN28, dumpk, readk */
pub const CSFTYPE_FLOATS_BINARY: CSOUND_FILETYPES_t = 56; /* used by dumpk, readk, etc. */
pub const CSFTYPE_INTEGER_TEXT: CSOUND_FILETYPES_t = 57; /* used by dumpk, readk, etc. */
pub const CSFTYPE_INTEGER_BINARY: CSOUND_FILETYPES_t = 58; /* used by dumpk, readk, etc. */
/* image file formats */
pub const CSFTYPE_IMAGE_PNG: CSOUND_FILETYPES_t = 59;
/* For files that don't match any of the above */
pub const CSFTYPE_POSTSCRIPT: CSOUND_FILETYPES_t = 60; /* EPS format used by graphs */
pub const CSFTYPE_SCRIPT_TEXT: CSOUND_FILETYPES_t = 61; /* executable script files (eg. Python) */
pub const CSFTYPE_OTHER_TEXT: CSOUND_FILETYPES_t = 62;
pub const CSFTYPE_OTHER_BINARY: CSOUND_FILETYPES_t = 63;
/* This should only be used internally by the original FileOpen()
API call or for temp files written with <CsFileB> */
pub const CSFTYPE_UNKNOWN: CSOUND_FILETYPES_t = 0;
//pub type CSOUND = CSOUND_;
pub enum CSOUND {}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct windat_ {
_unused: [u8; 0],
}
pub type WINDAT = windat_;
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Debug, Copy, Clone)]
pub struct CSOUND_PARAMS {
pub debug_mode: c_int,
pub buffer_frames: c_int,
pub hardware_buffer_frames: c_int,
pub displays: c_int,
pub ascii_graphs: c_int,
pub postscript_graphs: c_int,
pub message_level: c_int,
pub tempo: c_int,
pub ring_bell: c_int,
pub use_cscore: c_int,
pub terminate_on_midi: c_int,
pub heartbeat: c_int,
pub defer_gen01_load: c_int,
pub midi_key: c_int,
pub midi_key_cps: c_int,
pub midi_key_oct: c_int,
pub midi_key_pch: c_int,
pub midi_velocity: c_int,
pub midi_velocity_amp: c_int,
pub no_default_paths: c_int,
pub number_of_threads: c_int,
pub syntax_check_only: c_int,
pub csd_line_counts: c_int,
pub compute_weights: c_int,
pub realtime_mode: c_int,
pub sample_accurate: c_int,
pub sample_rate_override: c_double,
pub control_rate_override: c_double,
pub nchnls_override: c_int,
pub nchnls_i_override: c_int,
pub e0dbfs_override: c_double,
pub daemon: c_int,
pub ksmps_override: c_int,
pub FFT_library: c_int,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ORCTOKEN {
pub type_: c_int,
pub lexeme: *mut c_char,
pub value: c_int,
pub fvalue: c_double,
pub optype: *mut c_char,
pub next: *mut ORCTOKEN,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct Tree {
pub type_: c_int,
pub value: *mut ORCTOKEN,
pub rate: c_int,
pub len: c_int,
pub line: c_int,
pub locn: u64,
pub left: *mut Tree,
pub right: *mut Tree,
pub next: *mut Tree,
pub markup: *mut c_void,
}
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Copy, Clone)]
pub struct CS_AUDIODEVICE {
pub device_name: [c_char; 64usize],
pub device_id: [c_char; 64usize],
pub rt_module: [c_char; 64usize],
pub max_nchnls: c_int,
pub isOutput: c_int,
}
impl Default for CS_AUDIODEVICE {
fn default() -> CS_AUDIODEVICE {
CS_AUDIODEVICE {
device_name: [0; 64usize],
device_id: [0; 64usize],
rt_module: [0; 64usize],
max_nchnls: 0,
isOutput: 0,
}
}
}
pub type PVSDATEXT = pvsdat_ext;
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Debug, Copy, Clone)]
pub struct csRtAudioParams {
pub devName: *mut c_char,
pub devNum: c_int,
pub bufSamp_SW: c_uint,
pub bufSamp_HW: c_int,
pub nChannels: c_int,
pub sampleFormat: c_int,
pub sampleRate: c_float,
}
impl Default for csRtAudioParams {
fn default() -> csRtAudioParams {
csRtAudioParams {
devName: null_mut(),
devNum: 0,
bufSamp_SW: 0,
bufSamp_HW: 0,
nChannels: 0,
sampleFormat: 0,
sampleRate: 0.0,
}
}
}
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Copy, Clone)]
pub struct CS_MIDIDEVICE {
pub device_name: [c_char; 64usize],
pub interface_name: [c_char; 64usize],
pub device_id: [c_char; 64usize],
pub midi_module: [c_char; 64usize],
pub isOutput: c_int,
}
impl Default for CS_MIDIDEVICE {
fn default() -> CS_MIDIDEVICE {
CS_MIDIDEVICE {
device_name: [0; 64usize],
interface_name: [0; 64usize],
device_id: [0; 64usize],
midi_module: [0; 64usize],
isOutput: 0,
}
}
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct controlChannelHints_s {
pub behav: controlChannelBehavior,
pub dflt: c_double,
pub min: c_double,
pub max: c_double,
pub x: c_int,
pub y: c_int,
pub width: c_int,
pub height: c_int,
/// This member must be set explicitly to NULL if not used
pub attributes: *mut c_char,
}
impl Default for controlChannelHints_s {
fn default() -> controlChannelHints_s {
controlChannelHints_s {
behav: CSOUND_CONTROL_CHANNEL_NO_HINTS,
dflt: 0 as c_double,
min: 0 as c_double,
max: 0 as c_double,
x: 0 as c_int,
y: 0 as c_int,
width: 0 as c_int,
height: 0 as c_int,
attributes: null_mut(),
}
}
}
pub type controlChannelHints_t = controlChannelHints_s;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct controlChannelInfo_s {
pub name: *mut c_char,
pub type_: c_int,
pub hints: controlChannelHints_t,
}
pub type controlChannelInfo_t = controlChannelInfo_s;
impl Default for controlChannelInfo_s {
fn default() -> controlChannelInfo_s {
controlChannelInfo_s {
name: null_mut(),
type_: 0 as c_int,
hints: controlChannelHints_t::default(),
}
}
}
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Debug, Copy, Clone, Default)]
pub struct RTCLOCK_S {
pub starttime_real: c_long,
pub starttime_CPU: c_long,
}
pub type RTCLOCK = RTCLOCK_S;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct opcodeListEntry {
pub opname: *mut c_char,
pub outypes: *mut c_char,
pub intypes: *mut c_char,
pub flags: c_int,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct CsoundRandMTState_ {
pub mti: c_int,
pub mt: [u32; 624usize],
}
pub type CsoundRandMTState = CsoundRandMTState_;
#[repr(C)]
#[allow(non_snake_case)]
#[derive(Debug, Clone)]
pub struct pvsdat_ext {
pub N: c_int,
pub sliding: c_int,
pub NB: c_int,
pub overlap: c_int,
pub winsize: c_int,
pub wintype: c_int,
pub format: c_int,
pub framecount: c_uint,
pub frame: *mut c_float,
}
impl Default for pvsdat_ext {
fn default() -> pvsdat_ext {
pvsdat_ext {
N: 0,
sliding: 0,
NB: 0,
overlap: 0,
winsize: 0,
wintype: 0,
format: 0,
framecount: 0,
frame: null_mut(),
}
}
}
extern "C" {
pub fn csoundLocalizeString(s: *const c_char) -> *mut c_char;
/* Csound instantiation functions ******************************************************** */
pub fn csoundInitialize(flags: c_int) -> c_int;
pub fn csoundCreate(hostData: *mut c_void) -> *mut CSOUND;
pub fn csoundDestroy(arg1: *mut CSOUND);
pub fn csoundGetVersion() -> c_int;
pub fn csoundGetAPIVersion() -> c_int;
/* Csound performance functions ********************************************************* */
pub fn csoundParseOrc(csound: *mut CSOUND, str: *const c_char) -> *mut Tree;
pub fn csoundCompileTree(csound: *mut CSOUND, root: *mut Tree) -> c_int;
pub fn csoundDeleteTree(csound: *mut CSOUND, tree: *mut Tree);
pub fn csoundCompileOrc(csound: *mut CSOUND, str: *const c_char) -> c_int;
pub fn csoundCompileOrcAsync(csound: *mut CSOUND, str: *const c_char) -> c_int;
pub fn csoundEvalCode(csound: *mut CSOUND, str: *const c_char) -> c_double;
pub fn csoundCompileArgs(arg1: *mut CSOUND, argc: c_int, argv: *const *const c_char) -> c_int;
pub fn csoundStart(csound: *mut CSOUND) -> c_int;
pub fn csoundCompile(arg1: *mut CSOUND, argc: c_int, argv: *const *const c_char) -> c_int;
pub fn csoundCompileCsd(csound: *mut CSOUND, str: *const c_char) -> c_int;
pub fn csoundCompileCsdText(csound: *mut CSOUND, csd_text: *const c_char) -> c_int;
pub fn csoundPerform(arg1: *mut CSOUND) -> c_int;
pub fn csoundPerformKsmps(arg1: *mut CSOUND) -> c_int;
pub fn csoundPerformBuffer(arg1: *mut CSOUND) -> c_int;
pub fn csoundStop(arg1: *mut CSOUND);
pub fn csoundCleanup(arg1: *mut CSOUND) -> c_int;
pub fn csoundReset(arg1: *mut CSOUND);
/* UDP functions ************************************************************************/
pub fn csoundUDPServerStart(csound: *mut CSOUND, port: c_int) -> c_int;
pub fn csoundUDPServerStatus(csound: *mut CSOUND) -> c_int;
pub fn csoundUDPServerClose(csound: *mut CSOUND) -> c_int;
pub fn csoundUDPConsole(
csound: *mut CSOUND,
addr: *const c_char,
port: c_int,
mirror: c_int,
) -> c_int;
pub fn csoundStopUDPConsole(csound: *mut CSOUND);
    /* Csound attributes functions ***********************************************************/
pub fn csoundGetA4(arg1: *mut CSOUND) -> c_double;
pub fn csoundGetSr(arg1: *mut CSOUND) -> c_double;
pub fn csoundGetKr(arg1: *mut CSOUND) -> c_double;
pub fn csoundGetKsmps(arg1: *mut CSOUND) -> u32;
pub fn csoundGetNchnls(arg1: *mut CSOUND) -> u32;
pub fn csoundGetNchnlsInput(csound: *mut CSOUND) -> u32;
pub fn csoundGet0dBFS(arg1: *mut CSOUND) -> c_double;
pub fn csoundGetCurrentTimeSamples(csound: *mut CSOUND) -> i64;
pub fn csoundGetSizeOfMYFLT() -> c_int;
pub fn csoundGetHostData(arg1: *mut CSOUND) -> *mut c_void;
pub fn csoundSetHostData(arg1: *mut CSOUND, hostData: *mut c_void);
pub fn csoundSetOption(csound: *mut CSOUND, option: *const c_char) -> c_int;
pub fn csoundSetParams(csound: *mut CSOUND, p: *mut CSOUND_PARAMS);
pub fn csoundGetParams(csound: *mut CSOUND, p: *mut CSOUND_PARAMS);
pub fn csoundGetDebug(arg1: *mut CSOUND) -> c_int;
pub fn csoundSetDebug(arg1: *mut CSOUND, debug: c_int);
/* Csound input/output functions **********************************************************/
pub fn csoundGetOutputName(arg1: *mut CSOUND) -> *const c_char;
pub fn csoundGetInputName(arg1: *mut CSOUND) -> *const c_char;
pub fn csoundSetOutput(
csound: *mut CSOUND,
name: *const c_char,
type_: *const c_char,
format: *const c_char,
);
pub fn csoundGetOutputFormat(csound: *mut CSOUND, type_: *mut c_char, format: *mut c_char);
pub fn csoundSetInput(csound: *mut CSOUND, name: *const c_char);
pub fn csoundSetMIDIInput(csound: *mut CSOUND, name: *const c_char);
pub fn csoundSetMIDIFileInput(csound: *mut CSOUND, name: *const c_char);
pub fn csoundSetMIDIOutput(csound: *mut CSOUND, name: *const c_char);
pub fn csoundSetMIDIFileOutput(csound: *mut CSOUND, name: *const c_char);
pub fn csoundSetFileOpenCallback(
p: *mut CSOUND,
open_callback: Option<csound_file_open_callback>,
);
/* Csound realtime audio I/O functions ***************************************************/
pub fn csoundSetRTAudioModule(csound: *mut CSOUND, module: *const c_char);
pub fn csoundGetModule(
csound: *mut CSOUND,
number: c_int,
name: *mut *mut c_char,
type_: *mut *mut c_char,
) -> c_int;
pub fn csoundGetInputBufferSize(arg1: *mut CSOUND) -> c_long;
pub fn csoundGetOutputBufferSize(arg1: *mut CSOUND) -> c_long;
pub fn csoundGetInputBuffer(arg1: *mut CSOUND) -> *mut c_void;
pub fn csoundGetOutputBuffer(arg1: *mut CSOUND) -> *const c_void;
pub fn csoundGetSpin(arg1: *mut CSOUND) -> *mut c_void;
pub fn csoundClearSpin(arg1: *mut CSOUND);
pub fn csoundAddSpinSample(csound: *mut CSOUND, frame: c_int, channel: c_int, sample: c_double);
pub fn csoundSetSpinSample(csound: *mut CSOUND, frame: c_int, channel: c_int, sample: c_double);
pub fn csoundGetSpout(csound: *mut CSOUND) -> *mut c_void;
pub fn csoundGetSpoutSample(csound: *mut CSOUND, frame: c_int, channel: c_int) -> c_double;
pub fn csoundGetRtRecordUserData(arg1: *mut CSOUND) -> *mut *mut c_void;
pub fn csoundGetRtPlayUserData(arg1: *mut CSOUND) -> *mut *mut c_void;
pub fn csoundSetHostImplementedAudioIO(arg1: *mut CSOUND, state: c_int, bufSize: c_int);
pub fn csoundGetAudioDevList(
csound: *mut CSOUND,
list: *mut CS_AUDIODEVICE,
isOutput: c_int,
) -> c_int;
pub fn csoundSetPlayopenCallback(arg1: *mut CSOUND, func: csound_open_callback);
pub fn csoundSetRecopenCallback(arg1: *mut CSOUND, func: csound_open_callback);
pub fn csoundSetRtplayCallback(arg1: *mut CSOUND, func: csound_rt_play_callback);
pub fn csoundSetRtrecordCallback(arg1: *mut CSOUND, func: csound_rt_rec_callback);
pub fn csoundSetRtcloseCallback(arg1: *mut CSOUND, func: csound_rt_close_callback);
pub fn csoundSetAudioDeviceListCallback(csound: *mut CSOUND, func: csound_dev_list_callback);
/* Csound realtime midi I/O **************************************************************/
pub fn csoundSetMIDIModule(csound: *mut CSOUND, module: *const c_char);
pub fn csoundSetHostImplementedMIDIIO(csound: *mut CSOUND, state: c_int);
pub fn csoundGetMIDIDevList(
csound: *mut CSOUND,
list: *mut CS_MIDIDEVICE,
isOutput: c_int,
) -> c_int;
pub fn csoundSetExternalMidiInOpenCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_open_callback,
);
pub fn csoundSetExternalMidiOutOpenCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_open_callback,
);
pub fn csoundSetExternalMidiReadCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_read_data_callback,
);
pub fn csoundSetExternalMidiWriteCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_write_data_callback,
);
pub fn csoundSetExternalMidiInCloseCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_close_callback,
);
pub fn csoundSetExternalMidiOutCloseCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_close_callback,
);
pub fn csoundSetExternalMidiErrorStringCallback(
arg1: *mut CSOUND,
func: csound_ext_midi_error_callback,
);
pub fn csoundSetMIDIDeviceListCallback(
csound: *mut CSOUND,
func: csound_midi_dev_list_callback,
);
/* Csound score handling functions ********************************************************/
pub fn csoundReadScore(csound: *mut CSOUND, str: *const c_char) -> c_int;
pub fn csoundReadScoreAsync(csound: *mut CSOUND, str: *const c_char);
pub fn csoundGetScoreTime(arg1: *mut CSOUND) -> c_double;
pub fn csoundIsScorePending(arg1: *mut CSOUND) -> c_int;
pub fn csoundSetScorePending(arg1: *mut CSOUND, pending: c_int);
pub fn csoundGetScoreOffsetSeconds(arg1: *mut CSOUND) -> c_double;
pub fn csoundSetScoreOffsetSeconds(arg1: *mut CSOUND, time: c_double);
pub fn csoundRewindScore(arg1: *mut CSOUND);
pub fn csoundScoreSort(arg1: *mut CSOUND, input: *const FILE, out: *mut FILE) -> c_int;
pub fn csoundScoreExtract(
arg1: *mut CSOUND,
input: *const FILE,
out: *mut FILE,
extract: *const FILE,
) -> c_int;
/* Csound messages and text functions *****************************************************/
pub fn csoundSetDefaultMessageCallback(callback: default_message_callback);
pub fn csoundMessage(arg1: *mut CSOUND, format: *const c_char, ...);
pub fn csoundSetMessageStringCallback(arg1: *mut CSOUND, callback: csound_message_callback);
pub fn csoundGetMessageLevel(arg1: *mut CSOUND) -> c_int;
pub fn csoundSetMessageLevel(arg1: *mut CSOUND, messageLevel: c_int);
pub fn csoundCreateMessageBuffer(csound: *mut CSOUND, toStdOut: c_int);
pub fn csoundGetFirstMessage(csound: *mut CSOUND) -> *const c_char;
pub fn csoundGetFirstMessageAttr(csound: *mut CSOUND) -> c_int;
pub fn csoundPopFirstMessage(csound: *mut CSOUND);
pub fn csoundGetMessageCnt(csound: *mut CSOUND) -> c_int;
pub fn csoundDestroyMessageBuffer(csound: *mut CSOUND);
pub fn csoundGetChannelPtr(
arg1: *mut CSOUND,
p: *mut *mut c_double,
name: *const c_char,
type_: c_int,
) -> c_int;
pub fn csoundListChannels(arg1: *mut CSOUND, lst: *mut *mut controlChannelInfo_t) -> c_int;
pub fn csoundDeleteChannelList(arg1: *mut CSOUND, lst: *mut controlChannelInfo_t);
pub fn csoundSetControlChannelHints(
arg1: *mut CSOUND,
name: *const c_char,
hints: controlChannelHints_t,
) -> c_int;
pub fn csoundGetControlChannelHints(
arg1: *mut CSOUND,
name: *const c_char,
hints: *mut controlChannelHints_t,
) -> c_int;
pub fn csoundGetChannelLock(arg1: *mut CSOUND, name: *const c_char) -> *mut c_int;
pub fn csoundGetControlChannel(
csound: *mut CSOUND,
name: *const c_char,
err: *mut c_int,
) -> c_double;
pub fn csoundSetControlChannel(csound: *mut CSOUND, name: *const c_char, val: c_double);
pub fn csoundGetAudioChannel(csound: *mut CSOUND, name: *const c_char, samples: *mut c_double);
pub fn csoundSetAudioChannel(csound: *mut CSOUND, name: *const c_char, samples: *mut c_double);
pub fn csoundGetStringChannel(csound: *mut CSOUND, name: *const c_char, string: *mut c_char);
pub fn csoundSetStringChannel(csound: *mut CSOUND, name: *const c_char, string: *mut c_char);
pub fn csoundGetChannelDatasize(csound: *mut CSOUND, name: *const c_char) -> c_int;
pub fn csoundSetInputChannelCallback(
csound: *mut CSOUND,
inputChannelCalback: Option<csound_channel_callback>,
);
pub fn csoundSetOutputChannelCallback(
csound: *mut CSOUND,
outputChannelCalback: Option<csound_channel_callback>,
);
pub fn csoundSetPvsChannel(
arg1: *mut CSOUND,
fin: *const PVSDATEXT,
name: *const c_char,
) -> c_int;
pub fn csoundGetPvsChannel(
csound: *mut CSOUND,
fout: *mut PVSDATEXT,
name: *const c_char,
) -> c_int;
pub fn csoundScoreEvent(
arg1: *mut CSOUND,
type_: c_char,
pFields: *const c_double,
numFields: c_long,
) -> c_int;
pub fn csoundScoreEventAbsolute(
arg1: *mut CSOUND,
type_: c_char,
pfields: *const c_double,
numFields: c_long,
time_ofs: c_double,
) -> c_int;
pub fn csoundScoreEventAsync(
arg1: *mut CSOUND,
type_: c_char,
pFields: *const c_double,
numFields: c_long,
) -> c_int;
pub fn csoundScoreEventAbsoluteAsync(
arg1: *mut CSOUND,
type_: c_char,
pfields: *const c_double,
numFields: c_long,
time_ofs: c_double,
) -> c_int;
pub fn csoundInputMessage(arg1: *mut CSOUND, message: *const c_char);
pub fn csoundInputMessageAsync(arg1: *mut CSOUND, message: *const c_char);
pub fn csoundKillInstance(
arg1: *mut CSOUND,
arg2: c_double,
arg3: *const c_char,
arg4: c_int,
arg5: c_int,
) -> c_int;
pub fn csoundRegisterSenseEventCallback(
arg1: *mut CSOUND,
func: Option<unsafe extern "C" fn(arg1: *mut CSOUND, arg2: *mut c_void)>,
userData: *mut c_void,
) -> c_int;
pub fn csoundKeyPress(arg1: *mut CSOUND, c: c_char);
pub fn csoundRegisterKeyboardCallback(
arg1: *mut CSOUND,
func: Option<
unsafe extern "C" fn(userData: *mut c_void, p: *mut c_void, type_: c_uint) -> c_int,
>,
userData: *mut c_void,
type_: c_uint,
) -> c_int;
pub fn csoundRemoveKeyboardCallback(
csound: *mut CSOUND,
func: Option<
unsafe extern "C" fn(arg1: *mut c_void, arg2: *mut c_void, arg3: c_uint) -> c_int,
>,
);
pub fn csoundTableLength(arg1: *mut CSOUND, table: c_int) -> c_int;
pub fn csoundTableGet(arg1: *mut CSOUND, table: c_int, index: c_int) -> c_double;
pub fn csoundTableSet(arg1: *mut CSOUND, table: c_int, index: c_int, value: c_double);
pub fn csoundTableCopyOut(csound: *mut CSOUND, table: c_int, dest: *mut c_double);
pub fn csoundTableCopyOutAsync(csound: *mut CSOUND, table: c_int, dest: *mut c_double);
pub fn csoundTableCopyIn(csound: *mut CSOUND, table: c_int, src: *const c_double);
pub fn csoundTableCopyInAsync(csound: *mut CSOUND, table: c_int, src: *const c_double);
pub fn csoundGetTable(
arg1: *mut CSOUND,
tablePtr: *mut *mut c_double,
tableNum: c_int,
) -> c_int;
pub fn csoundGetTableArgs(
csound: *mut CSOUND,
argsPtr: *mut *mut c_double,
tableNum: c_int,
) -> c_int;
pub fn csoundIsNamedGEN(csound: *mut CSOUND, num: c_int) -> c_int;
pub fn csoundGetNamedGEN(csound: *mut CSOUND, num: c_int, name: *mut c_char, len: c_int);
pub fn csoundSetIsGraphable(arg1: *mut CSOUND, isGraphable: c_int) -> c_int;
pub fn csoundSetMakeGraphCallback(
arg1: *mut CSOUND,
makeGraphCallback_: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, windat: *mut WINDAT, name: *const c_char),
>,
);
pub fn csoundSetDrawGraphCallback(
arg1: *mut CSOUND,
drawGraphCallback_: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, windat: *mut WINDAT),
>,
);
pub fn csoundSetKillGraphCallback(
arg1: *mut CSOUND,
killGraphCallback_: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, windat: *mut WINDAT),
>,
);
pub fn csoundSetExitGraphCallback(
arg1: *mut CSOUND,
exitGraphCallback_: Option<unsafe extern "C" fn(arg1: *mut CSOUND) -> c_int>,
);
pub fn csoundGetNamedGens(arg1: *mut CSOUND) -> *mut c_void;
pub fn csoundNewOpcodeList(arg1: *mut CSOUND, opcodelist: *mut *mut opcodeListEntry) -> c_int;
pub fn csoundDisposeOpcodeList(arg1: *mut CSOUND, opcodelist: *mut opcodeListEntry);
pub fn csoundAppendOpcode(
arg1: *mut CSOUND,
opname: *const c_char,
dsblksiz: c_int,
flags: c_int,
thread: c_int,
outypes: *const c_char,
intypes: *const c_char,
iopadr: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, arg2: *mut c_void) -> c_int,
>,
kopadr: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, arg2: *mut c_void) -> c_int,
>,
aopadr: Option<
unsafe extern "C" fn(arg1: *mut CSOUND, arg2: *mut c_void) -> c_int,
>,
) -> c_int;
pub fn csoundSetYieldCallback(
arg1: *mut CSOUND,
yieldCallback_: Option<unsafe extern "C" fn(arg1: *mut CSOUND) -> c_int>,
);
pub fn csoundCreateThread(
threadRoutine: Option<unsafe extern "C" fn(arg1: *mut c_void) -> usize>,
userdata: *mut c_void,
) -> *mut c_void;
pub fn csoundGetCurrentThreadId() -> *mut c_void;
pub fn csoundJoinThread(thread: *mut c_void) -> usize;
pub fn csoundCreateThreadLock() -> *mut c_void;
pub fn csoundWaitThreadLock(lock: *mut c_void, milliseconds: usize) -> c_int;
pub fn csoundWaitThreadLockNoTimeout(lock: *mut c_void);
pub fn csoundNotifyThreadLock(lock: *mut c_void);
pub fn csoundDestroyThreadLock(lock: *mut c_void);
pub fn csoundCreateMutex(isRecursive: c_int) -> *mut c_void;
pub fn csoundLockMutex(mutex_: *mut c_void);
pub fn csoundLockMutexNoWait(mutex_: *mut c_void) -> c_int;
pub fn csoundUnlockMutex(mutex_: *mut c_void);
pub fn csoundDestroyMutex(mutex_: *mut c_void);
pub fn csoundCreateBarrier(max: c_uint) -> *mut c_void;
pub fn csoundDestroyBarrier(barrier: *mut c_void) -> c_int;
pub fn csoundWaitBarrier(barrier: *mut c_void) -> c_int;
pub fn csoundSleep(milliseconds: usize);
pub fn csoundRunCommand(argv: *const *const c_char, noWait: c_int) -> c_long;
pub fn csoundInitTimerStruct(arg1: *mut RTCLOCK);
pub fn csoundGetRealTime(arg1: *mut RTCLOCK) -> c_double;
pub fn csoundGetCPUTime(arg1: *mut RTCLOCK) -> c_double;
pub fn csoundGetRandomSeedFromTime() -> u32;
pub fn csoundSetLanguage(lang_code: csLenguage_t);
pub fn csoundGetEnv(csound: *mut CSOUND, name: *const c_char) -> *const c_char;
pub fn csoundSetGlobalEnv(name: *const c_char, value: *const c_char) -> c_int;
pub fn csoundCreateGlobalVariable(
arg1: *mut CSOUND,
name: *const c_char,
nbytes: usize,
) -> c_int;
pub fn csoundQueryGlobalVariable(arg1: *mut CSOUND, name: *const c_char) -> *mut c_void;
pub fn csoundQueryGlobalVariableNoCheck(arg1: *mut CSOUND, name: *const c_char) -> *mut c_void;
pub fn csoundDestroyGlobalVariable(arg1: *mut CSOUND, name: *const c_char) -> c_int;
pub fn csoundRunUtility(
arg1: *mut CSOUND,
name: *const c_char,
argc: c_int,
argv: *mut *mut c_char,
) -> c_int;
pub fn csoundListUtilities(arg1: *mut CSOUND) -> *mut *mut c_char;
pub fn csoundDeleteUtilityList(arg1: *mut CSOUND, lst: *mut *mut c_char);
pub fn csoundGetUtilityDescription(arg1: *mut CSOUND, utilName: *const c_char)
-> *const c_char;
pub fn csoundRand31(seedVal: *mut c_int) -> c_int;
pub fn csoundSeedRandMT(p: *mut CsoundRandMTState, initKey: *const u32, keyLength: u32);
pub fn csoundRandMT(p: *mut CsoundRandMTState) -> u32;
pub fn csoundCreateConfigurationVariable(
csound: *mut CSOUND,
name: *const c_char,
p: *mut c_void,
type_: c_int,
flags: c_int,
min: *mut c_void,
max: *mut c_void,
shortDesc: *const c_char,
longDesc: *const c_char,
) -> c_int;
pub fn csoundSetConfigurationVariable(
csound: *mut CSOUND,
name: *const c_char,
value: *mut c_void,
) -> c_int;
pub fn csoundParseConfigurationVariable(
csound: *mut CSOUND,
name: *const c_char,
value: *const c_char,
) -> c_int;
pub fn csoundCfgErrorCodeToString(errcode: c_int) -> *const c_char;
pub fn csoundCreateCircularBuffer(
csound: *mut CSOUND,
numelem: c_int,
elemsize: c_int,
) -> *mut c_void;
pub fn csoundReadCircularBuffer(
csound: *mut CSOUND,
circular_buffer: *mut c_void,
out: *mut c_void,
items: c_int,
) -> c_int;
pub fn csoundPeekCircularBuffer(
csound: *mut CSOUND,
circular_buffer: *mut c_void,
out: *mut c_void,
items: c_int,
) -> c_int;
pub fn csoundWriteCircularBuffer(
csound: *mut CSOUND,
p: *mut c_void,
inp: *const c_void,
items: c_int,
) -> c_int;
pub fn csoundFlushCircularBuffer(csound: *mut CSOUND, p: *mut c_void);
pub fn csoundDestroyCircularBuffer(csound: *mut CSOUND, circularbuffer: *mut c_void);
pub fn csoundOpenLibrary(library: *mut *mut c_void, libraryPath: *const c_char) -> c_int;
pub fn csoundCloseLibrary(library: *mut c_void) -> c_int;
pub fn csoundGetLibrarySymbol(library: *mut c_void, symbolName: *const c_char) -> *mut c_void;
pub fn csoundSetCscoreCallback(csound: *mut CSOUND, call: cscore_callback_type);
}
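// Illustrative sketch (not part of the original bindings): a minimal
// compile-and-perform cycle built only from the functions declared above.
// The caller supplies `argc`/`argv` (for example the path to a .csd file);
// option handling and detailed error reporting are omitted.
pub unsafe fn example_compile_and_perform(argc: c_int, argv: *const *const c_char) -> c_int {
    let csound = csoundCreate(null_mut());
    let mut result = csoundCompile(csound, argc, argv);
    if result == CSOUND_SUCCESS {
        // Run the performance loop to completion.
        result = csoundPerform(csound);
    }
    csoundDestroy(csound);
    result
}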
//! A `Tab` holds multiple panes. It tracks their coordinates (x/y) and size,
//! as well as how they should be resized
mod clipboard;
mod copy_command;
use copy_command::CopyCommand;
use zellij_tile::prelude::Style;
use zellij_utils::position::{Column, Line};
use zellij_utils::{position::Position, serde, zellij_tile};
use crate::screen::CopyOptions;
use crate::ui::pane_boundaries_frame::FrameParams;
use self::clipboard::ClipboardProvider;
use crate::{
os_input_output::ServerOsApi,
output::{CharacterChunk, Output},
panes::{FloatingPanes, TiledPanes},
panes::{LinkHandler, PaneId, PluginPane, TerminalPane},
pty::{ClientOrTabIndex, PtyInstruction, VteBytes},
thread_bus::ThreadSenders,
wasm_vm::PluginInstruction,
ClientId, ServerInstruction,
};
use serde::{Deserialize, Serialize};
use std::cell::RefCell;
use std::os::unix::io::RawFd;
use std::rc::Rc;
use std::sync::mpsc::channel;
use std::time::Instant;
use std::{
collections::{HashMap, HashSet},
str,
};
use zellij_tile::data::{Event, InputMode, ModeInfo, Palette, PaletteColor};
use zellij_utils::{
input::{
command::TerminalAction,
layout::{Layout, Run},
parse_keys,
},
pane_size::{Offset, PaneGeom, Size, SizeInPixels, Viewport},
};
macro_rules! resize_pty {
($pane:expr, $os_input:expr) => {
if let PaneId::Terminal(ref pid) = $pane.pid() {
// FIXME: This `set_terminal_size_using_fd` call would be best in
// `TerminalPane::reflow_lines`
$os_input.set_terminal_size_using_fd(
*pid,
$pane.get_content_columns() as u16,
$pane.get_content_rows() as u16,
);
}
};
}
// FIXME: This should be replaced by `RESIZE_PERCENT` at some point
pub const MIN_TERMINAL_HEIGHT: usize = 5;
pub const MIN_TERMINAL_WIDTH: usize = 5;
const MAX_PENDING_VTE_EVENTS: usize = 7000;
pub(crate) struct Tab {
pub index: usize,
pub position: usize,
pub name: String,
tiled_panes: TiledPanes,
floating_panes: FloatingPanes,
max_panes: Option<usize>,
viewport: Rc<RefCell<Viewport>>, // includes all non-UI panes
display_area: Rc<RefCell<Size>>, // includes all panes (including eg. the status bar and tab bar in the default layout)
character_cell_size: Rc<RefCell<Option<SizeInPixels>>>,
os_api: Box<dyn ServerOsApi>,
pub senders: ThreadSenders,
synchronize_is_active: bool,
should_clear_display_before_rendering: bool,
mode_info: Rc<RefCell<HashMap<ClientId, ModeInfo>>>,
default_mode_info: ModeInfo,
pub style: Style,
connected_clients: Rc<RefCell<HashSet<ClientId>>>,
draw_pane_frames: bool,
pending_vte_events: HashMap<RawFd, Vec<VteBytes>>,
pub selecting_with_mouse: bool, // this is only pub for the tests TODO: remove this once we combine write_text_to_clipboard with render
link_handler: Rc<RefCell<LinkHandler>>,
clipboard_provider: ClipboardProvider,
// TODO: used only to focus the pane when the layout is loaded
// it seems that optimization is possible using `active_panes`
focus_pane_id: Option<PaneId>,
copy_on_select: bool,
}
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(crate = "self::serde")]
pub(crate) struct TabData {
pub position: usize,
pub name: String,
pub active: bool,
pub mode_info: ModeInfo,
pub colors: Palette,
}
// FIXME: Use a struct that has a pane_type enum, to reduce all of the duplication
pub trait Pane {
fn x(&self) -> usize;
fn y(&self) -> usize;
fn rows(&self) -> usize;
fn cols(&self) -> usize;
fn get_content_x(&self) -> usize;
fn get_content_y(&self) -> usize;
fn get_content_columns(&self) -> usize;
fn get_content_rows(&self) -> usize;
fn reset_size_and_position_override(&mut self);
fn set_geom(&mut self, position_and_size: PaneGeom);
fn get_geom_override(&mut self, pane_geom: PaneGeom);
fn handle_pty_bytes(&mut self, bytes: VteBytes);
fn cursor_coordinates(&self) -> Option<(usize, usize)>;
fn adjust_input_to_terminal(&self, input_bytes: Vec<u8>) -> Vec<u8>;
fn position_and_size(&self) -> PaneGeom;
fn current_geom(&self) -> PaneGeom;
fn geom_override(&self) -> Option<PaneGeom>;
fn should_render(&self) -> bool;
fn set_should_render(&mut self, should_render: bool);
fn set_should_render_boundaries(&mut self, _should_render: bool) {}
fn selectable(&self) -> bool;
fn set_selectable(&mut self, selectable: bool);
fn render(
&mut self,
client_id: Option<ClientId>,
) -> Option<(Vec<CharacterChunk>, Option<String>)>; // TODO: better
fn render_frame(
&mut self,
client_id: ClientId,
frame_params: FrameParams,
input_mode: InputMode,
) -> Option<(Vec<CharacterChunk>, Option<String>)>; // TODO: better
fn render_fake_cursor(
&mut self,
cursor_color: PaletteColor,
text_color: PaletteColor,
) -> Option<String>;
fn render_terminal_title(&mut self, _input_mode: InputMode) -> String;
fn update_name(&mut self, name: &str);
fn pid(&self) -> PaneId;
fn reduce_height(&mut self, percent: f64);
fn increase_height(&mut self, percent: f64);
fn reduce_width(&mut self, percent: f64);
fn increase_width(&mut self, percent: f64);
fn push_down(&mut self, count: usize);
fn push_right(&mut self, count: usize);
fn pull_left(&mut self, count: usize);
fn pull_up(&mut self, count: usize);
fn scroll_up(&mut self, count: usize, client_id: ClientId);
fn scroll_down(&mut self, count: usize, client_id: ClientId);
fn clear_scroll(&mut self);
fn is_scrolled(&self) -> bool;
fn active_at(&self) -> Instant;
fn set_active_at(&mut self, instant: Instant);
fn set_frame(&mut self, frame: bool);
fn set_content_offset(&mut self, offset: Offset);
fn cursor_shape_csi(&self) -> String {
"\u{1b}[0 q".to_string() // default to non blinking block
}
fn contains(&self, position: &Position) -> bool {
match self.geom_override() {
Some(position_and_size) => position_and_size.contains(position),
None => self.position_and_size().contains(position),
}
}
fn start_selection(&mut self, _start: &Position, _client_id: ClientId) {}
fn update_selection(&mut self, _position: &Position, _client_id: ClientId) {}
fn end_selection(&mut self, _end: &Position, _client_id: ClientId) {}
fn reset_selection(&mut self) {}
fn get_selected_text(&self) -> Option<String> {
None
}
fn right_boundary_x_coords(&self) -> usize {
self.x() + self.cols()
}
fn bottom_boundary_y_coords(&self) -> usize {
self.y() + self.rows()
}
fn is_right_of(&self, other: &dyn Pane) -> bool {
self.x() > other.x()
}
fn is_directly_right_of(&self, other: &dyn Pane) -> bool {
self.x() == other.x() + other.cols()
}
fn is_left_of(&self, other: &dyn Pane) -> bool {
self.x() < other.x()
}
fn is_directly_left_of(&self, other: &dyn Pane) -> bool {
self.x() + self.cols() == other.x()
}
fn is_below(&self, other: &dyn Pane) -> bool {
self.y() > other.y()
}
fn is_directly_below(&self, other: &dyn Pane) -> bool {
self.y() == other.y() + other.rows()
}
fn is_above(&self, other: &dyn Pane) -> bool {
self.y() < other.y()
}
fn is_directly_above(&self, other: &dyn Pane) -> bool {
self.y() + self.rows() == other.y()
}
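    // True when the two panes occupy at least one common row (their vertical
    // extents intersect).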
fn horizontally_overlaps_with(&self, other: &dyn Pane) -> bool {
(self.y() >= other.y() && self.y() < (other.y() + other.rows()))
|| ((self.y() + self.rows()) <= (other.y() + other.rows())
&& (self.y() + self.rows()) > other.y())
|| (self.y() <= other.y() && (self.y() + self.rows() >= (other.y() + other.rows())))
|| (other.y() <= self.y() && (other.y() + other.rows() >= (self.y() + self.rows())))
}
fn get_horizontal_overlap_with(&self, other: &dyn Pane) -> usize {
std::cmp::min(self.y() + self.rows(), other.y() + other.rows())
- std::cmp::max(self.y(), other.y())
}
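    // True when the two panes occupy at least one common column (their horizontal
    // extents intersect).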
fn vertically_overlaps_with(&self, other: &dyn Pane) -> bool {
(self.x() >= other.x() && self.x() < (other.x() + other.cols()))
|| ((self.x() + self.cols()) <= (other.x() + other.cols())
&& (self.x() + self.cols()) > other.x())
|| (self.x() <= other.x() && (self.x() + self.cols() >= (other.x() + other.cols())))
|| (other.x() <= self.x() && (other.x() + other.cols() >= (self.x() + self.cols())))
}
fn get_vertical_overlap_with(&self, other: &dyn Pane) -> usize {
std::cmp::min(self.x() + self.cols(), other.x() + other.cols())
- std::cmp::max(self.x(), other.x())
}
fn can_reduce_height_by(&self, reduce_by: usize) -> bool {
self.rows() > reduce_by && self.rows() - reduce_by >= self.min_height()
}
fn can_reduce_width_by(&self, reduce_by: usize) -> bool {
self.cols() > reduce_by && self.cols() - reduce_by >= self.min_width()
}
fn min_width(&self) -> usize {
MIN_TERMINAL_WIDTH
}
fn min_height(&self) -> usize {
MIN_TERMINAL_HEIGHT
}
fn drain_messages_to_pty(&mut self) -> Vec<Vec<u8>> {
// TODO: this is only relevant to terminal panes
// we should probably refactor away from this trait at some point
vec![]
}
fn render_full_viewport(&mut self) {}
fn relative_position(&self, position_on_screen: &Position) -> Position {
position_on_screen.relative_to(self.get_content_y(), self.get_content_x())
}
fn position_is_on_frame(&self, position_on_screen: &Position) -> bool {
// TODO: handle cases where we have no frame
position_on_screen.line() == self.y() as isize
|| position_on_screen.line()
== (self.y() as isize + self.rows() as isize).saturating_sub(1)
|| position_on_screen.column() == self.x()
|| position_on_screen.column() == (self.x() + self.cols()).saturating_sub(1)
}
fn set_borderless(&mut self, borderless: bool);
fn borderless(&self) -> bool;
fn handle_right_click(&mut self, _to: &Position, _client_id: ClientId) {}
fn mouse_mode(&self) -> bool;
}
impl Tab {
// FIXME: Still too many arguments for clippy to be happy...
#[allow(clippy::too_many_arguments)]
pub fn new(
index: usize,
position: usize,
name: String,
display_area: Size,
character_cell_size: Rc<RefCell<Option<SizeInPixels>>>,
os_api: Box<dyn ServerOsApi>,
senders: ThreadSenders,
max_panes: Option<usize>,
style: Style,
default_mode_info: ModeInfo,
draw_pane_frames: bool,
connected_clients_in_app: Rc<RefCell<HashSet<ClientId>>>,
session_is_mirrored: bool,
client_id: ClientId,
copy_options: CopyOptions,
) -> Self {
let name = if name.is_empty() {
format!("Tab #{}", index + 1)
} else {
name
};
let mut connected_clients = HashSet::new();
connected_clients.insert(client_id);
let viewport: Viewport = display_area.into();
let viewport = Rc::new(RefCell::new(viewport));
let display_area = Rc::new(RefCell::new(display_area));
let connected_clients = Rc::new(RefCell::new(connected_clients));
let mode_info = Rc::new(RefCell::new(HashMap::new()));
let tiled_panes = TiledPanes::new(
display_area.clone(),
viewport.clone(),
connected_clients.clone(),
connected_clients_in_app.clone(),
mode_info.clone(),
character_cell_size.clone(),
session_is_mirrored,
draw_pane_frames,
default_mode_info.clone(),
style,
os_api.clone(),
);
let floating_panes = FloatingPanes::new(
display_area.clone(),
viewport.clone(),
connected_clients.clone(),
connected_clients_in_app,
mode_info.clone(),
session_is_mirrored,
default_mode_info.clone(),
style,
);
let clipboard_provider = match copy_options.command {
Some(command) => ClipboardProvider::Command(CopyCommand::new(command)),
None => ClipboardProvider::Osc52(copy_options.clipboard),
};
Tab {
index,
position,
tiled_panes,
floating_panes,
name,
max_panes,
viewport,
display_area,
character_cell_size,
synchronize_is_active: false,
os_api,
senders,
should_clear_display_before_rendering: false,
style,
mode_info,
default_mode_info,
draw_pane_frames,
pending_vte_events: HashMap::new(),
connected_clients,
selecting_with_mouse: false,
link_handler: Rc::new(RefCell::new(LinkHandler::new())),
clipboard_provider,
focus_pane_id: None,
copy_on_select: copy_options.copy_on_select,
}
}
pub fn apply_layout(
&mut self,
layout: Layout,
new_pids: Vec<RawFd>,
tab_index: usize,
client_id: ClientId,
) {
if self.tiled_panes.has_panes() {
log::error!(
"Applying a layout to a tab with existing panes - this is not yet supported!"
);
}
let (viewport_cols, viewport_rows) = {
let viewport = self.viewport.borrow();
(viewport.cols, viewport.rows)
};
let mut free_space = PaneGeom::default();
free_space.cols.set_inner(viewport_cols);
free_space.rows.set_inner(viewport_rows);
let positions_in_layout = layout.position_panes_in_space(&free_space);
let positions_and_size = positions_in_layout.iter();
let mut new_pids = new_pids.iter();
let mut focus_pane_id: Option<PaneId> = None;
let mut set_focus_pane_id = |layout: &Layout, pane_id: PaneId| {
if layout.focus.unwrap_or(false) && focus_pane_id.is_none() {
focus_pane_id = Some(pane_id);
}
};
for (layout, position_and_size) in positions_and_size {
// A plugin pane
if let Some(Run::Plugin(run)) = layout.run.clone() {
let (pid_tx, pid_rx) = channel();
let pane_title = run.location.to_string();
self.senders
.send_to_plugin(PluginInstruction::Load(pid_tx, run, tab_index, client_id))
.unwrap();
let pid = pid_rx.recv().unwrap();
let mut new_plugin = PluginPane::new(
pid,
*position_and_size,
self.senders.to_plugin.as_ref().unwrap().clone(),
pane_title,
layout.pane_name.clone().unwrap_or_default(),
);
new_plugin.set_borderless(layout.borderless);
self.tiled_panes
.add_pane_with_existing_geom(PaneId::Plugin(pid), Box::new(new_plugin));
set_focus_pane_id(layout, PaneId::Plugin(pid));
} else {
// there are still panes left to fill, use the pids we received in this method
                let pid = new_pids.next().unwrap(); // if this crashes it means we got fewer pids than there are panes in this layout
let next_terminal_position = self.get_next_terminal_position();
let mut new_pane = TerminalPane::new(
*pid,
*position_and_size,
self.style,
next_terminal_position,
layout.pane_name.clone().unwrap_or_default(),
self.link_handler.clone(),
self.character_cell_size.clone(),
);
new_pane.set_borderless(layout.borderless);
self.tiled_panes
.add_pane_with_existing_geom(PaneId::Terminal(*pid), Box::new(new_pane));
set_focus_pane_id(layout, PaneId::Terminal(*pid));
}
}
for unused_pid in new_pids {
// this is a bit of a hack and happens because we don't have any central location that
// can query the screen as to how many panes it needs to create a layout
// fixing this will require a bit of an architecture change
self.senders
.send_to_pty(PtyInstruction::ClosePane(PaneId::Terminal(*unused_pid)))
.unwrap();
}
// FIXME: This is another hack to crop the viewport to fixed-size panes. Once you can have
// non-fixed panes that are part of the viewport, get rid of this!
let display_area = {
let display_area = self.display_area.borrow();
*display_area
};
self.resize_whole_tab(display_area);
let boundary_geoms = self.tiled_panes.fixed_pane_geoms();
for geom in boundary_geoms {
self.offset_viewport(&geom)
}
self.tiled_panes.set_pane_frames(self.draw_pane_frames);
self.should_clear_display_before_rendering = true;
if let Some(pane_id) = focus_pane_id {
self.focus_pane_id = Some(pane_id);
self.tiled_panes.focus_pane(pane_id, client_id);
} else {
// This is the end of the nasty viewport hack...
let next_selectable_pane_id = self.tiled_panes.first_selectable_pane_id();
match next_selectable_pane_id {
Some(active_pane_id) => {
self.tiled_panes.focus_pane(active_pane_id, client_id);
}
None => {
// this is very likely a configuration error (layout with no selectable panes)
self.tiled_panes.clear_active_panes();
}
}
}
}
pub fn update_input_modes(&mut self) {
// this updates all plugins with the client's input mode
let mode_infos = self.mode_info.borrow();
for client_id in self.connected_clients.borrow().iter() {
let mode_info = mode_infos.get(client_id).unwrap_or(&self.default_mode_info);
self.senders
.send_to_plugin(PluginInstruction::Update(
None,
Some(*client_id),
Event::ModeUpdate(mode_info.clone()),
))
.unwrap();
}
}
pub fn add_client(&mut self, client_id: ClientId, mode_info: Option<ModeInfo>) {
let other_clients_exist_in_tab = { !self.connected_clients.borrow().is_empty() };
if other_clients_exist_in_tab {
if let Some(first_active_floating_pane_id) =
self.floating_panes.first_active_floating_pane_id()
{
self.floating_panes
.focus_pane(first_active_floating_pane_id, client_id);
}
if let Some(first_active_tiled_pane_id) = self.tiled_panes.first_active_pane_id() {
self.tiled_panes
.focus_pane(first_active_tiled_pane_id, client_id);
}
self.connected_clients.borrow_mut().insert(client_id);
self.mode_info.borrow_mut().insert(
client_id,
mode_info.unwrap_or_else(|| self.default_mode_info.clone()),
);
} else {
let mut pane_ids: Vec<PaneId> = self.tiled_panes.pane_ids().copied().collect();
if pane_ids.is_empty() {
// no panes here, bye bye
return;
}
let focus_pane_id = self.focus_pane_id.unwrap_or_else(|| {
pane_ids.sort(); // TODO: make this predictable
pane_ids.retain(|p| !self.tiled_panes.panes_to_hide_contains(*p));
*pane_ids.get(0).unwrap()
});
self.tiled_panes.focus_pane(focus_pane_id, client_id);
self.connected_clients.borrow_mut().insert(client_id);
self.mode_info.borrow_mut().insert(
client_id,
mode_info.unwrap_or_else(|| self.default_mode_info.clone()),
);
}
self.set_force_render();
self.update_input_modes();
}
pub fn change_mode_info(&mut self, mode_info: ModeInfo, client_id: ClientId) {
self.mode_info.borrow_mut().insert(client_id, mode_info);
}
pub fn add_multiple_clients(&mut self, client_ids_to_mode_infos: Vec<(ClientId, ModeInfo)>) {
for (client_id, client_mode_info) in client_ids_to_mode_infos {
self.add_client(client_id, None);
self.mode_info
.borrow_mut()
.insert(client_id, client_mode_info);
}
}
pub fn remove_client(&mut self, client_id: ClientId) {
self.focus_pane_id = None;
self.connected_clients.borrow_mut().remove(&client_id);
self.set_force_render();
}
pub fn drain_connected_clients(
&mut self,
clients_to_drain: Option<Vec<ClientId>>,
) -> Vec<(ClientId, ModeInfo)> {
// None => all clients
let mut client_ids_to_mode_infos = vec![];
let clients_to_drain = clients_to_drain
.unwrap_or_else(|| self.connected_clients.borrow_mut().drain().collect());
for client_id in clients_to_drain {
client_ids_to_mode_infos.push(self.drain_single_client(client_id));
}
client_ids_to_mode_infos
}
pub fn drain_single_client(&mut self, client_id: ClientId) -> (ClientId, ModeInfo) {
let client_mode_info = self
.mode_info
.borrow_mut()
.remove(&client_id)
.unwrap_or_else(|| self.default_mode_info.clone());
self.connected_clients.borrow_mut().remove(&client_id);
(client_id, client_mode_info)
}
pub fn has_no_connected_clients(&self) -> bool {
self.connected_clients.borrow().is_empty()
}
pub fn toggle_pane_embed_or_floating(&mut self, client_id: ClientId) {
if self.tiled_panes.fullscreen_is_active() {
self.tiled_panes.unset_fullscreen();
}
if self.floating_panes.panes_are_visible() {
if let Some(focused_floating_pane_id) = self.floating_panes.active_pane_id(client_id) {
if self.tiled_panes.has_room_for_new_pane() {
// this unwrap is safe because floating panes should not be visible if there are no floating panes
let floating_pane_to_embed = self.close_pane(focused_floating_pane_id).unwrap();
self.tiled_panes
.insert_pane(focused_floating_pane_id, floating_pane_to_embed);
self.should_clear_display_before_rendering = true;
self.tiled_panes
.focus_pane(focused_floating_pane_id, client_id);
self.floating_panes.toggle_show_panes(false);
}
}
} else if let Some(focused_pane_id) = self.tiled_panes.focused_pane_id(client_id) {
if let Some(new_pane_geom) = self.floating_panes.find_room_for_new_pane() {
if self.get_selectable_tiled_panes().count() <= 1 {
// don't close the only pane on screen...
return;
}
if let Some(mut embedded_pane_to_float) = self.close_pane(focused_pane_id) {
embedded_pane_to_float.set_geom(new_pane_geom);
resize_pty!(embedded_pane_to_float, self.os_api);
embedded_pane_to_float.set_active_at(Instant::now());
self.floating_panes
.add_pane(focused_pane_id, embedded_pane_to_float);
self.floating_panes.focus_pane(focused_pane_id, client_id);
self.floating_panes.toggle_show_panes(true);
}
}
}
}
pub fn toggle_floating_panes(
&mut self,
client_id: ClientId,
default_shell: Option<TerminalAction>,
) {
if self.floating_panes.panes_are_visible() {
self.floating_panes.toggle_show_panes(false);
self.set_force_render();
} else {
self.floating_panes.toggle_show_panes(true);
match self.floating_panes.first_floating_pane_id() {
Some(first_floating_pane_id) => {
if !self.floating_panes.active_panes_contain(&client_id) {
self.floating_panes
.focus_pane(first_floating_pane_id, client_id);
}
}
None => {
// there aren't any floating panes, we need to open a new one
//
// ************************************************************************************************
// BEWARE - THIS IS NOT ATOMIC - this sends an instruction to the pty thread to open a new terminal
// the pty thread will do its thing and eventually come back to the new_pane
// method on this tab which will open a new floating pane because we just
// toggled their visibility above us.
// If the pty thread takes too long, weird things can happen...
// ************************************************************************************************
//
let instruction = PtyInstruction::SpawnTerminal(
default_shell,
ClientOrTabIndex::ClientId(client_id),
);
self.senders.send_to_pty(instruction).unwrap();
}
}
self.floating_panes.set_force_render();
}
self.set_force_render();
}
pub fn new_pane(&mut self, pid: PaneId, client_id: Option<ClientId>) {
self.close_down_to_max_terminals();
if self.floating_panes.panes_are_visible() {
if let Some(new_pane_geom) = self.floating_panes.find_room_for_new_pane() {
let next_terminal_position = self.get_next_terminal_position();
if let PaneId::Terminal(term_pid) = pid {
let mut new_pane = TerminalPane::new(
term_pid,
new_pane_geom,
self.style,
next_terminal_position,
String::new(),
self.link_handler.clone(),
self.character_cell_size.clone(),
);
new_pane.set_content_offset(Offset::frame(1)); // floating panes always have a frame
resize_pty!(new_pane, self.os_api);
self.floating_panes.add_pane(pid, Box::new(new_pane));
self.floating_panes.focus_pane_for_all_clients(pid);
}
}
} else {
if self.tiled_panes.fullscreen_is_active() {
self.tiled_panes.unset_fullscreen();
}
if self.tiled_panes.has_room_for_new_pane() {
if let PaneId::Terminal(term_pid) = pid {
let next_terminal_position = self.get_next_terminal_position();
let new_terminal = TerminalPane::new(
term_pid,
PaneGeom::default(), // the initial size will be set later
self.style,
next_terminal_position,
String::new(),
self.link_handler.clone(),
self.character_cell_size.clone(),
);
self.tiled_panes.insert_pane(pid, Box::new(new_terminal));
self.should_clear_display_before_rendering = true;
if let Some(client_id) = client_id {
self.tiled_panes.focus_pane(pid, client_id);
}
}
}
}
}
pub fn horizontal_split(&mut self, pid: PaneId, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
return;
}
self.close_down_to_max_terminals();
if self.tiled_panes.fullscreen_is_active() {
self.toggle_active_pane_fullscreen(client_id);
}
if self.tiled_panes.can_split_pane_horizontally(client_id) {
if let PaneId::Terminal(term_pid) = pid {
let next_terminal_position = self.get_next_terminal_position();
let new_terminal = TerminalPane::new(
term_pid,
PaneGeom::default(), // the initial size will be set later
self.style,
next_terminal_position,
String::new(),
self.link_handler.clone(),
self.character_cell_size.clone(),
);
self.tiled_panes
.split_pane_horizontally(pid, Box::new(new_terminal), client_id);
self.should_clear_display_before_rendering = true;
self.tiled_panes.focus_pane(pid, client_id);
}
}
}
pub fn vertical_split(&mut self, pid: PaneId, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
return;
}
self.close_down_to_max_terminals();
if self.tiled_panes.fullscreen_is_active() {
self.toggle_active_pane_fullscreen(client_id);
}
if self.tiled_panes.can_split_pane_vertically(client_id) {
if let PaneId::Terminal(term_pid) = pid {
let next_terminal_position = self.get_next_terminal_position();
let new_terminal = TerminalPane::new(
term_pid,
PaneGeom::default(), // the initial size will be set later
self.style,
next_terminal_position,
String::new(),
self.link_handler.clone(),
self.character_cell_size.clone(),
);
self.tiled_panes
.split_pane_vertically(pid, Box::new(new_terminal), client_id);
self.should_clear_display_before_rendering = true;
self.tiled_panes.focus_pane(pid, client_id);
}
}
}
pub fn get_active_pane(&self, client_id: ClientId) -> Option<&dyn Pane> {
self.get_active_pane_id(client_id).and_then(|ap| {
if self.floating_panes.panes_are_visible() {
self.floating_panes.get_pane(ap).map(Box::as_ref)
} else {
self.tiled_panes.get_pane(ap).map(Box::as_ref)
}
})
}
pub fn get_active_pane_mut(&mut self, client_id: ClientId) -> Option<&mut Box<dyn Pane>> {
self.get_active_pane_id(client_id).and_then(|ap| {
if self.floating_panes.panes_are_visible() {
self.floating_panes.get_pane_mut(ap)
} else {
self.tiled_panes.get_pane_mut(ap)
}
})
}
pub fn get_active_pane_or_floating_pane_mut(
&mut self,
client_id: ClientId,
) -> Option<&mut Box<dyn Pane>> {
if self.floating_panes.panes_are_visible() && self.floating_panes.has_active_panes() {
self.floating_panes.get_active_pane_mut(client_id)
} else {
self.get_active_pane_mut(client_id)
}
}
pub fn get_active_pane_id(&self, client_id: ClientId) -> Option<PaneId> {
if self.floating_panes.panes_are_visible() {
self.floating_panes.get_active_pane_id(client_id)
} else {
self.tiled_panes.get_active_pane_id(client_id)
}
}
fn get_active_terminal_id(&self, client_id: ClientId) -> Option<RawFd> {
if let Some(PaneId::Terminal(pid)) = self.get_active_pane_id(client_id) {
Some(pid)
} else {
None
}
}
pub fn has_terminal_pid(&self, pid: RawFd) -> bool {
self.tiled_panes.panes_contain(&PaneId::Terminal(pid))
|| self.floating_panes.panes_contain(&PaneId::Terminal(pid))
}
pub fn handle_pty_bytes(&mut self, pid: RawFd, bytes: VteBytes) {
if let Some(terminal_output) = self
.tiled_panes
.get_pane_mut(PaneId::Terminal(pid))
.or_else(|| self.floating_panes.get_pane_mut(PaneId::Terminal(pid)))
{
// If the pane is scrolled buffer the vte events
if terminal_output.is_scrolled() {
self.pending_vte_events.entry(pid).or_default().push(bytes);
if let Some(evs) = self.pending_vte_events.get(&pid) {
// Reset scroll - and process all pending events for this pane
if evs.len() >= MAX_PENDING_VTE_EVENTS {
terminal_output.clear_scroll();
self.process_pending_vte_events(pid);
}
}
return;
}
}
self.process_pty_bytes(pid, bytes);
}
pub fn process_pending_vte_events(&mut self, pid: RawFd) {
if let Some(pending_vte_events) = self.pending_vte_events.get_mut(&pid) {
let vte_events: Vec<VteBytes> = pending_vte_events.drain(..).collect();
for vte_event in vte_events {
self.process_pty_bytes(pid, vte_event);
}
}
}
fn process_pty_bytes(&mut self, pid: RawFd, bytes: VteBytes) {
if let Some(terminal_output) = self
.tiled_panes
.get_pane_mut(PaneId::Terminal(pid))
.or_else(|| self.floating_panes.get_pane_mut(PaneId::Terminal(pid)))
{
terminal_output.handle_pty_bytes(bytes);
let messages_to_pty = terminal_output.drain_messages_to_pty();
for message in messages_to_pty {
self.write_to_pane_id(message, PaneId::Terminal(pid));
}
}
}
pub fn write_to_terminals_on_current_tab(&mut self, input_bytes: Vec<u8>) {
let pane_ids = self.get_static_and_floating_pane_ids();
pane_ids.iter().for_each(|&pane_id| {
self.write_to_pane_id(input_bytes.clone(), pane_id);
});
}
pub fn write_to_active_terminal(&mut self, input_bytes: Vec<u8>, client_id: ClientId) {
let pane_id = if self.floating_panes.panes_are_visible() {
self.floating_panes
.get_active_pane_id(client_id)
.unwrap_or_else(|| self.tiled_panes.get_active_pane_id(client_id).unwrap())
} else {
self.tiled_panes.get_active_pane_id(client_id).unwrap()
};
self.write_to_pane_id(input_bytes, pane_id);
}
pub fn write_to_terminal_at(&mut self, input_bytes: Vec<u8>, position: &Position) {
if self.floating_panes.panes_are_visible() {
let pane_id = self.floating_panes.get_pane_id_at(position, false);
if let Some(pane_id) = pane_id {
self.write_to_pane_id(input_bytes, pane_id);
return;
}
}
let pane_id = self.get_pane_id_at(position, false);
if let Some(pane_id) = pane_id {
self.write_to_pane_id(input_bytes, pane_id);
}
}
pub fn write_to_pane_id(&mut self, input_bytes: Vec<u8>, pane_id: PaneId) {
match pane_id {
PaneId::Terminal(active_terminal_id) => {
let active_terminal = self
.floating_panes
.get(&pane_id)
.unwrap_or_else(|| self.tiled_panes.get_pane(pane_id).unwrap());
let adjusted_input = active_terminal.adjust_input_to_terminal(input_bytes);
if let Err(e) = self
.os_api
.write_to_tty_stdin(active_terminal_id, &adjusted_input)
{
log::error!("failed to write to terminal: {}", e);
}
if let Err(e) = self.os_api.tcdrain(active_terminal_id) {
log::error!("failed to drain terminal: {}", e);
}
}
PaneId::Plugin(pid) => {
for key in parse_keys(&input_bytes) {
self.senders
.send_to_plugin(PluginInstruction::Update(Some(pid), None, Event::Key(key)))
.unwrap()
}
}
}
}
pub fn get_active_terminal_cursor_position(
&self,
client_id: ClientId,
) -> Option<(usize, usize)> {
// (x, y)
let active_pane_id = if self.floating_panes.panes_are_visible() {
self.floating_panes
.get_active_pane_id(client_id)
.or_else(|| self.tiled_panes.get_active_pane_id(client_id))?
} else {
self.tiled_panes.get_active_pane_id(client_id)?
};
let active_terminal = &self
.floating_panes
.get(&active_pane_id)
.or_else(|| self.tiled_panes.get_pane(active_pane_id))?;
active_terminal
.cursor_coordinates()
.map(|(x_in_terminal, y_in_terminal)| {
let x = active_terminal.x() + x_in_terminal;
let y = active_terminal.y() + y_in_terminal;
(x, y)
})
}
pub fn toggle_active_pane_fullscreen(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
return;
}
self.tiled_panes.toggle_active_pane_fullscreen(client_id);
}
pub fn is_fullscreen_active(&self) -> bool {
self.tiled_panes.fullscreen_is_active()
}
pub fn are_floating_panes_visible(&self) -> bool {
self.floating_panes.panes_are_visible()
}
pub fn set_force_render(&mut self) {
self.tiled_panes.set_force_render();
self.floating_panes.set_force_render();
}
pub fn is_sync_panes_active(&self) -> bool {
self.synchronize_is_active
}
pub fn toggle_sync_panes_is_active(&mut self) {
self.synchronize_is_active = !self.synchronize_is_active;
}
pub fn mark_active_pane_for_rerender(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_mut(client_id) {
active_pane.set_should_render(true);
}
}
fn update_active_panes_in_pty_thread(&self) {
// this is a bit hacky and we should ideally not keep this state in two different places at
// some point
let connected_clients: Vec<ClientId> =
{ self.connected_clients.borrow().iter().copied().collect() };
for client_id in connected_clients {
self.senders
.send_to_pty(PtyInstruction::UpdateActivePane(
self.get_active_pane_id(client_id),
client_id,
))
.unwrap();
}
}
pub fn render(&mut self, output: &mut Output, overlay: Option<String>) {
let connected_clients: HashSet<ClientId> =
{ self.connected_clients.borrow().iter().copied().collect() };
if connected_clients.is_empty() || !self.tiled_panes.has_active_panes() {
return;
}
self.update_active_panes_in_pty_thread();
let floating_panes_stack = self.floating_panes.stack();
output.add_clients(
&connected_clients,
self.link_handler.clone(),
floating_panes_stack,
);
self.hide_cursor_and_clear_display_as_needed(output);
self.tiled_panes
.render(output, self.floating_panes.panes_are_visible());
if self.floating_panes.panes_are_visible() && self.floating_panes.has_active_panes() {
self.floating_panes.render(output);
}
// FIXME: Once clients can be distinguished
if let Some(overlay_vte) = &overlay {
output.add_post_vte_instruction_to_multiple_clients(
connected_clients.iter().copied(),
overlay_vte,
);
}
self.render_cursor(output);
}
fn hide_cursor_and_clear_display_as_needed(&mut self, output: &mut Output) {
let hide_cursor = "\u{1b}[?25l";
let connected_clients: Vec<ClientId> =
{ self.connected_clients.borrow().iter().copied().collect() };
output.add_pre_vte_instruction_to_multiple_clients(
connected_clients.iter().copied(),
hide_cursor,
);
if self.should_clear_display_before_rendering {
let clear_display = "\u{1b}[2J";
output.add_pre_vte_instruction_to_multiple_clients(
connected_clients.iter().copied(),
clear_display,
);
self.should_clear_display_before_rendering = false;
}
}
fn render_cursor(&self, output: &mut Output) {
let connected_clients: Vec<ClientId> =
{ self.connected_clients.borrow().iter().copied().collect() };
for client_id in connected_clients {
match self.get_active_terminal_cursor_position(client_id) {
Some((cursor_position_x, cursor_position_y)) => {
let show_cursor = "\u{1b}[?25h";
let change_cursor_shape = self
.get_active_pane(client_id)
.map(|ap| ap.cursor_shape_csi())
.unwrap_or_default();
let goto_cursor_position = &format!(
"\u{1b}[{};{}H\u{1b}[m{}",
cursor_position_y + 1,
cursor_position_x + 1,
change_cursor_shape
); // goto row/col
output.add_post_vte_instruction_to_client(client_id, show_cursor);
output.add_post_vte_instruction_to_client(client_id, goto_cursor_position);
}
None => {
let hide_cursor = "\u{1b}[?25l";
output.add_post_vte_instruction_to_client(client_id, hide_cursor);
}
}
}
}
fn get_tiled_panes(&self) -> impl Iterator<Item = (&PaneId, &Box<dyn Pane>)> {
self.tiled_panes.get_panes()
}
fn get_selectable_tiled_panes(&self) -> impl Iterator<Item = (&PaneId, &Box<dyn Pane>)> {
self.get_tiled_panes().filter(|(_, p)| p.selectable())
}
fn get_next_terminal_position(&self) -> usize {
let tiled_panes_count = self
.tiled_panes
.get_panes()
.filter(|(k, _)| match k {
PaneId::Plugin(_) => false,
PaneId::Terminal(_) => true,
})
.count();
let floating_panes_count = self
.floating_panes
.get_panes()
.filter(|(k, _)| match k {
PaneId::Plugin(_) => false,
PaneId::Terminal(_) => true,
})
.count();
tiled_panes_count + floating_panes_count + 1
}
pub fn has_selectable_panes(&self) -> bool {
let selectable_tiled_panes = self.tiled_panes.get_panes().filter(|(_, p)| p.selectable());
let selectable_floating_panes = self
.floating_panes
.get_panes()
.filter(|(_, p)| p.selectable());
selectable_tiled_panes.count() > 0 || selectable_floating_panes.count() > 0
}
pub fn has_selectable_tiled_panes(&self) -> bool {
let selectable_tiled_panes = self.tiled_panes.get_panes().filter(|(_, p)| p.selectable());
selectable_tiled_panes.count() > 0
}
pub fn resize_whole_tab(&mut self, new_screen_size: Size) {
self.floating_panes.resize(new_screen_size);
self.tiled_panes.resize(new_screen_size);
self.should_clear_display_before_rendering = true;
}
pub fn resize_left(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_left(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_left(client_id);
}
}
pub fn resize_right(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_right(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_right(client_id);
}
}
pub fn resize_down(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_down(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_down(client_id);
}
}
pub fn resize_up(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_up(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_up(client_id);
}
}
pub fn resize_increase(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_increase(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_increase(client_id);
}
}
pub fn resize_decrease(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
let successfully_resized = self
.floating_panes
.resize_active_pane_decrease(client_id, &mut self.os_api);
if successfully_resized {
                self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" in case of a decrease
}
} else {
self.tiled_panes.resize_active_pane_decrease(client_id);
}
}
fn set_pane_active_at(&mut self, pane_id: PaneId) {
if let Some(pane) = self.tiled_panes.get_pane_mut(pane_id) {
pane.set_active_at(Instant::now());
} else if let Some(pane) = self.floating_panes.get_pane_mut(pane_id) {
pane.set_active_at(Instant::now());
}
}
pub fn focus_next_pane(&mut self, client_id: ClientId) {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.focus_next_pane(client_id);
}
pub fn focus_previous_pane(&mut self, client_id: ClientId) {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.focus_previous_pane(client_id);
}
// returns a boolean that indicates whether the focus moved
pub fn move_focus_left(&mut self, client_id: ClientId) -> bool {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_focus_left(
client_id,
&self.connected_clients.borrow().iter().copied().collect(),
)
} else {
if !self.has_selectable_panes() {
return false;
}
if self.tiled_panes.fullscreen_is_active() {
return false;
}
self.tiled_panes.move_focus_left(client_id)
}
}
pub fn move_focus_down(&mut self, client_id: ClientId) -> bool {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_focus_down(
client_id,
&self.connected_clients.borrow().iter().copied().collect(),
)
} else {
if !self.has_selectable_panes() {
return false;
}
if self.tiled_panes.fullscreen_is_active() {
return false;
}
self.tiled_panes.move_focus_down(client_id)
}
}
pub fn move_focus_up(&mut self, client_id: ClientId) -> bool {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_focus_up(
client_id,
&self.connected_clients.borrow().iter().copied().collect(),
)
} else {
if !self.has_selectable_panes() {
return false;
}
if self.tiled_panes.fullscreen_is_active() {
return false;
}
self.tiled_panes.move_focus_up(client_id)
}
}
// returns a boolean that indicates whether the focus moved
pub fn move_focus_right(&mut self, client_id: ClientId) -> bool {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_focus_right(
client_id,
&self.connected_clients.borrow().iter().copied().collect(),
)
} else {
if !self.has_selectable_panes() {
return false;
}
if self.tiled_panes.fullscreen_is_active() {
return false;
}
self.tiled_panes.move_focus_right(client_id)
}
}
pub fn move_active_pane(&mut self, client_id: ClientId) {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.move_active_pane(client_id);
}
pub fn move_active_pane_down(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_active_pane_down(client_id);
self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" behind
} else {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.move_active_pane_down(client_id);
}
}
pub fn move_active_pane_up(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_active_pane_up(client_id);
self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" behind
} else {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.move_active_pane_up(client_id);
}
}
pub fn move_active_pane_right(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_active_pane_right(client_id);
self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" behind
} else {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.move_active_pane_right(client_id);
}
}
pub fn move_active_pane_left(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
self.floating_panes.move_active_pane_left(client_id);
self.set_force_render(); // we force render here to make sure the panes under the floating pane render and don't leave "garbage" behind
} else {
if !self.has_selectable_panes() {
return;
}
if self.tiled_panes.fullscreen_is_active() {
return;
}
self.tiled_panes.move_active_pane_left(client_id);
}
}
fn close_down_to_max_terminals(&mut self) {
if let Some(max_panes) = self.max_panes {
let terminals = self.get_tiled_pane_ids();
for &pid in terminals.iter().skip(max_panes - 1) {
self.senders
.send_to_pty(PtyInstruction::ClosePane(pid))
.unwrap();
self.close_pane(pid);
}
}
}
pub fn get_tiled_pane_ids(&self) -> Vec<PaneId> {
self.get_tiled_panes().map(|(&pid, _)| pid).collect()
}
pub fn get_all_pane_ids(&self) -> Vec<PaneId> {
// this is here just as a naming thing to make things more explicit
self.get_static_and_floating_pane_ids()
}
pub fn get_static_and_floating_pane_ids(&self) -> Vec<PaneId> {
self.tiled_panes
.pane_ids()
.chain(self.floating_panes.pane_ids())
.copied()
.collect()
}
pub fn set_pane_selectable(&mut self, id: PaneId, selectable: bool) {
if let Some(pane) = self.tiled_panes.get_pane_mut(id) {
pane.set_selectable(selectable);
if !selectable {
// there are some edge cases in which this causes a hard crash when there are no
// other selectable panes - ideally this should never happen unless it's a
// configuration error - but this *does* sometimes happen with the default
// configuration as well since we set this at run time. I left this here because
// this should very rarely happen and I hope in my heart that we will stop setting
// this at runtime in the default configuration at some point
//
// If however this is not the case and we find this does cause crashes, we can
// solve it by adding a "dangling_clients" struct to Tab which we would fill with
// the relevant client ids in this case and drain as soon as a new selectable pane
// is opened
self.tiled_panes.move_clients_out_of_pane(id);
}
}
}
pub fn close_pane(&mut self, id: PaneId) -> Option<Box<dyn Pane>> {
if self.floating_panes.panes_contain(&id) {
let closed_pane = self.floating_panes.remove_pane(id);
self.floating_panes.move_clients_out_of_pane(id);
if !self.floating_panes.has_panes() {
self.floating_panes.toggle_show_panes(false);
}
self.set_force_render();
self.floating_panes.set_force_render();
closed_pane
} else {
if self.tiled_panes.fullscreen_is_active() {
self.tiled_panes.unset_fullscreen();
}
let closed_pane = self.tiled_panes.remove_pane(id);
self.set_force_render();
self.tiled_panes.set_force_render();
closed_pane
}
}
pub fn close_focused_pane(&mut self, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
if let Some(active_floating_pane_id) = self.floating_panes.active_pane_id(client_id) {
self.close_pane(active_floating_pane_id);
self.senders
.send_to_pty(PtyInstruction::ClosePane(active_floating_pane_id))
.unwrap();
return;
}
}
if let Some(active_pane_id) = self.tiled_panes.get_active_pane_id(client_id) {
self.close_pane(active_pane_id);
self.senders
.send_to_pty(PtyInstruction::ClosePane(active_pane_id))
.unwrap();
}
}
pub fn scroll_active_terminal_up(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
active_pane.scroll_up(1, client_id);
}
}
pub fn scroll_active_terminal_down(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
active_pane.scroll_down(1, client_id);
if !active_pane.is_scrolled() {
if let PaneId::Terminal(raw_fd) = active_pane.pid() {
self.process_pending_vte_events(raw_fd);
}
}
}
}
pub fn scroll_active_terminal_up_page(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
// prevent overflow when row == 0
let scroll_rows = active_pane.rows().max(1) - 1;
active_pane.scroll_up(scroll_rows, client_id);
}
}
pub fn scroll_active_terminal_down_page(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
let scroll_rows = active_pane.get_content_rows();
active_pane.scroll_down(scroll_rows, client_id);
if !active_pane.is_scrolled() {
if let PaneId::Terminal(raw_fd) = active_pane.pid() {
self.process_pending_vte_events(raw_fd);
}
}
}
}
pub fn scroll_active_terminal_up_half_page(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
// prevent overflow when row == 0
let scroll_rows = (active_pane.rows().max(1) - 1) / 2;
active_pane.scroll_up(scroll_rows, client_id);
}
}
pub fn scroll_active_terminal_down_half_page(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
let scroll_rows = (active_pane.rows().max(1) - 1) / 2;
active_pane.scroll_down(scroll_rows, client_id);
if !active_pane.is_scrolled() {
if let PaneId::Terminal(raw_fd) = active_pane.pid() {
self.process_pending_vte_events(raw_fd);
}
}
}
}
pub fn scroll_active_terminal_to_bottom(&mut self, client_id: ClientId) {
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
active_pane.clear_scroll();
if !active_pane.is_scrolled() {
if let PaneId::Terminal(raw_fd) = active_pane.pid() {
self.process_pending_vte_events(raw_fd);
}
}
}
}
pub fn clear_active_terminal_scroll(&mut self, client_id: ClientId) {
// TODO: is this a thing?
if let Some(active_pane) = self.get_active_pane_or_floating_pane_mut(client_id) {
active_pane.clear_scroll();
if !active_pane.is_scrolled() {
if let PaneId::Terminal(raw_fd) = active_pane.pid() {
self.process_pending_vte_events(raw_fd);
}
}
}
}
pub fn scroll_terminal_up(&mut self, point: &Position, lines: usize, client_id: ClientId) {
if let Some(pane) = self.get_pane_at(point, false) {
if pane.mouse_mode() {
let relative_position = pane.relative_position(point);
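                // SGR (1006) mouse reporting: ESC [ < button ; col ; row M, with 1-based
                // coordinates; button 64 is wheel-up (65, used below, is wheel-down).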
let mouse_event = format!(
"\u{1b}[<64;{:?};{:?}M",
relative_position.column.0 + 1,
relative_position.line.0 + 1
);
self.write_to_terminal_at(mouse_event.into_bytes(), point);
} else {
pane.scroll_up(lines, client_id);
}
}
}
pub fn scroll_terminal_down(&mut self, point: &Position, lines: usize, client_id: ClientId) {
if let Some(pane) = self.get_pane_at(point, false) {
if pane.mouse_mode() {
let relative_position = pane.relative_position(point);
let mouse_event = format!(
"\u{1b}[<65;{:?};{:?}M",
relative_position.column.0 + 1,
relative_position.line.0 + 1
);
self.write_to_terminal_at(mouse_event.into_bytes(), point);
} else {
pane.scroll_down(lines, client_id);
if !pane.is_scrolled() {
if let PaneId::Terminal(pid) = pane.pid() {
self.process_pending_vte_events(pid);
}
}
}
}
}
fn get_pane_at(
&mut self,
point: &Position,
search_selectable: bool,
) -> Option<&mut Box<dyn Pane>> {
if self.floating_panes.panes_are_visible() {
if let Some(pane_id) = self.floating_panes.get_pane_id_at(point, search_selectable) {
return self.floating_panes.get_pane_mut(pane_id);
}
}
if let Some(pane_id) = self.get_pane_id_at(point, search_selectable) {
self.tiled_panes.get_pane_mut(pane_id)
} else {
None
}
}
fn get_pane_id_at(&self, point: &Position, search_selectable: bool) -> Option<PaneId> {
if self.tiled_panes.fullscreen_is_active() && self.is_position_inside_viewport(point) {
let first_client_id = {
self.connected_clients
.borrow()
.iter()
.copied()
.next()
.unwrap()
}; // TODO: instead of doing this, record the pane that is in fullscreen
return self.tiled_panes.get_active_pane_id(first_client_id);
}
if search_selectable {
self.get_selectable_tiled_panes()
.find(|(_, p)| p.contains(point))
.map(|(&id, _)| id)
} else {
self.get_tiled_panes()
.find(|(_, p)| p.contains(point))
.map(|(&id, _)| id)
}
}
pub fn handle_left_click(&mut self, position: &Position, client_id: ClientId) {
self.focus_pane_at(position, client_id);
let search_selectable = false;
if self.floating_panes.panes_are_visible()
&& self
.floating_panes
.move_pane_with_mouse(*position, search_selectable)
{
self.set_force_render();
return;
}
if let Some(pane) = self.get_pane_at(position, false) {
let relative_position = pane.relative_position(position);
if pane.mouse_mode() {
let mouse_event = format!(
"\u{1b}[<0;{:?};{:?}M",
relative_position.column.0 + 1,
relative_position.line.0 + 1
);
self.write_to_active_terminal(mouse_event.into_bytes(), client_id);
} else {
pane.start_selection(&relative_position, client_id);
self.selecting_with_mouse = true;
}
};
}
pub fn handle_right_click(&mut self, position: &Position, client_id: ClientId) {
self.focus_pane_at(position, client_id);
if let Some(pane) = self.get_pane_at(position, false) {
let relative_position = pane.relative_position(position);
if pane.mouse_mode() {
let mouse_event = format!(
"\u{1b}[<2;{:?};{:?}M",
relative_position.column.0 + 1,
relative_position.line.0 + 1
);
self.write_to_active_terminal(mouse_event.into_bytes(), client_id);
} else {
pane.handle_right_click(&relative_position, client_id);
}
};
}
fn focus_pane_at(&mut self, point: &Position, client_id: ClientId) {
if self.floating_panes.panes_are_visible() {
if let Some(clicked_pane) = self.floating_panes.get_pane_id_at(point, true) {
self.floating_panes.focus_pane(clicked_pane, client_id);
self.set_pane_active_at(clicked_pane);
return;
}
}
if let Some(clicked_pane) = self.get_pane_id_at(point, true) {
self.tiled_panes.focus_pane(clicked_pane, client_id);
self.set_pane_active_at(clicked_pane);
if self.floating_panes.panes_are_visible() {
self.floating_panes.toggle_show_panes(false);
self.set_force_render();
}
}
}
pub fn handle_mouse_release(&mut self, position: &Position, client_id: ClientId) {
if self.floating_panes.panes_are_visible()
&& self.floating_panes.pane_is_being_moved_with_mouse()
{
self.floating_panes.stop_moving_pane_with_mouse(*position);
return;
}
// read these here to avoid use of borrowed `*self`, since we are holding active_pane
let selecting = self.selecting_with_mouse;
let copy_on_release = self.copy_on_select;
let active_pane = self.get_active_pane_or_floating_pane_mut(client_id);
if let Some(active_pane) = active_pane {
let relative_position = active_pane.relative_position(position);
if active_pane.mouse_mode() {
// ensure that coordinates are valid
let col = (relative_position.column.0 + 1)
.max(1)
.min(active_pane.get_content_columns());
let line = (relative_position.line.0 + 1)
.max(1)
.min(active_pane.get_content_rows() as isize);
let mouse_event = format!("\u{1b}[<0;{:?};{:?}m", col, line);
self.write_to_active_terminal(mouse_event.into_bytes(), client_id);
} else if selecting {
active_pane.end_selection(&relative_position, client_id);
if copy_on_release {
let selected_text = active_pane.get_selected_text();
active_pane.reset_selection();
if let Some(selected_text) = selected_text {
self.write_selection_to_clipboard(&selected_text);
}
}
self.selecting_with_mouse = false;
}
}
}
pub fn handle_mouse_hold(&mut self, position_on_screen: &Position, client_id: ClientId) {
let search_selectable = true;
if self.floating_panes.panes_are_visible()
&& self.floating_panes.pane_is_being_moved_with_mouse()
&& self
.floating_panes
.move_pane_with_mouse(*position_on_screen, search_selectable)
{
self.set_force_render();
return;
}
let selecting = self.selecting_with_mouse;
let active_pane = self.get_active_pane_or_floating_pane_mut(client_id);
if let Some(active_pane) = active_pane {
let relative_position = active_pane.relative_position(position_on_screen);
if active_pane.mouse_mode() {
// ensure that coordinates are valid
let col = (relative_position.column.0 + 1)
.max(1)
.min(active_pane.get_content_columns());
let line = (relative_position.line.0 + 1)
.max(1)
.min(active_pane.get_content_rows() as isize);
let mouse_event = format!("\u{1b}[<32;{:?};{:?}M", col, line);
self.write_to_active_terminal(mouse_event.into_bytes(), client_id);
} else if selecting {
active_pane.update_selection(&relative_position, client_id);
}
}
}
pub fn copy_selection(&self, client_id: ClientId) {
let selected_text = self
.get_active_pane(client_id)
.and_then(|p| p.get_selected_text());
if let Some(selected_text) = selected_text {
self.write_selection_to_clipboard(&selected_text);
self.senders
.send_to_plugin(PluginInstruction::Update(
None,
None,
Event::CopyToClipboard(self.clipboard_provider.as_copy_destination()),
))
.unwrap();
}
}
fn write_selection_to_clipboard(&self, selection: &str) {
let mut output = Output::default();
let connected_clients: HashSet<ClientId> =
{ self.connected_clients.borrow().iter().copied().collect() };
output.add_clients(&connected_clients, self.link_handler.clone(), None);
let client_ids = connected_clients.iter().copied();
let clipboard_event =
match self
.clipboard_provider
.set_content(selection, &mut output, client_ids)
{
Ok(_) => {
let serialized_output = output.serialize();
self.senders
.send_to_server(ServerInstruction::Render(Some(serialized_output)))
.unwrap();
Event::CopyToClipboard(self.clipboard_provider.as_copy_destination())
}
Err(err) => {
log::error!("could not write selection to clipboard: {}", err);
Event::SystemClipboardFailure
}
};
self.senders
.send_to_plugin(PluginInstruction::Update(None, None, clipboard_event))
.unwrap();
}
fn offset_viewport(&mut self, position_and_size: &Viewport) {
let mut viewport = self.viewport.borrow_mut();
if position_and_size.x == viewport.x
&& position_and_size.x + position_and_size.cols == viewport.x + viewport.cols
{
if position_and_size.y == viewport.y {
viewport.y += position_and_size.rows;
viewport.rows -= position_and_size.rows;
} else if position_and_size.y + position_and_size.rows == viewport.y + viewport.rows {
viewport.rows -= position_and_size.rows;
}
}
if position_and_size.y == viewport.y
&& position_and_size.y + position_and_size.rows == viewport.y + viewport.rows
{
if position_and_size.x == viewport.x {
viewport.x += position_and_size.cols;
viewport.cols -= position_and_size.cols;
} else if position_and_size.x + position_and_size.cols == viewport.x + viewport.cols {
viewport.cols -= position_and_size.cols;
}
}
}
pub fn visible(&self, visible: bool) {
let pids_in_this_tab = self.tiled_panes.pane_ids().filter_map(|p| match p {
PaneId::Plugin(pid) => Some(pid),
_ => None,
});
for pid in pids_in_this_tab {
self.senders
.send_to_plugin(PluginInstruction::Update(
Some(*pid),
None,
Event::Visible(visible),
))
.unwrap();
}
}
pub fn update_active_pane_name(&mut self, buf: Vec<u8>, client_id: ClientId) {
if let Some(active_terminal_id) = self.get_active_terminal_id(client_id) {
let active_terminal = self
.tiled_panes
.get_pane_mut(PaneId::Terminal(active_terminal_id))
.unwrap();
            // Only printable ASCII (0x20..=0x7E), delete and backspace bytes are allowed.
let is_updatable = buf.iter().all(|u| matches!(u, 0x20..=0x7E | 0x08 | 0x7F));
if is_updatable {
let s = str::from_utf8(&buf).unwrap();
active_terminal.update_name(s);
}
}
}
pub fn is_position_inside_viewport(&self, point: &Position) -> bool {
let Position {
line: Line(line),
column: Column(column),
} = *point;
let line: usize = line.try_into().unwrap();
let viewport = self.viewport.borrow();
line >= viewport.y
&& column >= viewport.x
&& line <= viewport.y + viewport.rows
&& column <= viewport.x + viewport.cols
}
pub fn set_pane_frames(&mut self, should_set_pane_frames: bool) {
self.tiled_panes.set_pane_frames(should_set_pane_frames);
self.should_clear_display_before_rendering = true;
self.set_force_render();
}
pub fn panes_to_hide_count(&self) -> usize {
self.tiled_panes.panes_to_hide_count()
}
}
#[cfg(test)]
#[path = "./unit/tab_tests.rs"]
mod tab_tests;
#[cfg(test)]
#[path = "./unit/tab_integration_tests.rs"]
mod tab_integration_tests;
| 41.839016 | 165 | 0.575484 |
896172108f2bbc1d5ceb0848cef9753111b0b58f | 5,426 | //! Information about the isometric grid the game is drawn on
use bevy::prelude::*;
use bevy_ecs_tilemap::prelude::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use crate::{texture::TextureHandles, util, WORLD_SIZE};
pub const GRID_COUNT: u32 = WORLD_SIZE as u32;
pub const TILE_WIDTH: f32 = 102.;
pub const TILE_HEIGHT: f32 = 51.;
pub const TILE_SIZE: (f32, f32) = (TILE_WIDTH, TILE_HEIGHT);
pub const SPRITE_SHEET_TILE_COUNT: f32 = 10.;
pub const CHUNKS: (u32, u32) = (2, 2);
pub const CHUNK_SIZE: (u32, u32) = (GRID_COUNT / CHUNKS.0, GRID_COUNT / CHUNKS.1);
pub const ORIGIN_OFFSET: f32 = TILE_SIZE.1 / 2.0 * GRID_COUNT as f32;
// @NOTE: Going forward, I'm enforcing that all world sprites (iso perspective) are a standard size at export time.
// This prevents us from having to track the sprite offsets and calculate on the fly. This also
// makes sure we have equally sized pixels for all sprites drawn in the world
//
// World sprites are:
// * Aligned to 3d isometric tiles that are 26 x 26 x 26 'hexels'
// * each hexel is 2 x 2 pixels
// * The resulting size of the final image is exactly 102 x 104 pixels
//
// The only exception to this is the sprite sheet used in `crate::grid` as they are drawn
// separately by the tile system with their own fixed dimensions. Generally though the tile sprites
// should just be half the height of world sprites
//
// If we do still end up needing a dynamic offset we can bring the GridOffset back
/// Helper function to project world position to isometric
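/// e.g. with 102x51 px tiles, world (1.0, 0.0) projects to x = 51.0, y = ORIGIN_OFFSET - 25.5, z = 1.0.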
pub fn world_to_iso(pos: Vec2) -> Vec3 {
// FIXME: naive attempt at fixing z fighting
let ordered_z = pos.y + pos.x;
let (x, y) = (pos.x, pos.y);
let px = (x - y) * (TILE_WIDTH / 2.0);
let py = (x + y) * (TILE_HEIGHT / 2.0);
Vec3::new(px, -py + ORIGIN_OFFSET, ordered_z)
}
pub fn world_to_iso_no_offset(pos: Vec2) -> Vec3 {
// FIXME: naive attempt at fixing z fighting
let ordered_z = pos.y + pos.x;
let (x, y) = (pos.x - 0.5, pos.y - 0.5);
let px = (x - y) * (TILE_WIDTH / 2.0);
let py = (x + y) * (TILE_HEIGHT / 2.0);
Vec3::new(px, -py + ORIGIN_OFFSET, ordered_z)
}
/// Helper function converting an iso position to world position
/// GENERICALLY
/// x = (y_world / (grid_len/2)) + (x_world / (grid_len * y_to_x_pixels_ratio))
/// y = (y_world / (grid_len/2)) - (x_world / (grid_len))
/// i think?
/// assuming x_world and y_world have been translated such that 0,0 is the same as grid coords and
/// the direction of x and y share signs
// @TODO this assumes that the ratio of the pixels of x vs y as well as the ratio of grid cells vs
// pixels per grid cell
pub fn iso_to_world(iso: Vec3) -> Vec2 {
let x = iso.x;
let y = -(iso.y - ORIGIN_OFFSET);
let pos = Vec2::new(
(y + (x / 2.0)) * 2.0 / TILE_WIDTH,
(y - (x / 2.0)) / TILE_HEIGHT,
);
/*
println!("[y] {}", y);
println!("[undo] {}", pos);
pos.x = (y / GRID_COUNT_OVER_2_F32) + (x / GRID_COUNT_F32);
pos.y = (y / GRID_COUNT_OVER_2_F32) - (x / GRID_COUNT_F32);
println!("[shadowboxing] {}", grid);
*/
pos
}
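// Illustrative sketch (not part of the original file): `iso_to_world` is meant to invert
// `world_to_iso` (ignoring the packed z ordering), so a round trip should reproduce the
// input up to floating-point error.
#[cfg(test)]
mod projection_round_trip_sketch {
    use super::*;

    #[test]
    fn world_to_iso_round_trips() {
        let world = Vec2::new(2.0, 3.0);
        let back = iso_to_world(world_to_iso(world));
        assert!((back - world).length() < 1e-3);
    }
}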
/// Helper function to get the bevy_ecs_tilemap::TilePos from world position
pub fn tile_pos(pos: Vec2) -> TilePos {
if pos.x < 0. || pos.y < 0. {
util::unlikely(true);
TilePos(u32::MAX, u32::MAX)
} else {
TilePos(pos.x as u32, pos.y as u32)
}
}
pub fn setup(
mut commands: Commands,
mut map_query: MapQuery,
texture_handles: Res<TextureHandles>,
) {
let texture_handle = texture_handles["tiles"].clone();
// Create map entity and component:
let map_entity = commands.spawn().id();
let mut map = Map::new(0u16, map_entity);
let mut map_settings = LayerSettings::new(
MapSize(CHUNKS.0, CHUNKS.1),
ChunkSize(CHUNK_SIZE.0, CHUNK_SIZE.1),
TileSize(TILE_SIZE.0, TILE_SIZE.1),
TextureSize(TILE_SIZE.0 * SPRITE_SHEET_TILE_COUNT, TILE_SIZE.1),
);
map_settings.mesh_type = TilemapMeshType::Isometric(IsoType::Diamond);
// Layer 0
let (mut layer_0, layer_0_entity) =
LayerBuilder::<TileBundle>::new(&mut commands, map_settings, 0u16, 0u16);
map.add_layer(&mut commands, 0u16, layer_0_entity);
let mut rng = SmallRng::from_entropy();
for x in 0..GRID_COUNT {
for y in 0..GRID_COUNT {
let _ = layer_0.set_tile(
TilePos(x, y),
Tile {
                    texture_index: rng.gen_range(0..9u16),
..Default::default()
}
.into(),
);
}
}
map_query.build_layer(&mut commands, layer_0, texture_handle.clone());
let (mut layer_cursor, layer_cursor_entity) =
LayerBuilder::<TileBundle>::new(&mut commands, map_settings, 0u16, 0u16);
map.add_layer(&mut commands, u16::MAX, layer_cursor_entity);
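    // Fill the whole cursor layer with sprite index 9 (the last tile in the sheet), invisible by default.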
layer_cursor.fill(
TilePos(0, 0),
TilePos(GRID_COUNT, GRID_COUNT),
Tile {
texture_index: 9,
visible: false,
..Default::default()
}
.into(),
);
map_query.build_layer(&mut commands, layer_cursor, texture_handle);
// Spawn Map
// Required in order to use map_query to retrieve layers/tiles.
commands
.entity(map_entity)
.insert(map)
.insert(Transform::from_xyz(0.0, ORIGIN_OFFSET.ceil(), -1.0))
.insert(GlobalTransform::default());
}
| 35.006452 | 115 | 0.632879 |
23b43ac7c45124cf2ad29ff74c3e2f0de799b3c7 | 191 | use serde_derive::Serialize;
#[derive(Serialize)]
enum Enum {
#[serde(serialize_with = "serialize_some_other_variant")]
Tuple(#[serde(skip_serializing)] String, u8),
}
fn main() {}
| 19.1 | 61 | 0.701571 |
899ada9ebf6d6a48811448cfea2d80aeed14da35 | 2,615 | use defi::DeFiContract;
use fungible_token::ContractContract as FtContract;
use near_sdk::json_types::U128;
use near_sdk::serde_json::json;
use near_sdk_sim::{
deploy, init_simulator, to_yocto, ContractAccount, UserAccount, DEFAULT_GAS, STORAGE_AMOUNT,
};
// Load in contract bytes at runtime
near_sdk_sim::lazy_static_include::lazy_static_include_bytes! {
FT_WASM_BYTES => "res/fungible_token.wasm",
DEFI_WASM_BYTES => "res/defi.wasm",
}
const FT_ID: &str = "synchro-token";
const DEFI_ID: &str = "defi";
// Register the given `user` with FT contract
pub fn register_user(user: &near_sdk_sim::UserAccount) {
user.call(
FT_ID.to_string(),
"storage_deposit",
&json!({
"account_id": user.valid_account_id()
})
.to_string()
.into_bytes(),
near_sdk_sim::DEFAULT_GAS / 2,
near_sdk::env::storage_byte_cost() * 125, // attached deposit
)
.assert_success();
}
pub fn init_no_macros(initial_balance: u128) -> (UserAccount, UserAccount, UserAccount) {
let root = init_simulator(None);
let ft = root.deploy(&FT_WASM_BYTES, FT_ID.into(), STORAGE_AMOUNT);
ft.call(
FT_ID.into(),
"new_default_meta",
&json!({
"owner_id": root.valid_account_id(),
"total_supply": U128::from(initial_balance),
})
.to_string()
.into_bytes(),
DEFAULT_GAS / 2,
0, // attached deposit
)
.assert_success();
let alice = root.create_user("alice".to_string(), to_yocto("100"));
register_user(&alice);
(root, ft, alice)
}
pub fn init_with_macros(
initial_balance: u128,
) -> (UserAccount, ContractAccount<FtContract>, ContractAccount<DeFiContract>, UserAccount) {
let root = init_simulator(None);
// uses default values for deposit and gas
let ft = deploy!(
// Contract Proxy
contract: FtContract,
// Contract account id
contract_id: FT_ID,
// Bytes of contract
bytes: &FT_WASM_BYTES,
// User deploying the contract,
signer_account: root,
// init method
init_method: new_default_meta(
root.valid_account_id(),
initial_balance.into()
)
);
let alice = root.create_user("alice".to_string(), to_yocto("100"));
register_user(&alice);
let defi = deploy!(
contract: DeFiContract,
contract_id: DEFI_ID,
bytes: &DEFI_WASM_BYTES,
signer_account: root,
init_method: new(
ft.valid_account_id()
)
);
(root, ft, defi, alice)
}
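// Illustrative sketch (not part of the original file): how a simulation test might use the
// helpers above. It assumes the deployed token exposes the standard NEP-141
// `ft_total_supply` view method.
#[cfg(test)]
mod init_helpers_example {
    use super::*;

    #[test]
    fn simulate_total_supply() {
        let initial_balance = to_yocto("100");
        let (_root, ft, _alice) = init_no_macros(initial_balance);
        let total_supply: U128 = ft.view(FT_ID.into(), "ft_total_supply", b"").unwrap_json();
        assert_eq!(initial_balance, total_supply.0);
    }
}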
| 27.526316 | 96 | 0.624092 |
672014d121a66e63cdcef737bb4bfb5f67d75398 | 23,183 | // Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::{
app::{AppHandle, GlobalWindowEvent, GlobalWindowEventListener},
event::{Event, EventHandler, Listeners},
hooks::{InvokeHandler, OnPageLoad, PageLoadPayload},
plugin::PluginStore,
runtime::{
webview::{
CustomProtocol, FileDropEvent, FileDropHandler, InvokePayload, WebviewRpcHandler,
WindowBuilder,
},
window::{dpi::PhysicalSize, DetachedWindow, PendingWindow, WindowEvent},
Icon, Runtime,
},
utils::{
assets::Assets,
config::{AppUrl, Config, WindowUrl},
PackageInfo,
},
Context, Invoke, StateManager, Window,
};
#[cfg(target_os = "windows")]
use crate::api::path::{resolve_path, BaseDirectory};
use crate::app::{GlobalMenuEventListener, WindowMenuEvent};
use crate::{
runtime::menu::{Menu, MenuEntry, MenuHash, MenuId},
MenuEvent,
};
use serde::Serialize;
use serde_json::Value as JsonValue;
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
fmt,
fs::create_dir_all,
sync::{Arc, Mutex, MutexGuard},
};
use tauri_macros::default_runtime;
use url::Url;
const WINDOW_RESIZED_EVENT: &str = "tauri://resize";
const WINDOW_MOVED_EVENT: &str = "tauri://move";
const WINDOW_CLOSE_REQUESTED_EVENT: &str = "tauri://close-requested";
const WINDOW_DESTROYED_EVENT: &str = "tauri://destroyed";
const WINDOW_FOCUS_EVENT: &str = "tauri://focus";
const WINDOW_BLUR_EVENT: &str = "tauri://blur";
const WINDOW_SCALE_FACTOR_CHANGED_EVENT: &str = "tauri://scale-change";
const MENU_EVENT: &str = "tauri://menu";
#[default_runtime(crate::Wry, wry)]
pub struct InnerWindowManager<R: Runtime> {
windows: Mutex<HashMap<String, Window<R>>>,
pub(crate) plugins: Mutex<PluginStore<R>>,
listeners: Listeners,
pub(crate) state: Arc<StateManager>,
/// The JS message handler.
invoke_handler: Box<InvokeHandler<R>>,
/// The page load hook, invoked when the webview performs a navigation.
on_page_load: Box<OnPageLoad<R>>,
config: Arc<Config>,
assets: Arc<dyn Assets>,
default_window_icon: Option<Vec<u8>>,
package_info: PackageInfo,
  /// The webview protocols available to all windows.
uri_scheme_protocols: HashMap<String, Arc<CustomProtocol>>,
/// The menu set to all windows.
menu: Option<Menu>,
/// Maps runtime id to a strongly typed menu id.
menu_ids: HashMap<MenuHash, MenuId>,
/// Menu event listeners to all windows.
menu_event_listeners: Arc<Vec<GlobalMenuEventListener<R>>>,
/// Window event listeners to all windows.
window_event_listeners: Arc<Vec<GlobalWindowEventListener<R>>>,
}
impl<R: Runtime> fmt::Debug for InnerWindowManager<R> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("InnerWindowManager");
#[allow(unused_mut)]
let mut w = s
.field("plugins", &self.plugins)
.field("state", &self.state)
.field("config", &self.config)
.field("default_window_icon", &self.default_window_icon)
.field("package_info", &self.package_info);
{
w = w
.field("menu", &self.menu)
.field("menu_ids", &self.menu_ids);
}
w.finish()
}
}
#[default_runtime(crate::Wry, wry)]
#[derive(Debug)]
pub struct WindowManager<R: Runtime> {
pub inner: Arc<InnerWindowManager<R>>,
invoke_keys: Arc<Mutex<Vec<u32>>>,
}
impl<R: Runtime> Clone for WindowManager<R> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
invoke_keys: self.invoke_keys.clone(),
}
}
}
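/// Recursively collects the runtime hash -> string id mapping for every custom item in the
/// menu, descending into submenus; native items are skipped.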
fn get_menu_ids(map: &mut HashMap<MenuHash, MenuId>, menu: &Menu) {
for item in &menu.items {
match item {
MenuEntry::CustomItem(c) => {
map.insert(c.id, c.id_str.clone());
}
MenuEntry::Submenu(s) => get_menu_ids(map, &s.inner),
_ => {}
}
}
}
impl<R: Runtime> WindowManager<R> {
#[allow(clippy::too_many_arguments)]
pub(crate) fn with_handlers(
context: Context<impl Assets>,
plugins: PluginStore<R>,
invoke_handler: Box<InvokeHandler<R>>,
on_page_load: Box<OnPageLoad<R>>,
uri_scheme_protocols: HashMap<String, Arc<CustomProtocol>>,
state: StateManager,
window_event_listeners: Vec<GlobalWindowEventListener<R>>,
(menu, menu_event_listeners): (Option<Menu>, Vec<GlobalMenuEventListener<R>>),
) -> Self {
Self {
inner: Arc::new(InnerWindowManager {
windows: Mutex::default(),
plugins: Mutex::new(plugins),
listeners: Listeners::default(),
state: Arc::new(state),
invoke_handler,
on_page_load,
config: Arc::new(context.config),
assets: context.assets,
default_window_icon: context.default_window_icon,
package_info: context.package_info,
uri_scheme_protocols,
menu_ids: {
let mut map = HashMap::new();
if let Some(menu) = &menu {
get_menu_ids(&mut map, menu)
}
map
},
menu,
menu_event_listeners: Arc::new(menu_event_listeners),
window_event_listeners: Arc::new(window_event_listeners),
}),
invoke_keys: Default::default(),
}
}
/// Get a locked handle to the windows.
pub(crate) fn windows_lock(&self) -> MutexGuard<'_, HashMap<String, Window<R>>> {
self.inner.windows.lock().expect("poisoned window manager")
}
/// State managed by the application.
pub(crate) fn state(&self) -> Arc<StateManager> {
self.inner.state.clone()
}
/// Get the menu ids mapper.
pub(crate) fn menu_ids(&self) -> HashMap<MenuHash, MenuId> {
self.inner.menu_ids.clone()
}
/// Get the base path to serve data from.
///
/// * In dev mode, this will be based on the `devPath` configuration value.
/// * Otherwise, this will be based on the `distDir` configuration value.
#[cfg(custom_protocol)]
fn base_path(&self) -> &AppUrl {
&self.inner.config.build.dist_dir
}
#[cfg(dev)]
fn base_path(&self) -> &AppUrl {
&self.inner.config.build.dev_path
}
/// Get the base URL to use for webview requests.
///
/// In dev mode, this will be based on the `devPath` configuration value.
fn get_url(&self) -> Cow<'_, Url> {
match self.base_path() {
AppUrl::Url(WindowUrl::External(url)) => Cow::Borrowed(url),
_ => Cow::Owned(Url::parse("tauri://localhost").unwrap()),
}
}
fn generate_invoke_key(&self) -> u32 {
let key = rand::random();
self.invoke_keys.lock().unwrap().push(key);
key
}
/// Checks whether the invoke key is valid or not.
///
/// An invoke key is valid if it was generated by this manager instance.
pub(crate) fn verify_invoke_key(&self, key: u32) -> bool {
self.invoke_keys.lock().unwrap().contains(&key)
}
fn prepare_pending_window(
&self,
mut pending: PendingWindow<R>,
label: &str,
pending_labels: &[String],
) -> crate::Result<PendingWindow<R>> {
let is_init_global = self.inner.config.build.with_global_tauri;
let plugin_init = self
.inner
.plugins
.lock()
.expect("poisoned plugin store")
.initialization_script();
let mut webview_attributes = pending.webview_attributes;
webview_attributes = webview_attributes
.initialization_script(&self.initialization_script(&plugin_init, is_init_global))
.initialization_script(&format!(
r#"
window.__TAURI__.__windows = {window_labels_array}.map(function (label) {{ return {{ label: label }} }});
window.__TAURI__.__currentWindow = {{ label: {current_window_label} }}
"#,
window_labels_array = serde_json::to_string(pending_labels)?,
current_window_label = serde_json::to_string(&label)?,
));
#[cfg(dev)]
{
webview_attributes = webview_attributes.initialization_script(&format!(
"window.__TAURI_INVOKE_KEY__ = {}",
self.generate_invoke_key()
));
}
if !pending.window_builder.has_icon() {
if let Some(default_window_icon) = &self.inner.default_window_icon {
let icon = Icon::Raw(default_window_icon.clone());
pending.window_builder = pending.window_builder.icon(icon)?;
}
}
if !pending.window_builder.has_menu() {
if let Some(menu) = &self.inner.menu {
pending.window_builder = pending.window_builder.menu(menu.clone());
}
}
for (uri_scheme, protocol) in &self.inner.uri_scheme_protocols {
if !webview_attributes.has_uri_scheme_protocol(uri_scheme) {
let protocol = protocol.clone();
webview_attributes = webview_attributes
.register_uri_scheme_protocol(uri_scheme.clone(), move |p| (protocol.protocol)(p));
}
}
if !webview_attributes.has_uri_scheme_protocol("tauri") {
webview_attributes = webview_attributes
.register_uri_scheme_protocol("tauri", self.prepare_uri_scheme_protocol().protocol);
}
if !webview_attributes.has_uri_scheme_protocol("asset") {
webview_attributes = webview_attributes.register_uri_scheme_protocol("asset", move |url| {
let path = url.replace("asset://", "");
let path = percent_encoding::percent_decode(path.as_bytes())
.decode_utf8_lossy()
.to_string();
let data = crate::async_runtime::block_on(async move { tokio::fs::read(path).await })?;
Ok(data)
});
}
pending.webview_attributes = webview_attributes;
Ok(pending)
}
fn prepare_rpc_handler(&self, app_handle: AppHandle<R>) -> WebviewRpcHandler<R> {
let manager = self.clone();
Box::new(move |window, request| {
let window = Window::new(manager.clone(), window, app_handle.clone());
let command = request.command.clone();
let arg = request
.params
.unwrap()
.as_array_mut()
.unwrap()
.first_mut()
.unwrap_or(&mut JsonValue::Null)
.take();
match serde_json::from_value::<InvokePayload>(arg) {
Ok(message) => {
let _ = window.on_message(command, message);
}
Err(e) => {
let error: crate::Error = e.into();
let _ = window.eval(&format!(
r#"console.error({})"#,
JsonValue::String(error.to_string())
));
}
}
})
}
fn prepare_uri_scheme_protocol(&self) -> CustomProtocol {
let assets = self.inner.assets.clone();
let manager = self.clone();
CustomProtocol {
protocol: Box::new(move |path| {
let mut path = path
.split(&['?', '#'][..])
// ignore query string
.next()
.unwrap()
.to_string()
.replace("tauri://localhost", "");
if path.ends_with('/') {
path.pop();
}
path = percent_encoding::percent_decode(path.as_bytes())
.decode_utf8_lossy()
.to_string();
let path = if path.is_empty() {
// if the url is `tauri://localhost`, we should load `index.html`
"index.html".to_string()
} else {
// skip leading `/`
path.chars().skip(1).collect::<String>()
};
let is_javascript =
path.ends_with(".js") || path.ends_with(".cjs") || path.ends_with(".mjs");
let is_html = path.ends_with(".html");
let asset_response = assets
.get(&path.as_str().into())
.or_else(|| assets.get(&format!("{}/index.html", path.as_str()).into()))
.or_else(|| {
#[cfg(debug_assertions)]
eprintln!("Asset `{}` not found; fallback to index.html", path); // TODO log::error!
assets.get(&"index.html".into())
})
.ok_or(crate::Error::AssetNotFound(path))
.map(Cow::into_owned);
match asset_response {
Ok(asset) => {
if is_javascript || is_html {
let contents = String::from_utf8_lossy(&asset).into_owned();
Ok(
contents
.replacen(
"__TAURI__INVOKE_KEY_TOKEN__",
&manager.generate_invoke_key().to_string(),
1,
)
.as_bytes()
.to_vec(),
)
} else {
Ok(asset)
}
}
Err(e) => {
#[cfg(debug_assertions)]
eprintln!("{:?}", e); // TODO log::error!
Err(Box::new(e))
}
}
}),
}
}
fn prepare_file_drop(&self, app_handle: AppHandle<R>) -> FileDropHandler<R> {
let manager = self.clone();
Box::new(move |event, window| {
let manager = manager.clone();
let app_handle = app_handle.clone();
crate::async_runtime::block_on(async move {
let window = Window::new(manager.clone(), window, app_handle);
let _ = match event {
FileDropEvent::Hovered(paths) => window.emit("tauri://file-drop-hover", Some(paths)),
FileDropEvent::Dropped(paths) => window.emit("tauri://file-drop", Some(paths)),
FileDropEvent::Cancelled => window.emit("tauri://file-drop-cancelled", Some(())),
_ => unimplemented!(),
};
});
true
})
}
fn initialization_script(
&self,
plugin_initialization_script: &str,
with_global_tauri: bool,
) -> String {
let key = self.generate_invoke_key();
format!(
r#"
(function () {{
const __TAURI_INVOKE_KEY__ = {key};
{bundle_script}
}})()
{core_script}
{event_initialization_script}
if (window.rpc) {{
window.__TAURI_INVOKE__("__initialized", {{ url: window.location.href }}, {key})
}} else {{
window.addEventListener('DOMContentLoaded', function () {{
window.__TAURI_INVOKE__("__initialized", {{ url: window.location.href }}, {key})
}})
}}
{plugin_initialization_script}
"#,
key = key,
core_script = include_str!("../scripts/core.js").replace("_KEY_VALUE_", &key.to_string()),
bundle_script = if with_global_tauri {
include_str!("../scripts/bundle.js")
} else {
""
},
event_initialization_script = self.event_initialization_script(),
plugin_initialization_script = plugin_initialization_script
)
}
fn event_initialization_script(&self) -> String {
return format!(
"
window['{function}'] = function (eventData) {{
const listeners = (window['{listeners}'] && window['{listeners}'][eventData.event]) || []
for (let i = listeners.length - 1; i >= 0; i--) {{
const listener = listeners[i]
eventData.id = listener.id
listener.handler(eventData)
}}
}}
",
function = self.inner.listeners.function_name(),
listeners = self.inner.listeners.listeners_object_name()
);
}
}
#[cfg(test)]
mod test {
use super::WindowManager;
use crate::{generate_context, plugin::PluginStore, StateManager, Wry};
#[test]
fn check_get_url() {
let context = generate_context!("test/fixture/src-tauri/tauri.conf.json", crate);
let manager: WindowManager<Wry> = WindowManager::with_handlers(
context,
PluginStore::default(),
Box::new(|_| ()),
Box::new(|_, _| ()),
Default::default(),
StateManager::new(),
Default::default(),
Default::default(),
);
#[cfg(custom_protocol)]
assert_eq!(manager.get_url().to_string(), "tauri://localhost");
#[cfg(dev)]
assert_eq!(manager.get_url().to_string(), "http://localhost:4000/");
}
}
impl<R: Runtime> WindowManager<R> {
pub fn run_invoke_handler(&self, invoke: Invoke<R>) {
(self.inner.invoke_handler)(invoke);
}
pub fn run_on_page_load(&self, window: Window<R>, payload: PageLoadPayload) {
(self.inner.on_page_load)(window.clone(), payload.clone());
self
.inner
.plugins
.lock()
.expect("poisoned plugin store")
.on_page_load(window, payload);
}
pub fn extend_api(&self, invoke: Invoke<R>) {
self
.inner
.plugins
.lock()
.expect("poisoned plugin store")
.extend_api(invoke);
}
pub fn initialize_plugins(&self, app: &AppHandle<R>) -> crate::Result<()> {
self
.inner
.plugins
.lock()
.expect("poisoned plugin store")
.initialize(app, &self.inner.config.plugins)
}
pub fn prepare_window(
&self,
app_handle: AppHandle<R>,
mut pending: PendingWindow<R>,
pending_labels: &[String],
) -> crate::Result<PendingWindow<R>> {
if self.windows_lock().contains_key(&pending.label) {
return Err(crate::Error::WindowLabelAlreadyExists(pending.label));
}
let (is_local, url) = match &pending.webview_attributes.url {
WindowUrl::App(path) => {
let url = self.get_url();
(
true,
// ignore "index.html" just to simplify the url
if path.to_str() != Some("index.html") {
url
.join(&*path.to_string_lossy())
.map_err(crate::Error::InvalidUrl)?
.to_string()
} else {
url.to_string()
},
)
}
WindowUrl::External(url) => (url.scheme() == "tauri", url.to_string()),
_ => unimplemented!(),
};
if is_local {
let label = pending.label.clone();
pending = self.prepare_pending_window(pending, &label, pending_labels)?;
pending.rpc_handler = Some(self.prepare_rpc_handler(app_handle.clone()));
}
if pending.webview_attributes.file_drop_handler_enabled {
pending.file_drop_handler = Some(self.prepare_file_drop(app_handle));
}
pending.url = url;
    // on Windows we need to force a data_directory,
    // but we still respect a user-specified value
#[cfg(target_os = "windows")]
if pending.webview_attributes.data_directory.is_none() {
let local_app_data = resolve_path(
&self.inner.config,
&self.inner.package_info,
&self.inner.config.tauri.bundle.identifier,
Some(BaseDirectory::LocalData),
);
if let Ok(user_data_dir) = local_app_data {
pending.webview_attributes.data_directory = Some(user_data_dir);
}
}
// make sure the directory is created and available to prevent a panic
if let Some(user_data_dir) = &pending.webview_attributes.data_directory {
if !user_data_dir.exists() {
create_dir_all(user_data_dir)?;
}
}
Ok(pending)
}
pub fn attach_window(&self, app_handle: AppHandle<R>, window: DetachedWindow<R>) -> Window<R> {
let window = Window::new(self.clone(), window, app_handle);
let window_ = window.clone();
let window_event_listeners = self.inner.window_event_listeners.clone();
let manager = self.clone();
window.on_window_event(move |event| {
let _ = on_window_event(&window_, &manager, event);
for handler in window_event_listeners.iter() {
handler(GlobalWindowEvent {
window: window_.clone(),
event: event.clone(),
});
}
});
{
let window_ = window.clone();
let menu_event_listeners = self.inner.menu_event_listeners.clone();
window.on_menu_event(move |event| {
let _ = on_menu_event(&window_, &event);
for handler in menu_event_listeners.iter() {
handler(WindowMenuEvent {
window: window_.clone(),
menu_item_id: event.menu_item_id.clone(),
});
}
});
}
// insert the window into our manager
{
self
.windows_lock()
.insert(window.label().to_string(), window.clone());
}
// let plugins know that a new window has been added to the manager
{
self
.inner
.plugins
.lock()
.expect("poisoned plugin store")
.created(window.clone());
}
window
}
pub(crate) fn on_window_close(&self, label: &str) {
self.windows_lock().remove(label);
}
pub fn emit_filter<S, F>(&self, event: &str, payload: S, filter: F) -> crate::Result<()>
where
S: Serialize + Clone,
F: Fn(&Window<R>) -> bool,
{
self
.windows_lock()
.values()
.filter(|&w| filter(w))
.try_for_each(|window| window.emit(event, payload.clone()))
}
pub fn labels(&self) -> HashSet<String> {
self.windows_lock().keys().cloned().collect()
}
pub fn config(&self) -> Arc<Config> {
self.inner.config.clone()
}
pub fn package_info(&self) -> &PackageInfo {
&self.inner.package_info
}
pub fn unlisten(&self, handler_id: EventHandler) {
self.inner.listeners.unlisten(handler_id)
}
pub fn trigger(&self, event: &str, window: Option<String>, data: Option<String>) {
self.inner.listeners.trigger(event, window, data)
}
pub fn listen<F: Fn(Event) + Send + 'static>(
&self,
event: String,
window: Option<String>,
handler: F,
) -> EventHandler {
self.inner.listeners.listen(event, window, handler)
}
pub fn once<F: Fn(Event) + Send + 'static>(
&self,
event: String,
window: Option<String>,
handler: F,
) -> EventHandler {
self.inner.listeners.once(event, window, handler)
}
pub fn event_listeners_object_name(&self) -> String {
self.inner.listeners.listeners_object_name()
}
pub fn event_emit_function_name(&self) -> String {
self.inner.listeners.function_name()
}
pub fn get_window(&self, label: &str) -> Option<Window<R>> {
self.windows_lock().get(label).cloned()
}
pub fn windows(&self) -> HashMap<String, Window<R>> {
self.windows_lock().clone()
}
}
fn on_window_event<R: Runtime>(
window: &Window<R>,
manager: &WindowManager<R>,
event: &WindowEvent,
) -> crate::Result<()> {
match event {
WindowEvent::Resized(size) => window.emit(WINDOW_RESIZED_EVENT, Some(size))?,
WindowEvent::Moved(position) => window.emit(WINDOW_MOVED_EVENT, Some(position))?,
WindowEvent::CloseRequested => {
window.emit(WINDOW_CLOSE_REQUESTED_EVENT, Some(()))?;
}
WindowEvent::Destroyed => {
window.emit(WINDOW_DESTROYED_EVENT, Some(()))?;
let label = window.label();
for window in manager.inner.windows.lock().unwrap().values() {
window.eval(&format!(
r#"window.__TAURI__.__windows = window.__TAURI__.__windows.filter(w => w.label !== "{}");"#,
label
))?;
}
}
WindowEvent::Focused(focused) => window.emit(
if *focused {
WINDOW_FOCUS_EVENT
} else {
WINDOW_BLUR_EVENT
},
Some(()),
)?,
WindowEvent::ScaleFactorChanged {
scale_factor,
new_inner_size,
..
} => window.emit(
WINDOW_SCALE_FACTOR_CHANGED_EVENT,
Some(ScaleFactorChanged {
scale_factor: *scale_factor,
size: *new_inner_size,
}),
)?,
_ => unimplemented!(),
}
Ok(())
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ScaleFactorChanged {
scale_factor: f64,
size: PhysicalSize<u32>,
}
fn on_menu_event<R: Runtime>(window: &Window<R>, event: &MenuEvent) -> crate::Result<()> {
window.emit(MENU_EVENT, Some(event.menu_item_id.clone()))
}
| 30.146944 | 115 | 0.614459 |
b9d3529bf1dd82688a838b947d27ba78efe19591 | 1,362 | use crate::prelude::*;
#[doc = include_str!("../doc/request-info.md")]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Info {
/// The Tendermint software semantic version.
pub version: String,
/// The Tendermint block protocol version.
pub block_version: u64,
/// The Tendermint p2p protocol version.
pub p2p_version: u64,
/// The Tendermint ABCI semantic version.
pub abci_version: String,
}
// =============================================================================
// Protobuf conversions
// =============================================================================
use core::convert::TryFrom;
use tendermint_proto::{abci as pb, Protobuf};
impl From<Info> for pb::RequestInfo {
fn from(info: Info) -> Self {
Self {
version: info.version,
block_version: info.block_version,
p2p_version: info.p2p_version,
abci_version: info.abci_version,
}
}
}
impl TryFrom<pb::RequestInfo> for Info {
type Error = crate::Error;
fn try_from(info: pb::RequestInfo) -> Result<Self, Self::Error> {
Ok(Self {
version: info.version,
block_version: info.block_version,
p2p_version: info.p2p_version,
abci_version: info.abci_version,
})
}
}
impl Protobuf<pb::RequestInfo> for Info {}
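// An illustrative round-trip sketch for the conversions above. The module and
// test names, as well as the version numbers, are arbitrary placeholders and
// not values taken from a real node.
#[cfg(test)]
mod round_trip_tests {
    use super::*;
    use crate::prelude::*;
    use core::convert::TryFrom;
    use tendermint_proto::abci as pb;

    #[test]
    fn request_info_round_trip() {
        let info = Info {
            version: String::from("0.34.0"),
            block_version: 11,
            p2p_version: 8,
            abci_version: String::from("0.17.0"),
        };
        let pb_info: pb::RequestInfo = info.clone().into();
        assert_eq!(Info::try_from(pb_info).unwrap(), info);
    }
}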
| 27.795918 | 80 | 0.552863 |
76551b9671346c7fdc525645b349ea8647ef7c4d | 36 | pub fn foo<T>(x:T) -> T{
return x
} | 12 | 24 | 0.527778 |
645a228458d828fc2187a6d280adcff4db1b2567 | 10,814 | use std::cmp::{PartialOrd, Ordering};
use super::node::Node;
use super::ArgNames;
/// Describes the lifetime of a variable.
/// When a lifetime `a` > `b` it means `a` outlives `b`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Lifetime {
/// Return value with optional list of arguments that outlives other arguments.
Return(Vec<usize>),
/// An argument outlives other arguments, but does not outlive the return.
Argument(Vec<usize>),
/// Local variable.
Local(usize),
/// Current variable.
///
/// Is equal to itself and outlives local variables.
///
    /// Unknown to return, because the lifetime checker infers
    /// the lifetime of the return value from arguments, which does not work
    /// with current objects.
Current(usize),
}
impl PartialOrd for Lifetime {
fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
use self::Lifetime::*;
Some(match (self, other) {
(&Current(_), &Local(_)) => Ordering::Greater,
(&Local(_), &Current(_)) => Ordering::Less,
(&Current(a), &Current(b)) if a == b => Ordering::Equal,
(&Current(_), _) => return None,
(_, &Current(_)) => return None,
(&Local(a), &Local(b)) => b.cmp(&a),
(&Return(_), &Local(_)) => Ordering::Greater,
(&Local(_), &Return(_)) => Ordering::Less,
(&Return(ref a), &Return(ref b)) => {
match (a.len(), b.len()) {
(0, 0) => Ordering::Equal,
(0, _) => Ordering::Less,
(_, 0) => Ordering::Greater,
(_, _) => {
return compare_argument_outlives(a, b);
}
}
}
(&Argument(_), &Local(_)) => Ordering::Greater,
(&Local(_), &Argument(_)) => Ordering::Less,
(&Return(_), &Argument(_)) => return None,
(&Argument(_), &Return(_)) => return None,
(&Argument(ref a), &Argument(ref b)) => {
return compare_argument_outlives(a, b);
}
})
}
}
/// Takes two lists of arguments.
/// If they have any argument in common, the longer list outlives the shorter.
/// If they have no argument in common, it is not known whether one outlives
/// the other.
fn compare_argument_outlives(a: &[usize], b: &[usize]) -> Option<Ordering> {
for &i in a {
for &j in b {
if i == j {
return Some(a.len().cmp(&b.len()));
}
}
}
None
}
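// An illustrative sketch of the ordering rules described above. The node and
// argument indices used here are arbitrary placeholders, not values produced
// by parsing a real program.
#[cfg(test)]
mod ordering_examples {
    use super::*;
    use std::cmp::Ordering;

    #[test]
    fn lifetime_ordering() {
        // A return lifetime outlives a local variable.
        assert_eq!(
            Lifetime::Return(vec![]).partial_cmp(&Lifetime::Local(0)),
            Some(Ordering::Greater)
        );
        // A variable declared earlier (smaller index) outlives one declared later.
        assert_eq!(
            Lifetime::Local(0).partial_cmp(&Lifetime::Local(1)),
            Some(Ordering::Greater)
        );
        // Argument lists sharing an argument: the longer list outlives the shorter.
        assert_eq!(
            compare_argument_outlives(&[1, 2], &[2]),
            Some(Ordering::Greater)
        );
        // Disjoint argument lists have no known ordering.
        assert_eq!(compare_argument_outlives(&[1], &[2]), None);
    }
}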
/// Gets the lifetime of a function argument.
pub(crate) fn arg_lifetime(
declaration: usize,
arg: &Node,
nodes: &[Node],
arg_names: &ArgNames
) -> Option<Lifetime> {
Some(if let Some(ref lt) = arg.lifetime {
if &**lt == "return" {
return Some(Lifetime::Return(vec![declaration]));
} else {
// Resolve lifetimes among arguments.
let parent = arg.parent.expect("Expected parent");
let mut args: Vec<usize> = vec![];
args.push(declaration);
let mut name = lt.clone();
loop {
let (arg, _) = *arg_names.get(&(parent, name))
.expect("Expected argument name");
args.push(arg);
if let Some(ref lt) = nodes[arg].lifetime {
if &**lt == "return" {
// Lifetimes outlive return.
return Some(Lifetime::Return(args));
}
name = lt.clone();
} else {
break;
}
}
Lifetime::Argument(args)
}
} else {
Lifetime::Argument(vec![declaration])
})
}
pub(crate) fn compare_lifetimes(
l: &Option<Lifetime>,
r: &Option<Lifetime>,
nodes: &[Node]
) -> Result<(), String> {
match (l, r) {
(&Some(ref l), &Some(ref r)) => {
match l.partial_cmp(&r) {
Some(Ordering::Greater) | Some(Ordering::Equal) => {
match *r {
Lifetime::Local(r) => {
// This gets triggered in cases like these:
/*
fn main() {
a := [[]]
b := [3] // <--- declared after 'a'.
a[0] = b // <--- attempting to put 'b' inside 'a'.
}
*/
return Err(format!("`{}` does not live long enough",
nodes[r].name().expect("Expected name")));
}
Lifetime::Argument(ref r) => {
// This gets triggered in cases like these:
/*
fn main() {}
fn foo(mut a: 'b, b) {
a = b // <--- attempting to overwrite 'a' with 'b'.
}
*/
// It is known that `a` outlives `b`, so it is an error
// to attempt overwrite `a` with `b`.
// Notice that `b: 'b` is required to trigger the case,
// since `a` and `b` must have some lifetime in common to get an order.
return Err(format!("`{}` does not live long enough",
nodes[r[0]].name().expect("Expected name")));
}
Lifetime::Current(r) => {
// This gets triggered in cases like these:
/*
fn main() {}
fn foo() ~ a {
a = a // <--- attempting to overwrite 'a' with itself.
}
*/
// Since current objects are not permitted to have lifetimes,
// this is the only case where there is an order,
// so this can only happen when attempting to overwrite
// the same current object with itself.
// For this reason, this case is rarely seen in practice.
return Err(format!("`{}` does not live long enough",
nodes[r].name().expect("Expected name")));
}
Lifetime::Return(ref r) => {
// This gets triggered in cases like these:
/*
fn main() {}
fn foo(mut a: 'b, b: 'return) {
a = b // <--- attempting to overwrite 'a' with 'b'.
}
*/
// It is known that `a` outlives `b`, so it is an error
// to attempt overwrite `a` with `b`.
// Notice that `b: 'b` is required to trigger the case,
// since `a` and `b` must have some lifetime in common to get an order.
// In addition they both need `'return` lifetime.
// `a` has an implicit `'return` lifetime through `b`.
return Err(format!("`{}` does not live long enough",
nodes[r[0]].name().expect("Expected name")));
}
}
}
None => {
match (l, r) {
(&Lifetime::Argument(ref l), &Lifetime::Argument(ref r)) => {
// TODO: Report function name for other cases.
let func = nodes[nodes[r[0]].parent.unwrap()]
.name().unwrap();
return Err(format!("Function `{}` requires `{}: '{}`",
func,
nodes[r[0]].name().expect("Expected name"),
nodes[l[0]].name().expect("Expected name")));
}
(&Lifetime::Argument(ref l), &Lifetime::Return(ref r)) => {
if !r.is_empty() {
return Err(format!("Requires `{}: '{}`",
nodes[r[0]].name().expect("Expected name"),
nodes[l[0]].name().expect("Expected name")));
} else {
unimplemented!();
}
}
(&Lifetime::Return(ref l), &Lifetime::Return(ref r)) => {
if !l.is_empty() && !r.is_empty() {
return Err(format!("Requires `{}: '{}`",
nodes[r[0]].name().expect("Expected name"),
nodes[l[0]].name().expect("Expected name")));
} else {
unimplemented!();
}
}
(&Lifetime::Return(ref l), &Lifetime::Argument(ref r)) => {
if l.is_empty() {
let last = *r.last().expect("Expected argument index");
return Err(format!("Requires `{}: 'return`",
nodes[last].name().expect("Expected name")));
} else {
return Err(format!("`{}` does not live long enough",
nodes[r[0]].name().expect("Expected name")));
}
}
(&Lifetime::Current(n), _) => {
return Err(format!("`{}` is a current object, use `clone(_)`",
nodes[n].name().expect("Expected name")));
}
(_, &Lifetime::Current(n)) => {
return Err(format!("`{}` is a current object, use `clone(_)`",
nodes[n].name().expect("Expected name")));
}
x => panic!("Unknown case {:?}", x)
}
}
_ => {}
}
}
// TODO: Handle other cases.
_ => {}
}
Ok(())
}
| 44.871369 | 99 | 0.391252 |
eb9be2bcd5080e4ba18dd5bf93b4a3cad2ee9635 | 256 | use crate::error::Result;
/// Returns true if an address is executable.
pub fn is_executable_address(address: *const ()) -> Result<bool> {
Ok(
region::query(address as *const _)?
.protection
.contains(region::Protection::Execute),
)
}
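// A minimal hypothetical smoke test: the code of this crate itself should live
// in an executable mapping. The exact behaviour depends on the platform's
// memory protection, so treat this as an illustrative sketch rather than a
// guarantee.
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn function_address_is_executable() {
    let address = function_address_is_executable as *const ();
    assert!(is_executable_address(address).unwrap());
  }
}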
| 23.272727 | 66 | 0.664063 |
76d58a969c59f9d3fd2167d5f61f0adf49932bd1 | 3,713 | #[doc = "Reader of register WTMK_LVL"]
pub type R = crate::R<u32, super::WTMK_LVL>;
#[doc = "Writer for register WTMK_LVL"]
pub type W = crate::W<u32, super::WTMK_LVL>;
#[doc = "Register WTMK_LVL `reset()`'s with value 0x0810_0810"]
impl crate::ResetValue for super::WTMK_LVL {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0x0810_0810
}
}
#[doc = "Reader of field `RD_WML`"]
pub type RD_WML_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RD_WML`"]
pub struct RD_WML_W<'a> {
w: &'a mut W,
}
impl<'a> RD_WML_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
self.w
}
}
#[doc = "Reader of field `RD_BRST_LEN`"]
pub type RD_BRST_LEN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RD_BRST_LEN`"]
pub struct RD_BRST_LEN_W<'a> {
w: &'a mut W,
}
impl<'a> RD_BRST_LEN_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x1f << 8)) | (((value as u32) & 0x1f) << 8);
self.w
}
}
#[doc = "Reader of field `WR_WML`"]
pub type WR_WML_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WR_WML`"]
pub struct WR_WML_W<'a> {
w: &'a mut W,
}
impl<'a> WR_WML_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);
self.w
}
}
#[doc = "Reader of field `WR_BRST_LEN`"]
pub type WR_BRST_LEN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WR_BRST_LEN`"]
pub struct WR_BRST_LEN_W<'a> {
w: &'a mut W,
}
impl<'a> WR_BRST_LEN_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x1f << 24)) | (((value as u32) & 0x1f) << 24);
self.w
}
}
impl R {
#[doc = "Bits 0:7 - Read Watermark Level"]
#[inline(always)]
pub fn rd_wml(&self) -> RD_WML_R {
RD_WML_R::new((self.bits & 0xff) as u8)
}
#[doc = "Bits 8:12 - Read Burst Length Due to system restriction, the actual burst length may not exceed 16."]
#[inline(always)]
pub fn rd_brst_len(&self) -> RD_BRST_LEN_R {
RD_BRST_LEN_R::new(((self.bits >> 8) & 0x1f) as u8)
}
#[doc = "Bits 16:23 - Write Watermark Level"]
#[inline(always)]
pub fn wr_wml(&self) -> WR_WML_R {
WR_WML_R::new(((self.bits >> 16) & 0xff) as u8)
}
#[doc = "Bits 24:28 - Write Burst Length Due to system restriction, the actual burst length may not exceed 16."]
#[inline(always)]
pub fn wr_brst_len(&self) -> WR_BRST_LEN_R {
WR_BRST_LEN_R::new(((self.bits >> 24) & 0x1f) as u8)
}
}
impl W {
#[doc = "Bits 0:7 - Read Watermark Level"]
#[inline(always)]
pub fn rd_wml(&mut self) -> RD_WML_W {
RD_WML_W { w: self }
}
#[doc = "Bits 8:12 - Read Burst Length Due to system restriction, the actual burst length may not exceed 16."]
#[inline(always)]
pub fn rd_brst_len(&mut self) -> RD_BRST_LEN_W {
RD_BRST_LEN_W { w: self }
}
#[doc = "Bits 16:23 - Write Watermark Level"]
#[inline(always)]
pub fn wr_wml(&mut self) -> WR_WML_W {
WR_WML_W { w: self }
}
#[doc = "Bits 24:28 - Write Burst Length Due to system restriction, the actual burst length may not exceed 16."]
#[inline(always)]
pub fn wr_brst_len(&mut self) -> WR_BRST_LEN_W {
WR_BRST_LEN_W { w: self }
}
}
| 32.858407 | 116 | 0.584702 |
5d7df87f453ab8fe45cd804117854ecabe64c1fb | 5,203 | // conn_expr/port_set.rs
// Copyright 2022 Matti Hänninen
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
use std::str;
use super::parser::{self, Parser};
pub type Port = u16;
/// Non-empty ordered set of ports
#[derive(Clone, Debug, PartialEq)]
pub struct PortSet(Vec<Port>);
impl PortSet {
pub fn try_from_iter(iter: impl Iterator<Item = Port>) -> Option<PortSet> {
let mut ports = vec![];
for port in iter {
if !ports.contains(&port) {
ports.push(port)
}
}
if ports.is_empty() {
None
} else {
Some(Self(ports))
}
}
pub fn as_slice(&self) -> &[Port] {
self.0.as_slice()
}
pub fn into_inner(self) -> Vec<Port> {
self.0
}
}
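// A small illustrative check of `try_from_iter` (the port numbers are
// arbitrary placeholders): duplicates are dropped while first-seen order is
// preserved, and an empty iterator yields `None`.
#[cfg(test)]
mod try_from_iter_tests {
    use super::*;

    #[test]
    fn deduplicates_and_preserves_order() {
        let set = PortSet::try_from_iter(vec![8080, 8081, 8080].into_iter()).unwrap();
        assert_eq!(set.as_slice(), &[8080, 8081]);
        assert!(PortSet::try_from_iter(std::iter::empty::<Port>()).is_none());
    }
}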
impl<'a> TryFrom<parser::Pair<'a, parser::Rule>> for PortSet {
type Error = CannotConvertToPortSetError;
fn try_from(
pair: parser::Pair<'a, parser::Rule>,
) -> Result<Self, Self::Error> {
use parser::Rule;
if matches!(pair.as_rule(), Rule::port_set) {
let mut ports = vec![];
for p in pair.into_inner() {
match p.as_rule() {
Rule::port => ports.push(
p.as_str()
.parse()
.map_err(|_| CannotConvertToPortSetError)?,
),
Rule::port_range => {
let mut limits = p.into_inner();
let start: Port = limits
.next()
.expect("grammar guarantees start port")
.as_str()
.parse()
.map_err(|_| CannotConvertToPortSetError)?;
let end = limits
.next()
.expect("grammar guarantees end port")
.as_str()
.parse()
.map_err(|_| CannotConvertToPortSetError)?;
if start <= end {
ports.extend(start..=end);
} else {
ports.extend((end..=start).rev());
}
}
_ => unreachable!("grammar guarantees port or port_range"),
}
}
Ok(Self::try_from_iter(ports.into_iter())
.expect("grammar guarantees at least one port"))
} else {
Err(CannotConvertToPortSetError)
}
}
}
#[derive(Debug, PartialEq, thiserror::Error)]
#[error("cannot convert to port set")]
pub struct CannotConvertToPortSetError;
impl str::FromStr for PortSet {
type Err = PortSetParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
parser::ConnectionExprLanguage::parse(parser::Rule::port_set_expr, s)
.map_err(|_| PortSetParseError)?
.next()
.expect("grammar guaranteed post_set_expr")
.into_inner()
.next()
.expect("grammar guarantees post_set")
.try_into()
.map_err(|_| PortSetParseError)
}
}
#[derive(Debug, PartialEq, thiserror::Error)]
#[error("cannot parse port set expression")]
pub struct PortSetParseError;
#[cfg(test)]
mod test {
use super::*;
#[test]
fn port_set_parsing() {
assert_eq!("1".parse(), Ok(PortSet(vec![1])));
assert_eq!("65535".parse(), Ok(PortSet(vec![65535])));
assert_eq!("1,2".parse(), Ok(PortSet(vec![1, 2])));
assert_eq!("1-3".parse(), Ok(PortSet(vec![1, 2, 3])));
assert_eq!("3-1".parse(), Ok(PortSet(vec![3, 2, 1])));
assert_eq!("1,1-2,5,2-4".parse(), Ok(PortSet(vec![1, 2, 5, 3, 4])));
assert_eq!("".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!(" 1".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1 ".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!(",".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!(",1".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1,".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("-1".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1-".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1,,2".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1--2".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("1-2-3".parse::<PortSet>(), Err(PortSetParseError));
assert_eq!("65536".parse::<PortSet>(), Err(PortSetParseError));
}
}
| 35.394558 | 80 | 0.52758 |
090aaaa49067152d4929890be97bc7c05dd289dd | 1,454 | use std::{collections::VecDeque, vec};
fn lanternfish_evolution(input: &str, time_horizon: u16) -> u64 {
let start_lives = input.split(',');
let mut starting_lifetimes_count = vec![0u64; 9];
for lifetime in start_lives {
let life_num: usize = lifetime
.trim()
.parse()
.expect("We got a string in the CSV that's not a number");
starting_lifetimes_count[life_num] += 1;
}
    // Index i holds the number of fish whose internal timer equals i.
    let mut lives_count: VecDeque<u64> = starting_lifetimes_count.into_iter().collect();
    for _ in 0..time_horizon {
        // Rotating left moves the count of fish at timer 0 to index 8 (their newborns);
        // the parents themselves are then re-added at timer 6.
        lives_count.rotate_left(1);
        lives_count[6] += lives_count[8];
    }
    lives_count.into_iter().sum()
}
pub fn part1(input: &str) {
let final_population = lanternfish_evolution(input, 80);
println!("Final population: {}", final_population);
}
pub fn part2(input: &str) {
let final_population = lanternfish_evolution(input, 256);
println!("Final population: {}", final_population);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn normal_conditions() {
let input_string = "3,4,3,1,2";
let population_count = lanternfish_evolution(input_string, 80);
assert_eq!(population_count, 5934u64);
}
#[test]
fn infinite_resources() {
let input_string = "3,4,3,1,2";
let population_count = lanternfish_evolution(input_string, 256);
assert_eq!(population_count, 26984457539u64);
}
}
| 25.508772 | 88 | 0.635488 |
eddf2c009a88f5de15a03ce0c30091bb9b9a857d | 20,483 | /// Block manager manipulates blocks.
/// Block manager loads blocks into buffer from main data store, version store, or checkpoint
/// store.
use crate::common::errors::Error;
use crate::common::defs::BlockId;
use crate::storage::datastore::DataStore;
use crate::storage::datastore::FileDesc;
use crate::storage::datastore::FileType;
use crate::storage::datastore::FileState;
use crate::system::config::ConfigMt;
use crate::buf_mgr::buf_mgr::BufMgr;
use crate::buf_mgr::buf_mgr::BlockDesc;
use crate::buf_mgr::buf_mgr::Pinned;
use crate::buf_mgr::buf_mgr::BlockArea;
use crate::buf_mgr::buf_mgr::BlockType;
use crate::buf_mgr::lru::LruList;
use crate::buf_mgr::lru::LruNodeRef;
use crate::block_mgr::block::BasicBlock;
use crate::block_mgr::block::FileHeaderBlock;
use crate::block_mgr::block::ExtentHeaderBlock;
use crate::block_mgr::block::FreeInfoBlock;
use crate::block_mgr::block::DataBlock;
use crate::block_mgr::block::BlockLocked;
use crate::block_mgr::block::BlockLockedMut;
use crate::block_mgr::block::RwLockGuard;
use std::sync::Arc;
use std::sync::RwLock;
use std::time::Duration;
use std::ops::DerefMut;
// if no buffer space available then wait for this amount of time before the next try
const WAIT_WRITER_THREADS_MS: u64 = 100;
pub struct BlockMgr {
locks: Arc<Vec<RwLock<()>>>,
buf_mgr: Arc<BufMgr<LruNodeRef<(usize, BlockId)>, LruList<(usize, BlockId)>>>,
ds: DataStore,
}
impl BlockMgr {
pub fn new(conf: ConfigMt) -> Result<BlockMgr, Error> {
let block_mgr_n_lock = *conf.get_conf().get_block_mgr_n_lock();
let block_buf_size = *conf.get_conf().get_block_buf_size();
let mut locks = Vec::with_capacity(block_mgr_n_lock as usize);
for _ in 0..block_mgr_n_lock {
locks.push(RwLock::new(()));
}
let ds = DataStore::new(conf.clone())?;
let block_size = ds.get_block_size();
let buf_mgr = Arc::new(BufMgr::new(block_size, block_buf_size as usize / block_size)?);
Ok(BlockMgr {
locks: Arc::new(locks),
buf_mgr,
ds,
})
}
/// Lock for reading and return data block.
pub fn get_block(&self, block_id: &BlockId) -> Result<BlockLocked<DataBlock>, Error> {
self.get_block_for_read::<DataBlock>(block_id, DataBlock::new, 0)
}
    /// Lock for reading and return a data block from the versioning store.
pub fn get_versioning_block(&self, block_id: &BlockId) -> Result<BlockLocked<DataBlock>, Error> {
self.get_block_for_read::<DataBlock>(block_id, DataBlock::new, 1)
}
/// Lock for reading and return file-level header block.
pub fn get_file_header_block(&self, block_id: &BlockId) -> Result<BlockLocked<FileHeaderBlock>, Error> {
self.get_block_for_read::<FileHeaderBlock>(block_id, FileHeaderBlock::new, 0)
}
/// Lock for reading and return extent-level header block.
pub fn get_extent_header_block(&self, block_id: &BlockId) -> Result<BlockLocked<ExtentHeaderBlock>, Error> {
self.get_block_for_read::<ExtentHeaderBlock>(block_id, ExtentHeaderBlock::new, 0)
}
    /// Lock for reading and return a block containing a free info bitmap section which didn't fit
    /// into the header block.
pub fn get_free_info_block(&self, block_id: &BlockId) -> Result<BlockLocked<FreeInfoBlock>, Error> {
self.get_block_for_read::<FreeInfoBlock>(block_id, FreeInfoBlock::new, 0)
}
/// Lock for writing and return data block.
pub fn get_block_mut(&self, block_id: &BlockId) -> Result<BlockLockedMut<DataBlock>, Error> {
self.get_block_for_write::<DataBlock>(block_id, DataBlock::new, false, 0)
}
    /// Lock for writing and return a data block from the versioning store.
pub fn get_versioning_block_mut(&self, block_id: &BlockId) -> Result<BlockLockedMut<DataBlock>, Error> {
self.get_block_for_write::<DataBlock>(block_id, DataBlock::new, false, 1)
}
/// Lock for writing and return file-level header block.
pub fn get_file_header_block_mut(&self, block_id: &BlockId) -> Result<BlockLockedMut<FileHeaderBlock>, Error> {
self.get_block_for_write::<FileHeaderBlock>(block_id, FileHeaderBlock::new, false, 0)
}
/// Lock for writing and return extent-level header block.
pub fn get_extent_header_block_mut(&self, block_id: &BlockId) -> Result<BlockLockedMut<ExtentHeaderBlock>, Error> {
self.get_block_for_write::<ExtentHeaderBlock>(block_id, ExtentHeaderBlock::new, false, 0)
}
    /// Lock for writing and return a block containing a free info bitmap section which didn't fit
    /// into the header block.
pub fn get_free_info_block_mut(&self, block_id: &BlockId) -> Result<BlockLockedMut<FreeInfoBlock>, Error> {
self.get_block_for_write::<FreeInfoBlock>(block_id, FreeInfoBlock::new, false, 0)
}
/// Return data block pinned, but not locked, dirty flag is not changed.
pub fn get_block_mut_no_lock(&self, block_id: &BlockId) -> Result<DataBlock, Error> {
self.get_block_for_write_no_lock::<DataBlock>(block_id, DataBlock::new)
}
/// Return extent header block pinned, but not locked, dirty flag is not changed.
pub fn get_extent_header_block_mut_no_lock(&self, block_id: &BlockId) -> Result<ExtentHeaderBlock, Error> {
self.get_block_for_write_no_lock::<ExtentHeaderBlock>(block_id, ExtentHeaderBlock::new)
}
// lock and return block of type T
fn get_block_for_read<'b, T>(&'b self, block_id: &BlockId, init_fun: fn(BlockId, usize, Pinned<'b, BlockArea>) -> T, lock_type: usize) -> Result<BlockLocked<T>, Error>
where T: BasicBlock
{
let lid = block_id.hash(self.locks.len()/2)*2 + lock_type;
let lock_holder = RwLockGuard::Read(self.locks[lid].read().unwrap());
if let Some((data, buf_idx)) = self.buf_mgr.get_block(&block_id) {
Ok(BlockLocked::new(lock_holder, init_fun(*block_id, buf_idx, data)))
} else {
let block_type = self.determine_block_type(&block_id);
let ds_data = self.ds.load_block(&block_id, FileState::InUse)?;
let (mut data, buf_idx) = self.allocate_on_cache(*block_id, block_type);
data.copy_from_slice(&ds_data);
return Ok(BlockLocked::new(lock_holder, init_fun(*block_id, buf_idx, data)))
}
}
pub fn get_file_desc(&self, file_id: u16) -> Option<FileDesc> {
self.ds.get_file_desc(file_id)
}
// lock and return block of type T suitable for modification;
    // if try_lock is true then locking is only attempted, and an Error is returned if the block is already locked.
pub fn get_block_for_write<'b, T>(&'b self, block_id: &BlockId, init_fun: fn(BlockId, usize, Pinned<'b, BlockArea>) -> T, try_lock: bool, lock_type: usize) -> Result<BlockLockedMut<T>, Error>
where T: BasicBlock
{
let lid = block_id.hash(self.locks.len()/2)*2 + lock_type;
let lock_holder = if try_lock {
RwLockGuard::Write(self.locks[lid].try_write().map_err(|_| Error::try_lock_error())?)
} else {
RwLockGuard::Write(self.locks[lid].write().unwrap())
};
if let Some((data, buf_idx)) = self.buf_mgr.get_block(&block_id) {
self.set_dirty(buf_idx, true);
Ok(BlockLockedMut::new(BlockLocked::new(lock_holder, init_fun(*block_id, buf_idx, data))))
} else {
let block_type = self.determine_block_type(&block_id);
let ds_data = self.ds.load_block(&block_id, FileState::InUse)?;
let (mut data, buf_idx) = self.allocate_on_cache(*block_id, block_type);
data.copy_from_slice(&ds_data);
self.set_dirty(buf_idx, true);
Ok(BlockLockedMut::new(BlockLocked::new(lock_holder, init_fun(*block_id, buf_idx, data))))
}
}
fn get_block_for_write_no_lock<'b, T>(&'b self, block_id: &BlockId, init_fun: fn(BlockId, usize, Pinned<'b, BlockArea>) -> T) -> Result<T, Error> {
if let Some((data, buf_idx)) = self.buf_mgr.get_block(&block_id) {
Ok(init_fun(*block_id, buf_idx, data))
} else {
let block_type = self.determine_block_type(&block_id);
let ds_data = self.ds.load_block(&block_id, FileState::InUse)?;
let (mut data, buf_idx) = self.allocate_on_cache(*block_id, block_type);
data.copy_from_slice(&ds_data);
Ok(init_fun(*block_id, buf_idx, data))
}
}
fn determine_block_type(&self, block_id: &BlockId) -> BlockType {
let file_desc = self.ds.get_file_desc(block_id.file_id).unwrap();
match file_desc.file_type {
FileType::DataStoreFile => BlockType::DataBlock,
FileType::CheckpointStoreFile => BlockType::CheckpointBlock,
FileType::VersioningStoreFile => BlockType::VersionBlock,
}
}
pub fn block_fill_size(&self) -> usize {
self.ds.block_fill_size()
}
/// Return iterator over blocks in the buffer.
pub fn get_iter(&self) -> BlockIterator {
BlockIterator::new(&self.buf_mgr)
}
/// Return block from buffer by index.
pub fn get_block_by_idx(&self, id: usize, block_id: BlockId, block_type: BlockType) -> Option<BlockLockedMut<DataBlock>> {
let lid = block_id.hash(self.locks.len()/2)*2 + if block_type == BlockType::VersionBlock {1} else {0};
let lock_holder = RwLockGuard::Write(self.locks[lid].write().unwrap());
if let Some(data) = self.buf_mgr.get_block_by_idx(id) {
let bdesc = self.buf_mgr.get_bdesc_by_idx(id).unwrap();
Some(BlockLockedMut::new(BlockLocked::new(lock_holder, DataBlock::new(bdesc.block_id, id, data))))
} else {
None
}
}
/// Write block to disk.
pub fn write_block(&self, block: &mut DataBlock) -> Result<(), Error> {
self.ds.write_block(block, FileState::InUse)
}
fn allocate_on_cache(&self, block_id: BlockId, block_type: BlockType) -> (Pinned<BlockArea>, usize) {
loop {
if let Some((data, buf_idx)) = self.buf_mgr.allocate_on_cache(&block_id, block_type) {
return (data, buf_idx)
} else {
// wait for some time
std::thread::sleep(Duration::from_millis(WAIT_WRITER_THREADS_MS));
}
}
}
    /// Take a free block from the buffer, assign the specified block_id to it, and return it. The dirty
    /// flag is not changed.
pub fn allocate_on_cache_mut_no_lock(&self, block_id: BlockId, block_type: BlockType) -> Result<DataBlock, Error> {
let (mut data, buf_idx) = self.allocate_on_cache(block_id, block_type);
for b in data.deref_mut().deref_mut() {*b = 0;};
Ok(DataBlock::new(block_id, buf_idx, data))
}
    /// Clear and fill `file_desc` with file descriptions of data files.
pub fn get_data_files(&self, file_desc: &mut Vec<FileDesc>) {
self.ds.get_data_files(file_desc);
}
    /// Clear and fill `file_desc` with file descriptions of versioning store files.
pub fn get_versioning_files(&self, file_desc: &mut Vec<FileDesc>) {
self.ds.get_versioning_files(file_desc);
}
    /// Clear and fill `file_desc` with file descriptions of checkpoint store files.
pub fn get_checkpoint_files(&self, file_desc: &mut Vec<FileDesc>) {
self.ds.get_checkpoint_files(file_desc);
}
    /// Mark whether the checkpoint copy of the block in the buffer has been written to disk.
pub fn set_checkpoint_written(&self, desc_id: usize, state: bool) {
self.buf_mgr.set_checkpoint_written(desc_id, state);
}
    /// Set the checkpoint block id for the block in the buffer.
pub fn set_checkpoint_block_id(&self, desc_id: usize, block_id: BlockId) {
self.buf_mgr.set_checkpoint_block_id(desc_id, block_id);
}
    /// Mark whether the block in the buffer is dirty or not.
pub fn set_dirty(&self, desc_id: usize, state: bool) {
self.buf_mgr.set_dirty(desc_id, state);
}
    /// Add an extent to a file in the datastore.
pub fn add_extent(&self, file_id: u16) -> Result<(), Error> {
self.ds.add_extent(file_id, FileState::InUse)
}
/// Clone
pub fn clone(&self) -> Result<Self, Error> {
Ok(BlockMgr {
locks: self.locks.clone(),
buf_mgr: self.buf_mgr.clone(),
ds: self.ds.clone()?,
})
}
/// Return block descriptor by block index in buffer.
pub fn get_block_desc(&self, idx: usize) -> Option<BlockDesc> {
self.buf_mgr.get_bdesc_by_idx(idx)
}
pub fn get_block_size(&self) -> usize {
self.ds.get_block_size()
}
/// Add a new file to datastore.
pub fn add_datafile(&self, file_type: FileType, extent_size: u16, extent_num: u16, max_extent_num: u16) -> Result<u16, Error> {
self.ds.add_datafile(file_type, extent_size, extent_num, max_extent_num)
}
}
/// Iterator over blocks residing in the buffer.
pub struct BlockIterator<'a> {
buf_mgr: &'a BufMgr<LruNodeRef<(usize, BlockId)>, LruList<(usize, BlockId)>>,
idx: usize
}
impl<'a> BlockIterator<'a> {
fn new(buf_mgr: &'a BufMgr<LruNodeRef<(usize, BlockId)>, LruList<(usize, BlockId)>>) -> Self {
BlockIterator {
buf_mgr,
idx: 0,
}
}
pub fn next(&mut self) -> Option<BlockDesc> {
let ret = self.buf_mgr.get_bdesc_by_idx(self.idx);
self.idx += 1;
ret
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
use std::ops::Deref;
use crate::block_mgr::block::FreeInfoHeaderSection;
use crate::block_mgr::block::FreeInfoSection;
fn init_datastore(dspath: &str, block_size: usize) -> Vec<FileDesc> {
let mut fdset = vec![];
let desc1 = FileDesc {
state: FileState::InUse,
file_id: 3,
extent_size: 16,
extent_num: 3,
max_extent_num: 65500,
file_type: FileType::DataStoreFile,
};
let desc2 = FileDesc {
state: FileState::InUse,
file_id: 4,
extent_size: 10,
extent_num: 3,
max_extent_num: 65500,
file_type: FileType::VersioningStoreFile,
};
let desc3 = FileDesc {
state: FileState::InUse,
file_id: 5,
extent_size: 10,
extent_num: 3,
max_extent_num: 65500,
file_type: FileType::CheckpointStoreFile,
};
fdset.push(desc1);
fdset.push(desc2);
fdset.push(desc3);
DataStore::initialize_datastore(dspath, block_size, &fdset).expect("Failed to init datastore");
fdset
}
#[test]
fn test_block_mgr() {
let dspath = "/tmp/test_block_mgr_5689394";
let block_size = 8192;
let block_num = 100;
if Path::new(&dspath).exists() {
std::fs::remove_dir_all(&dspath).expect("Failed to delete test dir on cleanup");
}
std::fs::create_dir(&dspath).expect("Failed to create test dir");
let conf = ConfigMt::new();
let mut c = conf.get_conf();
c.set_datastore_path(dspath.to_owned());
c.set_block_mgr_n_lock(10);
c.set_block_buf_size(block_num*block_size as u64);
drop(c);
let init_fdesc = init_datastore(dspath, block_size);
let block_mgr = BlockMgr::new(conf.clone()).expect("Failed to create instance");
let entry_id = 0;
let entry_sz = 501;
let full_cnt = 0;
let someval = 123u8;
let block_id1 = BlockId::init(3,1,5);
let block_id2 = BlockId::init(3,0,0);
let block_id3 = BlockId::init(3,1,0);
let block_id4 = BlockId::init(3,0,1);
let block_id5 = BlockId::init(3,1,4);
let block_id6 = BlockId::init(1,1,1);
let mut block1 = block_mgr.get_block_mut(&block_id1).expect("failed to get block");
assert!(block1.get_entry(entry_id).is_err());
block1.add_entry(entry_sz);
drop(block1);
let mut block2 = block_mgr.get_file_header_block_mut(&block_id2).expect("failed to get block");
assert_eq!(block2.get_full_cnt(), 1);
block2.set_full_cnt(full_cnt);
drop(block2);
let mut block3 = block_mgr.get_extent_header_block_mut(&block_id3).expect("failed to get block");
assert_eq!(block3.get_full_cnt(), 1);
block3.set_full_cnt(full_cnt);
drop(block3);
let mut block4 = block_mgr.get_free_info_block_mut(&block_id4).expect("failed to get block");
assert_eq!(block4.fi_slice()[0], 0);
block4.fi_slice_mut()[0] = someval;
drop(block4);
let block5 = block_mgr.get_block_for_write(&block_id5, DataBlock::new, false, 0).expect("failed to get block");
drop(block5);
let block1 = block_mgr.get_block(&block_id1).expect("failed to get block");
assert!(block1.get_entry(entry_id).is_ok());
drop(block1);
let block2 = block_mgr.get_file_header_block(&block_id2).expect("failed to get block");
assert_eq!(block2.get_full_cnt(), full_cnt);
drop(block2);
let block3 = block_mgr.get_extent_header_block(&block_id3).expect("failed to get block");
assert_eq!(block3.get_full_cnt(), full_cnt);
drop(block3);
let block4 = block_mgr.get_free_info_block(&block_id4).expect("failed to get block");
assert_eq!(block4.fi_slice()[0], someval);
drop(block4);
assert_eq!(*conf.get_conf().get_block_fill_ratio() as usize * block_size / 100, block_mgr.block_fill_size());
let mut block_iter = block_mgr.get_iter();
while let Some(_desc) = block_iter.next() { }
for i in 0..block_num as usize {
let desc = block_mgr.get_block_desc(i).unwrap();
assert!(block_mgr.get_block_by_idx(desc.id, desc.block_id, desc.block_type).is_some());
}
let mut block1 = block_mgr.get_block_mut(&block_id1).expect("failed to get block");
block_mgr.write_block(&mut block1).expect("Faield to write block");
let dsblock = block_mgr.ds.load_block(&block_id1, FileState::InUse).expect("Failed to load block");
assert_eq!(&block1.slice(), &dsblock.deref().deref());
drop(dsblock);
assert!(block_mgr.allocate_on_cache_mut_no_lock(block_id6, BlockType::CheckpointBlock).is_ok());
let mut files = Vec::new();
block_mgr.get_checkpoint_files(&mut files);
assert_eq!(1, files.len());
assert_eq!(init_fdesc[2], files[0]);
block_mgr.get_versioning_files(&mut files);
assert_eq!(1, files.len());
files[0].extent_num = 3; // by default all extents are discarded
assert_eq!(init_fdesc[1], files[0]);
block_mgr.get_data_files(&mut files);
assert_eq!(1, files.len());
assert_eq!(init_fdesc[0], files[0]);
let desc_id = 99;
let bdesc = block_mgr.get_block_desc(desc_id).expect("Failed to get block desc");
assert_eq!(bdesc.dirty, false);
assert_eq!(bdesc.checkpoint_block_id, BlockId::new());
assert_eq!(bdesc.checkpoint_written, true);
let block_id = BlockId::init(1,1,1);
block_mgr.set_checkpoint_written(desc_id, false);
block_mgr.set_checkpoint_block_id(desc_id, block_id);
block_mgr.set_dirty(desc_id, true);
let bdesc = block_mgr.get_block_desc(desc_id).expect("Failed to get block desc");
assert_eq!(bdesc.dirty, true);
assert_eq!(bdesc.checkpoint_block_id, block_id);
assert_eq!(bdesc.checkpoint_written, false);
assert!(block_mgr.add_extent(3).is_ok());
let fdesc = block_mgr.ds.get_file_desc(3).expect("Failed to get file desc");
assert_eq!(fdesc.extent_num, 4);
let cloned = block_mgr.clone();
drop(cloned);
assert_eq!(block_mgr.get_block_size(), block_size);
assert!(block_mgr.add_datafile(FileType::VersioningStoreFile, 12, 2, 6500).is_ok());
let fdesc = block_mgr.ds.get_file_desc(6).expect("Failed to get file desc");
assert_eq!(fdesc.extent_num, 2);
assert_eq!(fdesc.extent_size, 12);
assert_eq!(fdesc.max_extent_num, 6500);
assert_eq!(fdesc.file_type, FileType::VersioningStoreFile);
}
}
| 39.089695 | 195 | 0.642045 |
237e5cacc0d9c7c57224031fa891a3c7e3075e40 | 2,430 | //! Transaction hashes
use crate::error::Error;
#[cfg(feature = "serde")]
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std::{
fmt::{self, Debug, Display},
str::FromStr,
};
use subtle::{self, ConstantTimeEq};
use subtle_encoding::hex;
/// Size of a transaction hash in bytes
pub const LENGTH: usize = 20;
/// Transaction hashes
#[derive(Copy, Clone, Hash)]
pub struct Hash([u8; LENGTH]);
impl Hash {
/// Create a new transaction hash from raw bytes
pub fn new(bytes: [u8; LENGTH]) -> Hash {
Hash(bytes)
}
/// Borrow the transaction hash as a byte slice
pub fn as_bytes(&self) -> &[u8] {
&self.0[..]
}
}
impl AsRef<[u8]> for Hash {
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
impl ConstantTimeEq for Hash {
#[inline]
fn ct_eq(&self, other: &Hash) -> subtle::Choice {
self.as_bytes().ct_eq(other.as_bytes())
}
}
impl Display for Hash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for byte in &self.0 {
write!(f, "{:02X}", byte)?;
}
Ok(())
}
}
impl Debug for Hash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "transactionn::Hash({})", self)
}
}
/// Decode transaction hash from hex
impl FromStr for Hash {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// Accept either upper or lower case hex
let bytes = hex::decode_upper(s)
.or_else(|_| hex::decode(s))
.map_err(|_| Error::Parse)?;
if bytes.len() != LENGTH {
return Err(Error::Parse);
}
let mut result_bytes = [0u8; LENGTH];
result_bytes.copy_from_slice(&bytes);
Ok(Hash(result_bytes))
}
}
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Hash {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Self::from_str(&s).map_err(|_| {
de::Error::custom(format!(
"expected {}-character hex string, got {:?}",
LENGTH * 2,
s
))
})
}
}
#[cfg(feature = "serde")]
impl Serialize for Hash {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
self.to_string().serialize(serializer)
}
}
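// A small illustrative round-trip check; the hex digest below is an arbitrary
// placeholder value, not a real transaction hash.
#[cfg(test)]
mod tests {
    use super::*;
    use std::str::FromStr;

    #[test]
    fn parse_and_display_round_trip() {
        let hex = "0123456789ABCDEF0123456789ABCDEF01234567";
        let hash = Hash::from_str(hex).unwrap();
        assert_eq!(hash.as_bytes().len(), LENGTH);
        assert_eq!(hash.to_string(), hex);
    }
}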
| 23.592233 | 82 | 0.556379 |
09ef2bb275f2b165aa457d191142651461d8f8ca | 321 | extern crate crabquery;
use crabquery::*;
#[test]
fn test_docs_rs_index() {
let document = Document::from(include_str!("fixtures/docs_rs.html"));
let els = document.select("div.pure-u-sm-4-24");
assert_eq!(els.len(), 15);
let els = document.select(".pure-u-sm-4-24");
assert_eq!(els.len(), 15);
}
| 21.4 | 73 | 0.641745 |
dd912cabd87189b3c009b2e16c2756ffeddd0553 | 414 | //! A simple program using `kommand` that copies from an
//! `InputByteStream` into an `OutputByteStream`.
use nameless::{InputByteStream, OutputByteStream};
use std::io::copy;
/// # Arguments
///
/// * `input` - Input source
/// * `output` - Output sink
#[kommand::main]
fn main(mut input: InputByteStream, mut output: OutputByteStream) -> anyhow::Result<()> {
copy(&mut input, &mut output)?;
Ok(())
}
| 24.352941 | 89 | 0.661836 |
22021167fcae72bd160dadbdbec2590ca7add205 | 2,943 | use std::io::{Cursor, Read};
use bitvec::prelude::*;
use byteorder::{LittleEndian, ReadBytesExt};
use crate::error::{OBIError, OBIResult};
use crate::{CompressionType, FileHeader, Image, ImageInfoHeader};
pub fn decode_image(obi_data: &mut Cursor<Vec<u8>>) -> OBIResult<Image> {
// file header
let version = obi_data
.read_u16::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let file_size = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let data_offset = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let file_header = FileHeader {
file_size,
version,
data_offset,
};
// image info header
let width = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let height = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let compression_type = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let post_compression_size = obi_data
.read_u32::<LittleEndian>()
.map_err(|_| OBIError::Decode)?;
let image_info_header = ImageInfoHeader {
width,
height,
compression_type,
post_compression_size,
};
let data: Vec<bool> = match CompressionType::from_u32(compression_type) {
CompressionType::RLE => {
let mut rest = vec![];
let mut lengths = vec![];
            // Run lengths are stored as little-endian u32 values, terminated by a 0 sentinel.
            loop {
                let l = obi_data
                    .read_u32::<LittleEndian>()
                    .map_err(|_| OBIError::Decode)?;
                if l == 0 {
                    break;
                }
                lengths.push(l);
            }
obi_data
.read_to_end(&mut rest)
.map_err(|_| OBIError::Decode)?;
rest.iter()
.map(|&b| {
BitVec::<Lsb0, u8>::from_element(b)
.into_iter()
.map(|e| e as bool)
.collect::<Vec<bool>>()
})
.flatten()
.into_iter()
.zip(lengths)
.map(|(d, l)| vec![d; l as usize])
.flatten()
.collect::<Vec<bool>>()
}
_ => {
let mut rest = vec![];
obi_data
.read_to_end(&mut rest)
.map_err(|_| OBIError::Decode)?;
rest.iter()
.map(|&b| {
BitVec::<Lsb0, u8>::from_element(b)
.into_iter()
.map(|e| e as bool)
.collect::<Vec<bool>>()
})
.flatten()
.collect::<Vec<_>>()
}
};
Ok(Image {
file_header,
image_info_header,
data,
})
}
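/// A hypothetical convenience helper sketched here for illustration only: it
/// is not part of the original OBI API. It simply reads a whole `.obi` file
/// into memory and feeds it to `decode_image`; mapping I/O failures to
/// `OBIError::Decode` is an assumption made for this sketch.
pub fn decode_image_from_file(path: &str) -> OBIResult<Image> {
    // Load the raw bytes and wrap them in a cursor, as `decode_image` expects.
    let bytes = std::fs::read(path).map_err(|_| OBIError::Decode)?;
    decode_image(&mut Cursor::new(bytes))
}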
| 30.030612 | 77 | 0.464832 |
710baf181520afcf3b831af31b86e99ca142bf8a | 698 | fn main() {
windows::build! {
Windows::Win32::System::Threading::{CreateProcessW, OpenThread},
Windows::Win32::System::Diagnostics::Debug::{WaitForDebugEvent, ContinueDebugEvent, GetThreadContext, SetThreadContext, DebugActiveProcessStop, GetLastError},
Windows::Win32::System::LibraryLoader::GetModuleFileNameW,
Windows::Win32::Foundation::{PWSTR, PSTR, HINSTANCE, BOOL, HANDLE},
Windows::Win32::Foundation::{CloseHandle},
Windows::Win32::Security::SECURITY_ATTRIBUTES,
Windows::Win32::UI::WindowsAndMessaging::MessageBoxA,
Windows::Win32::System::Memory::{VirtualAllocEx, VIRTUAL_ALLOCATION_TYPE, PAGE_PROTECTION_FLAGS},
}
} | 58.166667 | 166 | 0.717765 |
e924b069a4e6f8996069b26eae54d70093110e5f | 1,006 | use counted_array::counted_array;
use crate::game::traj_command_table::*;
use crate::game::{FormationIndex, TrajCommand};
const fn p(x: u8, y: u8) -> FormationIndex {
FormationIndex(x as usize, y as usize)
}
pub const UNIT_BASE: [usize; 3] = [0, 4, 8];
pub const ORDER: [FormationIndex; 14] = [
//
p(3, 1),
p(4, 1),
p(5, 1),
p(6, 1),
//
p(0, 4),
p(0, 3),
p(9, 4),
p(9, 3),
//
p(2, 2),
p(6, 2),
//
p(3, 4),
p(4, 4),
p(5, 4),
p(6, 4),
];
pub struct UnitTableEntry<'a> {
pub pat: usize,
pub table: &'a [TrajCommand],
pub flip_x: bool,
}
counted_array!(pub const UNIT_TABLE: [[UnitTableEntry; 4]; _] = [
[
UnitTableEntry { pat: 1, table: &COMMAND_TABLE1, flip_x: false },
UnitTableEntry { pat: 0, table: &COMMAND_TABLE2, flip_x: false },
UnitTableEntry { pat: 4, table: &COMMAND_TABLE1, flip_x: false },
UnitTableEntry { pat: 1, table: &COMMAND_TABLE3, flip_x: false },
]
]);
| 21.404255 | 73 | 0.554672 |
910a9cf2f1e9d0c6b60b07700741001970e105e8 | 627 | /// # 1.9 String Rotation
pub fn is_rotation(s1: &str, s2: &str) -> bool {
if s1.len() == 0 || s1.len() != s2.len() {
return false
}
let concatenation = format!("{}{}", s2, s2);
concatenation.contains(s1)
}
#[cfg(test)]
mod tests {
#[test]
fn check_is_rotation() {
assert!(super::is_rotation("waterbottle", "erbottlewat"));
assert!(super::is_rotation("xy", "yx"));
assert!(super::is_rotation("a", "a"));
assert!(super::is_rotation("blah", "blah"));
assert!(!super::is_rotation("", ""));
assert!(!super::is_rotation("blah", "hello"));
}
}
| 24.115385 | 66 | 0.54067 |
ede3e9138f67db8ee5b6c3f46e9e10fa0b9d022e | 614 | use std::error::Error;
use nagiosplugin::{Metric, Resource, Runner, TriggerIfValue};
fn main() {
Runner::new().safe_run(do_check).print_and_exit()
}
fn do_check() -> Result<Resource, Box<dyn Error>> {
// The first metric will not issue an alarm, the second one will.
let resource = Resource::new()
.with_prefix("FOO")
.with_description("This is a simple test plugin")
.with_result(Metric::new("test", 15).with_thresholds(20, 50, TriggerIfValue::Greater))
.with_result(Metric::new("alerting", 42).with_thresholds(40, 50, TriggerIfValue::Greater));
Ok(resource)
}
| 32.315789 | 99 | 0.675896 |
f9be3c15b2490bbbb5fa7fb20c4c96a2b8f414e5 | 477 | // SPDX-License-Identifier: Apache-2.0
//! `memspike`'s primary intention is to trigger memory ballooning in
//! VM-based keeps. This will help test the ballooning itself as well
//! as memory pinning for SEV.
#![feature(core_ffi_c)]
use rust_exec_tests::musl_fsbase_fix;
use std::collections::TryReserveError;
musl_fsbase_fix!();
fn main() -> Result<(), TryReserveError> {
let mut alloc: Vec<u8> = Vec::new();
let _ = alloc.try_reserve(40_000_000)?;
Ok(())
}
| 25.105263 | 69 | 0.702306 |
fc0cf98ceba8324f3709d9e57a24c454522ef69e | 27,520 | //! Rewrite a list some items with overflow.
use std::cmp::min;
use itertools::Itertools;
use syntax::parse::token::DelimToken;
use syntax::source_map::Span;
use syntax::{ast, ptr};
use crate::closures;
use crate::config::lists::*;
use crate::config::Version;
use crate::expr::{
can_be_overflowed_expr, is_every_expr_simple, is_method_call, is_nested_call, is_simple_expr,
rewrite_cond,
};
use crate::lists::{
definitive_tactic, itemize_list, write_list, ListFormatting, ListItem, Separator,
};
use crate::macros::MacroArg;
use crate::patterns::{can_be_overflowed_pat, TuplePatField};
use crate::rewrite::{Rewrite, RewriteContext};
use crate::shape::Shape;
use crate::source_map::SpanUtils;
use crate::spanned::Spanned;
use crate::types::{can_be_overflowed_type, SegmentParam};
use crate::utils::{count_newlines, extra_offset, first_line_width, last_line_width, mk_sp};
const SHORT_ITEM_THRESHOLD: usize = 10;
/// A list of `format!`-like macros, that take a long format string and a list of arguments to
/// format.
///
/// Organized as a list of `(&str, usize)` tuples, giving the name of the macro and the number of
/// arguments before the format string (none for `format!("format", ...)`, one for `assert!(result,
/// "format", ...)`, two for `assert_eq!(left, right, "format", ...)`).
const SPECIAL_MACRO_WHITELIST: &[(&str, usize)] = &[
// format! like macros
// From the Rust Standard Library.
("eprint!", 0),
("eprintln!", 0),
("format!", 0),
("format_args!", 0),
("print!", 0),
("println!", 0),
("panic!", 0),
("unreachable!", 0),
// From the `log` crate.
("debug!", 0),
("error!", 0),
("info!", 0),
("warn!", 0),
// write! like macros
("assert!", 1),
("debug_assert!", 1),
("write!", 1),
("writeln!", 1),
// assert_eq! like macros
("assert_eq!", 2),
("assert_ne!", 2),
("debug_assert_eq!", 2),
("debug_assert_ne!", 2),
];
const SPECIAL_ATTR_WHITELIST: &[(&str, usize)] = &[
// From the `failure` crate.
("fail", 0),
];
#[derive(Debug)]
pub(crate) enum OverflowableItem<'a> {
Expr(&'a ast::Expr),
GenericParam(&'a ast::GenericParam),
MacroArg(&'a MacroArg),
NestedMetaItem(&'a ast::NestedMetaItem),
SegmentParam(&'a SegmentParam<'a>),
StructField(&'a ast::StructField),
TuplePatField(&'a TuplePatField<'a>),
Ty(&'a ast::Ty),
}
impl<'a> Rewrite for OverflowableItem<'a> {
fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
self.map(|item| item.rewrite(context, shape))
}
}
impl<'a> Spanned for OverflowableItem<'a> {
fn span(&self) -> Span {
self.map(|item| item.span())
}
}
impl<'a> OverflowableItem<'a> {
fn has_attrs(&self) -> bool {
match self {
OverflowableItem::Expr(ast::Expr { attrs, .. })
| OverflowableItem::GenericParam(ast::GenericParam { attrs, .. }) => !attrs.is_empty(),
OverflowableItem::StructField(ast::StructField { attrs, .. }) => !attrs.is_empty(),
OverflowableItem::MacroArg(MacroArg::Expr(expr)) => !expr.attrs.is_empty(),
OverflowableItem::MacroArg(MacroArg::Item(item)) => !item.attrs.is_empty(),
_ => false,
}
}
pub(crate) fn map<F, T>(&self, f: F) -> T
where
F: Fn(&dyn IntoOverflowableItem<'a>) -> T,
{
match self {
OverflowableItem::Expr(expr) => f(*expr),
OverflowableItem::GenericParam(gp) => f(*gp),
OverflowableItem::MacroArg(macro_arg) => f(*macro_arg),
OverflowableItem::NestedMetaItem(nmi) => f(*nmi),
OverflowableItem::SegmentParam(sp) => f(*sp),
OverflowableItem::StructField(sf) => f(*sf),
OverflowableItem::TuplePatField(pat) => f(*pat),
OverflowableItem::Ty(ty) => f(*ty),
}
}
pub(crate) fn is_simple(&self) -> bool {
match self {
OverflowableItem::Expr(expr) => is_simple_expr(expr),
OverflowableItem::MacroArg(MacroArg::Keyword(..)) => true,
OverflowableItem::MacroArg(MacroArg::Expr(expr)) => is_simple_expr(expr),
OverflowableItem::NestedMetaItem(nested_meta_item) => match nested_meta_item {
ast::NestedMetaItem::Literal(..) => true,
ast::NestedMetaItem::MetaItem(ref meta_item) => match meta_item.node {
ast::MetaItemKind::Word => true,
_ => false,
},
},
_ => false,
}
}
pub(crate) fn is_expr(&self) -> bool {
match self {
OverflowableItem::Expr(..) => true,
OverflowableItem::MacroArg(MacroArg::Expr(..)) => true,
_ => false,
}
}
pub(crate) fn is_nested_call(&self) -> bool {
match self {
OverflowableItem::Expr(expr) => is_nested_call(expr),
OverflowableItem::MacroArg(MacroArg::Expr(expr)) => is_nested_call(expr),
_ => false,
}
}
pub(crate) fn to_expr(&self) -> Option<&'a ast::Expr> {
match self {
OverflowableItem::Expr(expr) => Some(expr),
OverflowableItem::MacroArg(macro_arg) => match macro_arg {
MacroArg::Expr(ref expr) => Some(expr),
_ => None,
},
_ => None,
}
}
pub(crate) fn can_be_overflowed(&self, context: &RewriteContext<'_>, len: usize) -> bool {
match self {
OverflowableItem::Expr(expr) => can_be_overflowed_expr(context, expr, len),
OverflowableItem::MacroArg(macro_arg) => match macro_arg {
MacroArg::Expr(ref expr) => can_be_overflowed_expr(context, expr, len),
MacroArg::Ty(ref ty) => can_be_overflowed_type(context, ty, len),
MacroArg::Pat(..) => false,
MacroArg::Item(..) => len == 1,
MacroArg::Keyword(..) => false,
},
OverflowableItem::NestedMetaItem(nested_meta_item) if len == 1 => {
match nested_meta_item {
ast::NestedMetaItem::Literal(..) => false,
ast::NestedMetaItem::MetaItem(..) => true,
}
}
OverflowableItem::SegmentParam(seg) => match seg {
SegmentParam::Type(ty) => can_be_overflowed_type(context, ty, len),
_ => false,
},
OverflowableItem::TuplePatField(pat) => can_be_overflowed_pat(context, pat, len),
OverflowableItem::Ty(ty) => can_be_overflowed_type(context, ty, len),
_ => false,
}
}
fn whitelist(&self) -> &'static [(&'static str, usize)] {
match self {
OverflowableItem::MacroArg(..) => SPECIAL_MACRO_WHITELIST,
OverflowableItem::NestedMetaItem(..) => SPECIAL_ATTR_WHITELIST,
_ => &[],
}
}
}
pub(crate) trait IntoOverflowableItem<'a>: Rewrite + Spanned {
fn into_overflowable_item(&'a self) -> OverflowableItem<'a>;
}
impl<'a, T: 'a + IntoOverflowableItem<'a>> IntoOverflowableItem<'a> for ptr::P<T> {
fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
(**self).into_overflowable_item()
}
}
macro_rules! impl_into_overflowable_item_for_ast_node {
($($ast_node:ident),*) => {
$(
impl<'a> IntoOverflowableItem<'a> for ast::$ast_node {
fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
OverflowableItem::$ast_node(self)
}
}
)*
}
}
macro_rules! impl_into_overflowable_item_for_rustfmt_types {
([$($ty:ident),*], [$($ty_with_lifetime:ident),*]) => {
$(
impl<'a> IntoOverflowableItem<'a> for $ty {
fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
OverflowableItem::$ty(self)
}
}
)*
$(
impl<'a> IntoOverflowableItem<'a> for $ty_with_lifetime<'a> {
fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
OverflowableItem::$ty_with_lifetime(self)
}
}
)*
}
}
impl_into_overflowable_item_for_ast_node!(Expr, GenericParam, NestedMetaItem, StructField, Ty);
impl_into_overflowable_item_for_rustfmt_types!([MacroArg], [SegmentParam, TuplePatField]);
pub(crate) fn into_overflowable_list<'a, T>(
iter: impl Iterator<Item = &'a T>,
) -> impl Iterator<Item = OverflowableItem<'a>>
where
T: 'a + IntoOverflowableItem<'a>,
{
iter.map(|x| IntoOverflowableItem::into_overflowable_item(x))
}
pub(crate) fn rewrite_with_parens<'a, T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>,
ident: &'a str,
items: impl Iterator<Item = &'a T>,
shape: Shape,
span: Span,
item_max_width: usize,
force_separator_tactic: Option<SeparatorTactic>,
) -> Option<String> {
Context::new(
context,
items,
ident,
shape,
span,
"(",
")",
item_max_width,
force_separator_tactic,
None,
)
.rewrite(shape)
}
pub(crate) fn rewrite_with_angle_brackets<'a, T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>,
ident: &'a str,
items: impl Iterator<Item = &'a T>,
shape: Shape,
span: Span,
) -> Option<String> {
Context::new(
context,
items,
ident,
shape,
span,
"<",
">",
context.config.max_width(),
None,
None,
)
.rewrite(shape)
}
pub(crate) fn rewrite_with_square_brackets<'a, T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>,
name: &'a str,
items: impl Iterator<Item = &'a T>,
shape: Shape,
span: Span,
force_separator_tactic: Option<SeparatorTactic>,
delim_token: Option<DelimToken>,
) -> Option<String> {
let (lhs, rhs) = match delim_token {
Some(DelimToken::Paren) => ("(", ")"),
Some(DelimToken::Brace) => ("{", "}"),
_ => ("[", "]"),
};
Context::new(
context,
items,
name,
shape,
span,
lhs,
rhs,
context.config.width_heuristics().array_width,
force_separator_tactic,
Some(("[", "]")),
)
.rewrite(shape)
}
struct Context<'a> {
context: &'a RewriteContext<'a>,
items: Vec<OverflowableItem<'a>>,
ident: &'a str,
prefix: &'static str,
suffix: &'static str,
one_line_shape: Shape,
nested_shape: Shape,
span: Span,
item_max_width: usize,
one_line_width: usize,
force_separator_tactic: Option<SeparatorTactic>,
custom_delims: Option<(&'a str, &'a str)>,
}
impl<'a> Context<'a> {
fn new<T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>,
items: impl Iterator<Item = &'a T>,
ident: &'a str,
shape: Shape,
span: Span,
prefix: &'static str,
suffix: &'static str,
item_max_width: usize,
force_separator_tactic: Option<SeparatorTactic>,
custom_delims: Option<(&'a str, &'a str)>,
) -> Context<'a> {
let used_width = extra_offset(ident, shape);
// 1 = `()`
let one_line_width = shape.width.saturating_sub(used_width + 2);
// 1 = "(" or ")"
let one_line_shape = shape
.offset_left(last_line_width(ident) + 1)
.and_then(|shape| shape.sub_width(1))
.unwrap_or(Shape { width: 0, ..shape });
let nested_shape = shape_from_indent_style(context, shape, used_width + 2, used_width + 1);
Context {
context,
items: into_overflowable_list(items).collect(),
ident,
one_line_shape,
nested_shape,
span,
prefix,
suffix,
item_max_width,
one_line_width,
force_separator_tactic,
custom_delims,
}
}
fn last_item(&self) -> Option<&OverflowableItem<'_>> {
self.items.last()
}
fn items_span(&self) -> Span {
let span_lo = self
.context
.snippet_provider
.span_after(self.span, self.prefix);
mk_sp(span_lo, self.span.hi())
}
fn rewrite_last_item_with_overflow(
&self,
last_list_item: &mut ListItem,
shape: Shape,
) -> Option<String> {
let last_item = self.last_item()?;
let rewrite = match last_item {
OverflowableItem::Expr(ref expr) => {
match expr.node {
// When overflowing the closure which consists of a single control flow
// expression, force to use block if its condition uses multi line.
ast::ExprKind::Closure(..) => {
// If the argument consists of multiple closures, we do not overflow
// the last closure.
if closures::args_have_many_closure(&self.items) {
None
} else {
closures::rewrite_last_closure(self.context, expr, shape)
}
}
// When overflowing the expressions which consists of a control flow
// expression, avoid condition to use multi line.
ast::ExprKind::If(..)
| ast::ExprKind::ForLoop(..)
| ast::ExprKind::Loop(..)
| ast::ExprKind::While(..)
| ast::ExprKind::Match(..) => {
let multi_line = rewrite_cond(self.context, expr, shape)
.map_or(false, |cond| cond.contains('\n'));
if multi_line {
None
} else {
expr.rewrite(self.context, shape)
}
}
_ => expr.rewrite(self.context, shape),
}
}
item => item.rewrite(self.context, shape),
};
if let Some(rewrite) = rewrite {
// splitn(2, *).next().unwrap() is always safe.
let rewrite_first_line = Some(rewrite.splitn(2, '\n').next().unwrap().to_owned());
last_list_item.item = rewrite_first_line;
Some(rewrite)
} else {
None
}
}
fn default_tactic(&self, list_items: &[ListItem]) -> DefinitiveListTactic {
definitive_tactic(
list_items,
ListTactic::LimitedHorizontalVertical(self.item_max_width),
Separator::Comma,
self.one_line_width,
)
}
fn try_overflow_last_item(&self, list_items: &mut Vec<ListItem>) -> DefinitiveListTactic {
// 1 = "("
let combine_arg_with_callee = self.items.len() == 1
&& self.items[0].is_expr()
&& !self.items[0].has_attrs()
&& self.ident.len() < self.context.config.tab_spaces();
let overflow_last = combine_arg_with_callee || can_be_overflowed(self.context, &self.items);
// Replace the last item with its first line to see if it fits with
// first arguments.
let placeholder = if overflow_last {
let old_value = *self.context.force_one_line_chain.borrow();
match self.last_item() {
Some(OverflowableItem::Expr(expr))
if !combine_arg_with_callee && is_method_call(expr) =>
{
self.context.force_one_line_chain.replace(true);
}
Some(OverflowableItem::MacroArg(MacroArg::Expr(expr)))
if !combine_arg_with_callee
&& is_method_call(expr)
&& self.context.config.version() == Version::Two =>
{
self.context.force_one_line_chain.replace(true);
}
_ => (),
}
let result = last_item_shape(
&self.items,
list_items,
self.one_line_shape,
self.item_max_width,
)
.and_then(|arg_shape| {
self.rewrite_last_item_with_overflow(
&mut list_items[self.items.len() - 1],
arg_shape,
)
});
self.context.force_one_line_chain.replace(old_value);
result
} else {
None
};
let mut tactic = definitive_tactic(
&*list_items,
ListTactic::LimitedHorizontalVertical(self.item_max_width),
Separator::Comma,
self.one_line_width,
);
// Replace the stub with the full overflowing last argument if the rewrite
// succeeded and its first line fits with the other arguments.
match (overflow_last, tactic, placeholder) {
(true, DefinitiveListTactic::Horizontal, Some(ref overflowed))
if self.items.len() == 1 =>
{
// When we are rewriting a nested function call, we restrict the
// budget for the inner function to avoid them being deeply nested.
// However, when the inner function has a prefix or a suffix
// (e.g., `foo() as u32`), this budget reduction may produce poorly
                // formatted code, where a prefix or a suffix is left on its own
                // line. Here we explicitly check for those cases.
if count_newlines(overflowed) == 1 {
let rw = self
.items
.last()
.and_then(|last_item| last_item.rewrite(self.context, self.nested_shape));
let no_newline = rw.as_ref().map_or(false, |s| !s.contains('\n'));
if no_newline {
list_items[self.items.len() - 1].item = rw;
} else {
list_items[self.items.len() - 1].item = Some(overflowed.to_owned());
}
} else {
list_items[self.items.len() - 1].item = Some(overflowed.to_owned());
}
}
(true, DefinitiveListTactic::Horizontal, placeholder @ Some(..)) => {
list_items[self.items.len() - 1].item = placeholder;
}
_ if !self.items.is_empty() => {
list_items[self.items.len() - 1].item = self
.items
.last()
.and_then(|last_item| last_item.rewrite(self.context, self.nested_shape));
// Use horizontal layout for a function with a single argument as long as
// everything fits in a single line.
// `self.one_line_width == 0` means vertical layout is forced.
if self.items.len() == 1
&& self.one_line_width != 0
&& !list_items[0].has_comment()
&& !list_items[0].inner_as_ref().contains('\n')
&& crate::lists::total_item_width(&list_items[0]) <= self.one_line_width
{
tactic = DefinitiveListTactic::Horizontal;
} else {
tactic = self.default_tactic(list_items);
if tactic == DefinitiveListTactic::Vertical {
if let Some((all_simple, num_args_before)) =
maybe_get_args_offset(self.ident, &self.items)
{
let one_line = all_simple
&& definitive_tactic(
&list_items[..num_args_before],
ListTactic::HorizontalVertical,
Separator::Comma,
self.nested_shape.width,
) == DefinitiveListTactic::Horizontal
&& definitive_tactic(
&list_items[num_args_before + 1..],
ListTactic::HorizontalVertical,
Separator::Comma,
self.nested_shape.width,
) == DefinitiveListTactic::Horizontal;
if one_line {
tactic = DefinitiveListTactic::SpecialMacro(num_args_before);
};
} else if is_every_expr_simple(&self.items) && no_long_items(list_items) {
tactic = DefinitiveListTactic::Mixed;
}
}
}
}
_ => (),
}
tactic
}
fn rewrite_items(&self) -> Option<(bool, String)> {
let span = self.items_span();
let items = itemize_list(
self.context.snippet_provider,
self.items.iter(),
self.suffix,
",",
|item| item.span().lo(),
|item| item.span().hi(),
|item| item.rewrite(self.context, self.nested_shape),
span.lo(),
span.hi(),
true,
);
let mut list_items: Vec<_> = items.collect();
// Try letting the last argument overflow to the next line with block
// indentation. If its first line fits on one line with the other arguments,
// we format the function arguments horizontally.
let tactic = self.try_overflow_last_item(&mut list_items);
let trailing_separator = if let Some(tactic) = self.force_separator_tactic {
tactic
} else if !self.context.use_block_indent() {
SeparatorTactic::Never
} else {
self.context.config.trailing_comma()
};
let ends_with_newline = match tactic {
DefinitiveListTactic::Vertical | DefinitiveListTactic::Mixed => {
self.context.use_block_indent()
}
_ => false,
};
let fmt = ListFormatting::new(self.nested_shape, self.context.config)
.tactic(tactic)
.trailing_separator(trailing_separator)
.ends_with_newline(ends_with_newline);
write_list(&list_items, &fmt)
.map(|items_str| (tactic == DefinitiveListTactic::Horizontal, items_str))
}
fn wrap_items(&self, items_str: &str, shape: Shape, is_extendable: bool) -> String {
let shape = Shape {
width: shape.width.saturating_sub(last_line_width(self.ident)),
..shape
};
let (prefix, suffix) = match self.custom_delims {
Some((lhs, rhs)) => (lhs, rhs),
_ => (self.prefix, self.suffix),
};
let extend_width = if items_str.is_empty() {
2
} else {
first_line_width(items_str) + 1
};
let nested_indent_str = self
.nested_shape
.indent
.to_string_with_newline(self.context.config);
let indent_str = shape
.block()
.indent
.to_string_with_newline(self.context.config);
let mut result = String::with_capacity(
self.ident.len() + items_str.len() + 2 + indent_str.len() + nested_indent_str.len(),
);
result.push_str(self.ident);
result.push_str(prefix);
let force_single_line = if self.context.config.version() == Version::Two {
!self.context.use_block_indent() || (is_extendable && extend_width <= shape.width)
} else {
// 2 = `()`
let fits_one_line = items_str.len() + 2 <= shape.width;
!self.context.use_block_indent()
|| (self.context.inside_macro() && !items_str.contains('\n') && fits_one_line)
|| (is_extendable && extend_width <= shape.width)
};
if force_single_line {
result.push_str(items_str);
} else {
if !items_str.is_empty() {
result.push_str(&nested_indent_str);
result.push_str(items_str);
}
result.push_str(&indent_str);
}
result.push_str(suffix);
result
}
fn rewrite(&self, shape: Shape) -> Option<String> {
let (extendable, items_str) = self.rewrite_items()?;
// If we are using visual indent style and failed to format, retry with block indent.
if !self.context.use_block_indent()
&& need_block_indent(&items_str, self.nested_shape)
&& !extendable
{
self.context.use_block.replace(true);
let result = self.rewrite(shape);
self.context.use_block.replace(false);
return result;
}
Some(self.wrap_items(&items_str, shape, extendable))
}
}
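// Heuristic: returns `true` when some line after the first starts to the left of `shape`'s
// indent, i.e. the visually indented rewrite did not line up and block indent is needed.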
fn need_block_indent(s: &str, shape: Shape) -> bool {
s.lines().skip(1).any(|s| {
s.find(|c| !char::is_whitespace(c))
.map_or(false, |w| w + 1 < shape.indent.width())
})
}
fn can_be_overflowed(context: &RewriteContext<'_>, items: &[OverflowableItem<'_>]) -> bool {
items
.last()
.map_or(false, |x| x.can_be_overflowed(context, items.len()))
}
/// Returns a shape for the last argument which is going to be overflowed.
fn last_item_shape(
lists: &[OverflowableItem<'_>],
items: &[ListItem],
shape: Shape,
args_max_width: usize,
) -> Option<Shape> {
if items.len() == 1 && !lists.get(0)?.is_nested_call() {
return Some(shape);
}
let offset = items
.iter()
.dropping_back(1)
.map(|i| {
// 2 = ", "
2 + i.inner_as_ref().len()
})
.sum();
Shape {
width: min(args_max_width, shape.width),
..shape
}
.offset_left(offset)
}
fn shape_from_indent_style(
context: &RewriteContext<'_>,
shape: Shape,
overhead: usize,
offset: usize,
) -> Shape {
let (shape, overhead) = if context.use_block_indent() {
let shape = shape
.block()
.block_indent(context.config.tab_spaces())
.with_max_width(context.config);
(shape, 1) // 1 = ","
} else {
(shape.visual_indent(offset), overhead)
};
Shape {
width: shape.width.saturating_sub(overhead),
..shape
}
}
fn no_long_items(list: &[ListItem]) -> bool {
list.iter()
.all(|item| item.inner_as_ref().len() <= SHORT_ITEM_THRESHOLD)
}
/// In case special-case style is required, returns an offset from which we start horizontal layout.
pub(crate) fn maybe_get_args_offset(
callee_str: &str,
args: &[OverflowableItem<'_>],
) -> Option<(bool, usize)> {
if let Some(&(_, num_args_before)) = args
.get(0)?
.whitelist()
.iter()
.find(|&&(s, _)| s == callee_str)
{
let all_simple = args.len() > num_args_before
&& is_every_expr_simple(&args[0..num_args_before])
&& is_every_expr_simple(&args[num_args_before + 1..]);
Some((all_simple, num_args_before))
} else {
None
}
}
| 34.968234 | 100 | 0.534956 |
145caeae7e81f3aad82e89b393c22c96a24c6aa3 | 415 | #![no_std]
#![feature(sort_internals)]
#[macro_use]
pub mod macros;
pub mod button;
pub mod consts;
pub mod delay;
pub mod display;
pub mod error;
pub mod line;
pub mod model;
pub mod protocol;
pub mod rotary_encoder;
pub mod sdcard;
pub mod time;
pub mod types;
pub mod uart_serial;
pub mod usb_serial;
pub mod prelude {
pub use crate::consts::*;
pub use crate::error::*;
pub use crate::types::*;
}
| 15.37037 | 29 | 0.698795 |
09ee47aa7c58e994dc238a5b3b123be0132cb081 | 5,719 | #![allow(dead_code)]
use components::stickers::{sprite::ext::*, text::ext::*};
use once_cell::sync::OnceCell;
use shared::{
domain::{
audio::AudioId,
image::ImageId,
jig::{
module::{
body::{
Audio, Image, Instructions, Transform,
_groups::design::{
Backgrounds, BaseContent, Sprite, Sticker, Text, Trace, TraceKind,
TraceShape,
},
tapping_board::{
Content, Hint, Mode, ModuleData as RawData, Next, PlaySettings,
},
},
ModuleId,
},
JigId,
},
},
media::MediaLibrary,
};
use utils::prelude::*;
use uuid::Uuid;
pub static SETTINGS: OnceCell<DebugSettings> = OnceCell::new();
//const IMAGE_UUID:&'static str = "bf2fe548-7ffd-11eb-b3ab-579026da8b36";
const IMAGE_UUID: &str = "9da11e0a-c17b-11eb-b863-570eea18a3bd";
const AUDIO_UUID: &str = "734314da-0b07-11ec-95f0-2b4855fa3cb8";
pub const DEBUG_TEXT:&str = "{\"version\":\"0.1.0\",\"content\":[{\"children\":[{\"text\":\"text from rust\",\"element\":\"P1\"}]}]}";
#[derive(Debug, Default)]
pub struct DebugSettings {
pub data: Option<RawData>,
pub skip_load_jig: bool,
}
#[derive(Clone, Debug, PartialEq)]
pub struct InitData {
pub stickers: Vec<InitSticker>,
pub traces: Vec<InitTrace>,
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum InitSticker {
Text,
Sprite,
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum InitTrace {
//x, y, w, h
Ellipse(f64, f64, f64, f64),
}
impl DebugSettings {
pub fn debug(init_data: Option<InitData>) -> DebugSettings {
DebugSettings {
//debug always has to have some data
//otherwise it will fail at load time
data: Some(if let Some(init_data) = init_data {
RawData {
content: Some(Content {
mode: Mode::Words,
play_settings: PlaySettings {
hint: Hint::None,
//hint: Hint::Highlight,
//next: Next::SelectSome(1),
next: Next::SelectAll,
},
traces: init_data
.traces
.iter()
.map(|init| match init {
InitTrace::Ellipse(x, y, w, h) => {
let mut transform = Transform::identity();
transform.set_translation_2d(*x, *y);
Trace {
shape: TraceShape::Ellipse(*w, *h),
transform,
kind: TraceKind::Regular,
audio: Some(Audio {
id: AudioId(Uuid::parse_str(AUDIO_UUID).unwrap_ji()),
lib: MediaLibrary::User,
}),
text: Some("hello world!".to_string()),
}
}
})
.collect(),
base: BaseContent {
theme: ThemeId::Chalkboard,
instructions: Instructions {
text: Some("Heya World!".to_string()),
..Instructions::default()
},
stickers: init_data
.stickers
.iter()
.map(|init| match init {
InitSticker::Text => {
Sticker::Text(Text::new(DEBUG_TEXT.to_string()))
}
InitSticker::Sprite => Sticker::Sprite(Sprite::new(Image {
id: ImageId(Uuid::parse_str(IMAGE_UUID).unwrap_ji()),
lib: MediaLibrary::Global,
})),
})
.collect(),
backgrounds: Backgrounds {
layer_1: None, //Some(Background::Color(hex_to_rgba8("#ff0000"))),
layer_2: None,
},
},
..Content::default()
}),
}
} else {
RawData { content: None }
}),
skip_load_jig: true,
}
}
}
pub fn init(jig_id: JigId, _module_id: ModuleId) {
if jig_id == JigId(Uuid::from_u128(0)) {
SETTINGS
.set(DebugSettings::debug(Some(InitData {
stickers: vec![
InitSticker::Text, // InitSticker::Sprite
],
traces: vec![
InitTrace::Ellipse(0.3, 0.4, 0.2, 0.1),
InitTrace::Ellipse(0.1, 0.1, 0.1, 0.1),
],
})))
.unwrap_ji();
//SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();
} else {
SETTINGS.set(DebugSettings::default()).unwrap_ji();
}
}
pub fn settings() -> &'static DebugSettings {
unsafe { SETTINGS.get_unchecked() }
}
| 37.379085 | 134 | 0.404966 |
67106df49cb2f394b7b83548301e87cef02e926d | 1,932 | use crate::mesh::internal::greedy_mesh::should_create_face::should_create_face;
use crate::mesh::{BlockDescriptor, Face, FaceDirection};
use crate::{BlockOffset, Chunk};
use std::fmt::Debug;
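/// Merges a single block's face in `face_direction` into the current greedy-meshing run.
///
/// A rough outline, inferred from the body: a block whose face is visible either extends
/// `current_face` by one row or starts a new face via `face_callback`; a hidden face or an
/// empty block flushes `current_face` into `lines` / `lines_transparent`.
///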
/// # Panics
#[inline]
pub fn merge_face<
T: Debug + Send + Sync,
TE: Sync + Send + Clone + PartialEq + Debug,
C: Send + Sync + Fn(&T) -> Option<BlockDescriptor>,
FC: Send + Sync + Fn(&BlockOffset<SIZE>, &TE, bool) -> Face<TE, SIZE>,
TEC: Send + Sync + Fn(&T, FaceDirection) -> TE,
const SIZE: usize,
>(
chunk: &Chunk<T, SIZE>,
describe_callback: &C,
lines: &mut Vec<Face<TE, SIZE>>,
lines_transparent: &mut Vec<Face<TE, SIZE>>,
position: &BlockOffset<SIZE>,
neighbour_position: Option<BlockOffset<SIZE>>,
mut current_face: &mut Option<Face<TE, SIZE>>,
face_callback: FC,
texture_callback: &TEC,
face_direction: FaceDirection,
) {
let block = chunk.get(position);
if let Some(descriptor) = describe_callback(block) {
if should_create_face(chunk, &describe_callback, neighbour_position) {
if let Some(face) = &mut current_face {
face.extend_row_by_one();
} else {
current_face.replace(face_callback(
position,
&texture_callback(block, face_direction),
descriptor.is_transparent,
));
}
} else if let Some(face) = current_face.take() {
// If next block won't have a face in this direction
if descriptor.is_transparent {
lines_transparent.push(face);
} else {
lines.push(face);
}
}
} else if let Some(face) = current_face.take() {
// If we reached air we can end the face
if face.is_transparent {
lines_transparent.push(face);
} else {
lines.push(face);
}
}
}
| 34.5 | 79 | 0.581263 |
d7e813e99ab63a3e4b236e57b778ec669cd9aa28 | 5,911 | use super::*;
#[test]
fn without_byte_bitstring_or_list_element_errors_badarg() {
run!(
|arc_process| {
(
Just(arc_process.clone()),
strategy::term::binary::sub::with_bit_count(2, arc_process.clone()),
is_not_byte_bitstring_nor_list(arc_process.clone()),
)
.prop_map(|(arc_process, head, tail)| {
(
arc_process.clone(),
arc_process.cons(head, tail).unwrap(),
tail,
)
})
},
|(arc_process, bitstring_list, element)| {
prop_assert_badarg!(
native(&arc_process, bitstring_list),
element_context(bitstring_list, element)
);
Ok(())
},
);
}
#[test]
fn with_empty_list_returns_bitstring() {
run!(
|arc_process| {
(
Just(arc_process.clone()),
strategy::term::binary::sub::with_bit_count(2, arc_process.clone()),
)
.prop_map(|(arc_process, head)| {
(
arc_process.clone(),
arc_process.cons(head, Term::NIL).unwrap(),
head,
)
})
},
|(arc_process, list, bitstring)| {
prop_assert_eq!(native(&arc_process, list), Ok(bitstring));
Ok(())
},
);
}
#[test]
fn with_byte_tail_errors_badarg() {
with_tail_errors_badarg(|process| process.integer(2).unwrap())
}
#[test]
fn with_proper_list_returns_binary() {
with(|head, process| {
let tail_head = process.integer(254).unwrap();
let tail_tail = Term::NIL;
let tail = process.cons(tail_head, tail_tail).unwrap();
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 255, 2 :: 2, &process))
);
})
}
#[test]
fn with_heap_binary_returns_binary() {
with(|head, process| {
let tail = process.binary_from_bytes(&[254, 253]).unwrap();
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 255, 191, 1 :: 2, &process))
);
})
}
#[test]
fn with_subbinary_with_bit_count_0_returns_binary() {
with(|head, process| {
let original = process.binary_from_bytes(&[2]).unwrap();
let tail = process
.subbinary_from_original(original, 0, 0, 1, 0)
.unwrap();
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 2 :: 2, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_1_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(2, 0b1 :: 1, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 5 :: 3, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_2_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b11 :: 2, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 11 :: 4, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_3_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b101 :: 3, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 21 :: 5, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_4_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b0101 :: 4, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 37 :: 6, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_5_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b10101 :: 5, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 85 :: 7, &process))
);
});
}
#[test]
fn with_subbinary_with_bit_count_6_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b010101 :: 6, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(process.binary_from_bytes(&[1, 192, 149]).unwrap())
);
});
}
#[test]
fn with_subbinary_with_bit_count_7_returns_subbinary() {
with(|head, process| {
let tail = bitstring!(0b0000_0010, 0b1010101 :: 7, &process);
let iolist = process.cons(head, tail).unwrap();
assert_eq!(
native(process, iolist),
Ok(bitstring!(1, 192, 170, 1 :: 1, &process)),
)
});
}
fn with_tail_errors_badarg<T>(tail: T)
where
T: FnOnce(&Process) -> Term,
{
with(|head, process| {
let tail = tail(&process);
let bitstring_list = process.cons(head, tail).unwrap();
assert_badarg!(
native(process, bitstring_list),
format!(
"bitstring_list ({}) tail ({}) cannot be a byte",
bitstring_list, tail
)
);
});
}
fn with<F>(f: F)
where
F: FnOnce(Term, &Process) -> (),
{
with_process(|process| {
let head = bitstring!(1, 0b11 :: 2, &process);
f(head, &process);
})
}
| 26.154867 | 84 | 0.528675 |
3aba77a1442254e56f2e025ea92ffeaf10a9590b | 371 | #![cfg_attr(feature = "priv_raw_ref", feature(raw_ref_op))]
mod misc_tests_submod {
mod accessing_struct_fields;
mod aligned_struct_offsets;
mod derive_macro;
mod ext_traits;
mod from_examples;
mod get_field_offset_trait;
mod misc_fieldoffsets_methods;
mod off_macros;
mod packed_struct_offsets;
mod struct_field_offsets_macro;
}
| 24.733333 | 59 | 0.749326 |
e520a27d9633d5e4ab07ea61e909c55c80095f35 | 901 | use std::num::NonZeroU64;
#[derive(Debug)]
pub(crate) struct BoxHeader {
pub(crate) magic_bytes: [u8; 4],
pub(crate) version: u32,
pub(crate) alignment: u64,
pub(crate) trailer: Option<NonZeroU64>,
}
// Make some attempt to not accidentally load plain text files,
// and also make it break almost immediately in any UTF-8 compliant text parser.
pub(crate) const MAGIC_BYTES: &[u8; 4] = b"\xffBOX";
impl BoxHeader {
pub(crate) fn new(trailer: Option<NonZeroU64>) -> BoxHeader {
BoxHeader {
magic_bytes: *MAGIC_BYTES,
version: 0x0,
alignment: 0,
trailer,
}
}
pub(crate) fn with_alignment(alignment: u64) -> BoxHeader {
BoxHeader {
alignment,
..Default::default()
}
}
}
impl Default for BoxHeader {
fn default() -> Self {
BoxHeader::new(None)
}
}
| 23.710526 | 80 | 0.594895 |
48d9cd95efefa6755827d5011dbb3b69bcfed367 | 6,209 | use crate::{
header::{HeaderKey, HeaderValue},
Headers, StatusCode,
};
#[derive(Debug)]
enum ProtocolData<'a> {
Ref(&'a str),
Owned(String),
}
impl<'a> AsRef<str> for ProtocolData<'a> {
fn as_ref(&self) -> &str {
match self {
Self::Ref(tmp) => tmp,
Self::Owned(tmp) => &tmp,
}
}
}
impl<'a> PartialEq for ProtocolData<'a> {
fn eq(&self, other: &ProtocolData<'_>) -> bool {
self.as_ref().eq(other.as_ref())
}
}
/// Represents a single HTTP-Request
#[derive(Debug, PartialEq)]
pub struct Response<'a> {
status_code: StatusCode,
protocol: ProtocolData<'a>,
headers: Headers<'a>,
body: Vec<u8>,
}
impl<'a> Response<'a> {
/// Creates a new Response with the given
/// Data as its inital State
pub fn new(
protocol: &'a str,
status_code: StatusCode,
headers: Headers<'a>,
body: Vec<u8>,
) -> Self {
Self {
status_code,
protocol: ProtocolData::Ref(protocol),
headers,
body,
}
}
/// Creates a new Response that owns all of
/// its Data
pub(crate) fn new_owned(
protocol: String,
status_code: StatusCode,
headers: Headers<'a>,
body: Vec<u8>,
) -> Self {
Self {
status_code,
protocol: ProtocolData::Owned(protocol),
headers,
body,
}
}
/// Serialzes the Response and returns the Data as
/// a tuple of form (HTTP-Head, HTTP-Body)
pub fn serialize(&self) -> (Vec<u8>, &[u8]) {
let protocol = self.protocol.as_ref();
let status_code = self.status_code.serialize();
let capacity = protocol.len() + 1 + status_code.len() + 4;
let mut result = Vec::with_capacity(capacity);
// The first line with method, path, protocol
result.extend_from_slice(protocol.as_bytes());
result.push(b' ');
result.extend_from_slice(status_code.as_bytes());
result.extend_from_slice("\r\n".as_bytes());
// The headers
self.headers.serialize(&mut result);
// The ending of the head
result.extend_from_slice("\r\n".as_bytes());
(result, &self.body)
}
/// Returns the Protocol of the Response
pub fn protocol(&self) -> &str {
self.protocol.as_ref()
}
/// Returns the StatusCode of the Response
pub fn status_code(&self) -> &StatusCode {
&self.status_code
}
/// Returns the Headers of the Response
pub fn headers(&self) -> &Headers<'a> {
&self.headers
}
/// Returns the Body of the Response
pub fn body(&self) -> &[u8] {
&self.body
}
/// Adds the Key-Value Pair as a new Header to
/// the Response or replaces the old Value of the
/// Header if it already existed on the Response
pub fn add_header<'b, K, V>(&mut self, key: K, value: V)
where
'b: 'a,
K: Into<HeaderKey<'a>>,
V: Into<HeaderValue<'a>>,
{
self.headers.set(key, value);
}
/// Replaces the old Body of the Response with the
/// new given Body and updates the Content-Length
/// Header as well with the new Length
pub fn set_body(&mut self, n_body: Vec<u8>) {
self.body = n_body;
self.add_header("Content-Length", self.body.len());
}
/// Checks if the Response is send using
/// `Transfer-Encoding: Chunked`
pub fn is_chunked(&self) -> bool {
match self.headers.get("Transfer-Encoding") {
None => false,
Some(value) => value.eq_ignore_case(&HeaderValue::StrRef("Chunked")),
}
}
    /// Clones the entire Response to produce a new independent
/// Response
pub fn to_owned<'refed, 'owned>(&'refed self) -> Response<'owned> {
Response::new_owned(
self.protocol.as_ref().to_owned(),
self.status_code.clone(),
self.headers.to_owned(),
self.body.clone(),
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn serialize_valid() {
let mut headers = Headers::new();
headers.set("test-1", "value-1");
let req = Response::new(
"HTTP/1.1",
StatusCode::OK,
headers,
"body".as_bytes().to_vec(),
);
let raw_resp_header = "HTTP/1.1 200 OK\r\ntest-1: value-1\r\n\r\n";
let resp_header = raw_resp_header.as_bytes().to_vec();
let resp_body = "body".as_bytes();
assert_eq!(req.serialize(), (resp_header, resp_body));
}
#[test]
fn serialize_valid_no_body() {
let mut headers = Headers::new();
headers.set("test-1", "value-1");
let req = Response::new("HTTP/1.1", StatusCode::OK, headers, "".as_bytes().to_vec());
let raw_resp_header = "HTTP/1.1 200 OK\r\ntest-1: value-1\r\n\r\n";
let resp_header = raw_resp_header.as_bytes().to_vec();
let resp_body = "".as_bytes();
assert_eq!(req.serialize(), (resp_header, resp_body));
}
#[test]
fn is_chunked_not_set() {
let mut headers = Headers::new();
headers.set("test-1", "value-1");
let resp = Response::new("HTTP/1.1", StatusCode::OK, headers, "".as_bytes().to_vec());
assert_eq!(false, resp.is_chunked());
}
#[test]
fn is_chunked_set() {
let mut headers = Headers::new();
headers.set("Transfer-Encoding", "Chunked");
let resp = Response::new("HTTP/1.1", StatusCode::OK, headers, "".as_bytes().to_vec());
assert_eq!(true, resp.is_chunked());
}
#[test]
fn is_chunked_set_differently() {
let mut headers = Headers::new();
headers.set("Transfer-Encoding", "compress");
let resp = Response::new("HTTP/1.1", StatusCode::OK, headers, "".as_bytes().to_vec());
assert_eq!(false, resp.is_chunked());
}
#[test]
fn to_owned() {
let resp = Response::new("HTTP/1.1", StatusCode::OK, Headers::new(), Vec::new());
let cloned = resp.to_owned();
drop(resp);
assert_eq!(&StatusCode::OK, cloned.status_code())
}
}
| 27.595556 | 94 | 0.560477 |
f8ccb3d73a82ce9b5571c95f4e616eed6076550d | 9,485 | use std::{
io::{stdin, stdout, BufRead, Write},
path::{Path, PathBuf},
};
use isahc::{
auth::{Authentication, Credentials},
config::Configurable,
http::StatusCode,
HttpClient, ReadResponseExt, Request,
};
use itertools::Itertools;
use semver::Version;
use serde::{de::IgnoredAny, Deserialize};
use serde_json::json;
use toml::from_str as from_toml_str;
use xshell::{pushd, read_file};
use crate::{cmd, config, Result};
const CRATESIO_API: &str = "https://crates.io/api/v1/crates";
const GITHUB_API_RUMA: &str = "https://api.github.com/repos/ruma/ruma";
/// Task to create a new release of the given crate.
pub struct ReleaseTask {
/// The crate to release.
name: String,
/// The root of the workspace.
project_root: PathBuf,
/// The version to release.
version: Version,
/// The http client to use for requests.
client: HttpClient,
}
impl ReleaseTask {
/// Create a new `ReleaseTask` with the given `name` and `project_root`.
pub(crate) fn new(name: String, project_root: PathBuf) -> Result<Self> {
let path = project_root.join(&name);
let version = Self::get_version(&path)?;
Ok(Self { name, project_root, version, client: HttpClient::new()? })
}
/// Run the task to effectively create a release.
pub(crate) fn run(self) -> Result<()> {
println!("Starting release for {} {}…", self.name, self.version);
if self.is_released()? {
return Err("This version is already released".into());
}
let _dir = pushd(self.crate_path())?;
let remote = Self::git_remote()?;
println!("Checking status of git repository…");
if !cmd!("git status -s -uno").read()?.is_empty()
            && !Self::ask_continue("This git repository has uncommitted changes. Continue?")?
{
return Ok(());
}
println!("Publishing the package on crates.io…");
if self.is_published()?
&& !Self::ask_continue(
"This version is already published. Skip this step and continue?",
)?
{
return Ok(());
} else {
cmd!("cargo publish").run()?;
}
let changes = &self.get_changes()?;
let tag = &self.tag_name();
let title = &self.title();
println!("Creating git tag…");
if cmd!("git tag -l {tag}").read()?.is_empty() {
cmd!("git tag -s {tag} -m {title} -m {changes}").read()?;
} else if !Self::ask_continue("This tag already exists. Skip this step and continue?")? {
return Ok(());
}
println!("Pushing tag to remote repository…");
if cmd!("git ls-remote --tags {remote} {tag}").read()?.is_empty() {
cmd!("git push {remote} {tag}").run()?;
} else if !Self::ask_continue(
"This tag has already been pushed. Skip this step and continue?",
)? {
return Ok(());
}
println!("Creating release on GitHub…");
let request_body = &json!({
"tag_name": tag,
"name": title,
"body": changes.trim_softbreaks(),
})
.to_string();
self.release(request_body)?;
println!("Release created successfully!");
Ok(())
}
/// Ask the user if he wants to skip this step and continue. Returns `true` for yes.
fn ask_continue(message: &str) -> Result<bool> {
let mut input = String::new();
let stdin = stdin();
print!("{} [y/N]: ", message);
stdout().flush()?;
let mut handle = stdin.lock();
handle.read_line(&mut input)?;
input = input.trim().to_ascii_lowercase();
Ok(input == "y" || input == "yes")
}
/// Get the changes of the given version from the changelog.
fn get_changes(&self) -> Result<String> {
let changelog = read_file(self.crate_path().join("CHANGELOG.md"))?;
let lines_nb = changelog.lines().count();
let mut lines = changelog.lines();
let start = match lines.position(|l| l.starts_with(&format!("# {}", self.version))) {
Some(p) => p + 1,
None => {
return Err("Could not find version title in changelog".into());
}
};
let length = match lines.position(|l| l.starts_with("# ")) {
Some(p) => p,
None => lines_nb,
};
let changes = changelog.lines().skip(start).take(length).join("\n");
Ok(changes.trim().to_owned())
}
/// Get the title of this release.
fn title(&self) -> String {
format!("{} {}", self.name, self.version)
}
/// Get the path of the crate for this release.
fn crate_path(&self) -> PathBuf {
self.project_root.join(&self.name)
}
/// Load the GitHub config from the config file.
fn git_remote() -> Result<String> {
let branch = cmd!("git rev-parse --abbrev-ref HEAD").read()?;
let remote = cmd!("git config branch.{branch}.remote").read()?;
if remote.is_empty() {
return Err("Could not get current git remote".into());
}
Ok(remote)
}
/// Get the tag name for this release.
fn tag_name(&self) -> String {
format!("{}-{}", self.name, self.version)
}
/// Get the current version of the crate at `path` from its manifest.
fn get_version(path: &Path) -> Result<Version> {
let manifest_toml = read_file(path.join("Cargo.toml"))?;
let manifest: CargoManifest = from_toml_str(&manifest_toml)?;
Ok(manifest.package.version)
}
/// Check if the current version of the crate is published on crates.io.
fn is_published(&self) -> Result<bool> {
let response: CratesIoCrate =
self.client.get(format!("{}/{}/{}", CRATESIO_API, self.name, self.version))?.json()?;
Ok(response.version.is_some())
}
/// Check if the tag for the current version of the crate has been pushed on GitHub.
fn is_released(&self) -> Result<bool> {
let response =
self.client.get(format!("{}/releases/tags/{}", GITHUB_API_RUMA, self.tag_name()))?;
Ok(response.status() == StatusCode::OK)
}
/// Create the release on GitHub with the given `config` and `credentials`.
fn release(&self, body: &str) -> Result<()> {
let config = config()?.github;
let request = Request::post(format!("{}/releases", GITHUB_API_RUMA))
.authentication(Authentication::basic())
.credentials(Credentials::new(config.user, config.token))
.header("Accept", "application/vnd.github.v3+json")
.body(body)?;
let mut response = self.client.send(request)?;
if response.status() == StatusCode::CREATED {
Ok(())
} else {
Err(format!("{}: {}", response.status(), response.text()?).into())
}
}
}
/// The required cargo manifest data of a crate.
#[derive(Debug, Deserialize)]
struct CargoManifest {
/// The package information.
package: CargoPackage,
}
/// The required package information from a crate's cargo manifest.
#[derive(Debug, Deserialize)]
struct CargoPackage {
/// The package version.
version: Version,
}
/// A crate from the `GET /crates/{crate}` endpoint of crates.io.
#[derive(Deserialize)]
struct CratesIoCrate {
version: Option<IgnoredAny>,
}
/// A tag from the `GET /repos/{owner}/{repo}/tags` endpoint of GitHub REST API.
#[derive(Debug, Deserialize)]
struct GithubTag {
/// The name of the tag.
name: String,
}
/// String manipulations for crate release.
trait StrExt {
/// Remove soft line breaks as defined in CommonMark spec.
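    ///
    /// A rough sketch of the effect (inferred from the implementation below): a plain
    /// `"foo\nbar"` collapses to `"foo bar"`, while paragraph breaks, hard line breaks
    /// (trailing double space or backslash) and list items keep their newlines.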
fn trim_softbreaks(&self) -> String;
}
impl StrExt for str {
fn trim_softbreaks(&self) -> String {
let mut string = String::new();
let mut s = self;
while let Some(pos) = s.find('\n') {
string.push_str(&s[..pos]);
let pos_s = &s[pos..];
if pos_s.starts_with("\n\n") {
// Keep new paragraphs (multiple `\n`s).
let next = pos_s.find(|c: char| c != '\n').unwrap_or(0);
let (push, next_s) = pos_s.split_at(next);
string.push_str(push);
s = next_s;
            } else if s[..pos].ends_with("  ") || s[..pos].ends_with('\\') {
// Keep hard line breaks (two spaces or a backslash before the line break).
string.push('\n');
s = &pos_s[1..];
} else if let Some(p) = pos_s.find(|c: char| !c.is_ascii_whitespace()) {
// Keep line break before list items (`\n` + whitespaces + `*` + whitespaces).
// Remove line break and keep one space otherwise.
let mut chars = pos_s.char_indices();
let (_, char) = chars.find(|(i, _)| *i == p).unwrap();
if char == '*' || char == '-' {
match chars.next() {
Some((_, next_char)) if next_char.is_ascii_whitespace() => {
string.push('\n');
s = &pos_s[1..];
continue;
}
_ => {}
}
}
string.push(' ');
s = &pos_s[p..];
}
}
string + s
}
}
| 31.30363 | 97 | 0.548761 |
0aaee72c67866e7c0e6767b0d29c2e6b24a45add | 4,237 | use crate::Python;
use std::cell::UnsafeCell;
/// A write-once cell similar to [`once_cell::OnceCell`](https://docs.rs/once_cell/1.4.0/once_cell/).
///
/// Unlike `once_cell::sync` which blocks threads to achieve thread safety, this implementation
/// uses the Python GIL to mediate concurrent access. This helps in cases where `once_sync` or
/// `lazy_static`'s synchronization strategy can lead to deadlocks when interacting with the Python
/// GIL. For an example, see [the FAQ section](https://pyo3.rs/main/faq.html) of the guide.
///
/// # Examples
///
/// The following example shows how to use `GILOnceCell` to share a reference to a Python list
/// between threads:
///
/// ```
/// use pyo3::prelude::*;
/// use pyo3::types::PyList;
/// use pyo3::once_cell::GILOnceCell;
///
/// static LIST_CELL: GILOnceCell<Py<PyList>> = GILOnceCell::new();
///
/// pub fn get_shared_list(py: Python) -> &PyList {
/// LIST_CELL
/// .get_or_init(py, || PyList::empty(py).into())
/// .as_ref(py)
/// }
/// # Python::with_gil(|py| assert_eq!(get_shared_list(py).len(), 0));
/// ```
#[allow(clippy::upper_case_acronyms)]
pub struct GILOnceCell<T>(UnsafeCell<Option<T>>);
// T: Send is needed for Sync because the thread which drops the GILOnceCell can be different
// to the thread which fills it.
unsafe impl<T: Send + Sync> Sync for GILOnceCell<T> {}
unsafe impl<T: Send> Send for GILOnceCell<T> {}
impl<T> GILOnceCell<T> {
/// Create a `GILOnceCell` which does not yet contain a value.
pub const fn new() -> Self {
Self(UnsafeCell::new(None))
}
/// Get a reference to the contained value, or `None` if the cell has not yet been written.
pub fn get(&self, _py: Python) -> Option<&T> {
// Safe because if the cell has not yet been written, None is returned.
unsafe { &*self.0.get() }.as_ref()
}
/// Get a reference to the contained value, initializing it if needed using the provided
/// closure.
///
/// Note that:
/// 1) reentrant initialization can cause a stack overflow.
/// 2) if f() temporarily releases the GIL (e.g. by calling `Python::import`) then it is
/// possible (and well-defined) that a second thread may also call get_or_init and begin
/// calling `f()`. Even when this happens `GILOnceCell` guarantees that only **one** write
/// to the cell ever occurs - other threads will simply discard the value they compute and
/// return the result of the first complete computation.
/// 3) if f() does not release the GIL and does not panic, it is guaranteed to be called
/// exactly once, even if multiple threads attempt to call `get_or_init`
/// 4) if f() can panic but still does not release the GIL, it may be called multiple times,
/// but it is guaranteed that f() will never be called concurrently
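    ///
    /// # Example
    ///
    /// A minimal sketch of the write-once behavior (the static name is illustrative):
    ///
    /// ```
    /// use pyo3::prelude::*;
    /// use pyo3::once_cell::GILOnceCell;
    ///
    /// static ANSWER: GILOnceCell<u32> = GILOnceCell::new();
    ///
    /// Python::with_gil(|py| {
    ///     // The closure runs at most once; later calls return the cached value.
    ///     assert_eq!(*ANSWER.get_or_init(py, || 40 + 2), 42);
    ///     assert_eq!(*ANSWER.get_or_init(py, || 0), 42);
    /// });
    /// ```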
pub fn get_or_init<F>(&self, py: Python, f: F) -> &T
where
F: FnOnce() -> T,
{
let inner = unsafe { &*self.0.get() }.as_ref();
if let Some(value) = inner {
return value;
}
// Note that f() could temporarily release the GIL, so it's possible that another thread
// writes to this GILOnceCell before f() finishes. That's fine; we'll just have to discard
// the value computed here and accept a bit of wasted computation.
let value = f();
let _ = self.set(py, value);
self.get(py).unwrap()
}
/// Get the contents of the cell mutably. This is only possible if the reference to the cell is
/// unique.
pub fn get_mut(&mut self) -> Option<&mut T> {
// Safe because we have &mut self
unsafe { &mut *self.0.get() }.as_mut()
}
/// Set the value in the cell.
///
/// If the cell has already been written, `Err(value)` will be returned containing the new
/// value which was not written.
pub fn set(&self, _py: Python, value: T) -> Result<(), T> {
// Safe because GIL is held, so no other thread can be writing to this cell concurrently.
let inner = unsafe { &mut *self.0.get() };
if inner.is_some() {
return Err(value);
}
*inner = Some(value);
Ok(())
}
}
| 40.740385 | 101 | 0.632523 |
90cb16d021c550fa433ae34230e43dde1de393af | 5,745 | //! # Brutally slightly better hash validation - smaller timing leak
//! use an insecure compare, that adds 5ms per character and returns early
//!
//! ## How to go about this
//! Basically, I want do the same thing as before.
//! But also repeat the measurements to try and remove the noise caused by the rest of the machine.
//!
//! Sidenote: I am very happy that I brought down the runtime for the previous challenge.
//! Waiting 40 min to find out if a certain implementation works would be awful.
//! (Well not 40 min since the sleep has been reduced by 10x, but you know what I mean)
//!
//! ## Obeservations about timings
//! 1. If you probe the same value 3 times in a row before going to the next one, the influence of the machine is not spread enough.
//! Usually, slowdown occurs over some period of time, if all 3 measurements fall into it, the noise is always equally in there.
//! Therefore I use `0..256,0..256,0..256` instead of `0,0,0,1,1,1,...,256,256,256`
//! 2. For similar reasons, just taking the minimum duration does not really work.
//! If the slower better match has less contention than a faster worse match, the slowest of the latter can still be slower than the better match.
//! 3. parallelism makes it worse... solution that was successful with no parallelism failed before pos 10
//! 4. If at first you don't succeed, throw a few more probes/repetitions at it.
//! 5. Points 1 & 3 are no longer relevant if you do point 4 enough
//!
//! ## So in the end
//! I just keep increasing the repetitions until it works.
//! Non-parallel probing did not work reliably either and was a whole lot slower.
//! So I turned the parallel iterator back on and just increased the number.
//!
//! At about 13 probes per option, I recovered the mac in 2 out of 3 tries in under 20 seconds each.
//! And the overall ordering of the probes did not matter, so I refactored and the result stayed the same:
//!
//! ```diff
//! (0..u8::MAX).into_par_iter()
//! - .chain(0..u8::MAX).chain(0..u8::MAX)
//! - .chain(0..u8::MAX).chain(0..u8::MAX).chain(0..u8::MAX).chain(0..u8::MAX)
//! - .chain(0..u8::MAX).chain(0..u8::MAX).chain(0..u8::MAX).chain(0..u8::MAX)
//! + .flat_map(|byte| vec!(byte; 13))
//! ```
//! I declare the challenge solved :D
use std::collections::HashMap;
use std::thread::sleep;
use std::time::{Duration, Instant};
use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use cyptopals::sha1::MySha1;
use cyptopals::{random_128_bit, u32_be_bytes};
fn bruteforce(mac_len: usize, call: &mut (dyn Fn(&Vec<u8>) -> bool + Sync)) -> Vec<u8> {
let mut mac = vec![0u8; mac_len];
for i in 0..mac_len {
let start = Instant::now();
let options = time_options(call, &mut mac, i);
let byte = select_best_option(options).expect("there has to be one option");
println!("{}: final {} in {:?}", i, byte, start.elapsed());
mac[i] = byte;
}
mac
}
fn time_options(
call: &mut (dyn Fn(&Vec<u8>) -> bool + Sync),
mac: &mut Vec<u8>,
i: usize,
) -> Vec<(u8, u32)> {
const REPEAT: usize = 13;
    (0..=u8::MAX)
.into_par_iter()
.flat_map(|byte| vec![byte; REPEAT])
.map(|possible_byte| {
let mut par_mac = mac.clone();
par_mac[i] = possible_byte;
let start = Instant::now();
if call(&par_mac) {
// can't use max, since they are added while selecting the best
return (possible_byte, u32::MAX >> 4);
}
let duration = start.elapsed();
(possible_byte, duration.as_millis() as u32)
})
.collect()
}
/// just take the smallest duration for now
fn select_best_option(options: Vec<(u8, u32)>) -> Option<u8> {
// let mut m: HashMap<u8, u32> = (u8::MIN..u8::MAX).zip(vec![u32::MAX; 256]).collect();
    let mut m: HashMap<u8, u32> = (u8::MIN..=u8::MAX).zip(vec![0u32; 256]).collect();
for (byte, duration) in options {
let tmp = m[&byte] + duration;
m.insert(byte, tmp);
}
let mut v: Vec<(u8, u32)> = m.into_iter().collect();
v.sort_by_key(|x| u32::MAX - x.1);
println!("Options: {:?}", v);
println!("Options: {:?}", &v[0..3]);
Some(v.get(0).expect("at least 1").0)
}
fn insecure_equals(a: &Vec<u8>, b: &Vec<u8>) -> bool {
if a.len() != b.len() {
return false;
}
for i in 0..a.len() {
sleep(Duration::from_millis(5));
if a[i] != b[i] {
return false;
}
}
return true;
}
fn main() {
// give rayon 256 threads, one for each option of u8.
// This is not a problem, since the majority of the time is spent waiting for sleeps to finish
ThreadPoolBuilder::new()
.num_threads(256)
.build_global()
.expect("should succeed");
let key = random_128_bit();
let data = b"malicious file".to_vec();
let correct_mac = u32_be_bytes(&MySha1::hmac(&key, &data));
println!("target: {:?}", correct_mac);
let start = Instant::now();
let bf_mac = bruteforce(20, &mut |mac: &Vec<u8>| {
insecure_equals(&u32_be_bytes(&MySha1::hmac(&key, &data)), mac)
});
println!("took: {:?}", start.elapsed());
println!("should be: {:?}", correct_mac);
println!("was : {:?}", bf_mac);
assert_eq!(bf_mac, correct_mac)
}
#[cfg(test)]
mod test {
use super::*;
#[test]
#[ignore] // pretty flaky test
fn test_with_very_short_mac() {
ThreadPoolBuilder::new()
.num_threads(256)
.build_global()
.expect("should succeed");
let test_mac = vec![5u8; 3];
let bf_mac = bruteforce(3, &mut |mac: &Vec<u8>| insecure_equals(&test_mac, mac));
assert_eq!(bf_mac, test_mac)
}
}
| 38.3 | 149 | 0.613055 |
14249171e9affaceb82e17cfd87d24d13d0070a4 | 5,231 | // Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
pub(crate) mod cache_info;
mod fdt;
/// Module for the global interrupt controller configuration.
pub mod gic;
/// Layout for this aarch64 system.
pub mod layout;
/// Logic for configuring aarch64 registers.
pub mod regs;
use std::cmp::min;
use std::collections::HashMap;
use std::fmt::Debug;
pub use self::fdt::DeviceInfoForFDT;
use self::gic::GICDevice;
use crate::DeviceType;
use vm_memory::{Address, GuestAddress, GuestMemory, GuestMemoryMmap};
/// Errors thrown while configuring aarch64 system.
#[derive(Debug)]
pub enum Error {
/// Failed to create a Flattened Device Tree for this aarch64 microVM.
SetupFDT(fdt::Error),
/// Failed to compute the initrd address.
InitrdAddress,
}
/// The start of the memory area reserved for MMIO devices.
pub const MMIO_MEM_START: u64 = layout::MAPPED_IO_START;
/// The size of the memory area reserved for MMIO devices.
pub const MMIO_MEM_SIZE: u64 = layout::DRAM_MEM_START - layout::MAPPED_IO_START; // 1 GB
/// Returns a Vec of the valid memory addresses for aarch64.
/// See [`layout`](layout) module for a drawing of the specific memory model for this platform.
pub fn arch_memory_regions(size: usize) -> Vec<(GuestAddress, usize)> {
let dram_size = min(size as u64, layout::DRAM_MEM_MAX_SIZE) as usize;
vec![(GuestAddress(layout::DRAM_MEM_START), dram_size)]
}
/// Configures the system and should be called once per vm before starting vcpu threads.
/// For aarch64, we only setup the FDT.
///
/// # Arguments
///
/// * `guest_mem` - The memory to be used by the guest.
/// * `cmdline_cstring` - The kernel commandline.
/// * `vcpu_mpidr` - Array of MPIDR register values per vcpu.
/// * `device_info` - A hashmap containing the attached devices for building FDT device nodes.
/// * `gic_device` - The GIC device.
/// * `initrd` - Information about an optional initrd.
pub fn configure_system<T: DeviceInfoForFDT + Clone + Debug, S: std::hash::BuildHasher>(
guest_mem: &GuestMemoryMmap,
cmdline_cstring: &str,
vcpu_mpidr: Vec<u64>,
device_info: &HashMap<(DeviceType, String), T, S>,
gic_device: &dyn GICDevice,
initrd: &Option<super::InitrdConfig>,
) -> super::Result<()> {
fdt::create_fdt(
guest_mem,
vcpu_mpidr,
cmdline_cstring,
device_info,
gic_device,
initrd,
)
.map_err(Error::SetupFDT)?;
Ok(())
}
/// Returns the memory address where the kernel could be loaded.
pub fn get_kernel_start() -> u64 {
layout::DRAM_MEM_START
}
/// Returns the memory address where the initrd could be loaded.
pub fn initrd_load_addr(guest_mem: &GuestMemoryMmap, initrd_size: usize) -> super::Result<u64> {
let round_to_pagesize = |size| (size + (super::PAGE_SIZE - 1)) & !(super::PAGE_SIZE - 1);
match GuestAddress(get_fdt_addr(&guest_mem)).checked_sub(round_to_pagesize(initrd_size) as u64)
{
Some(offset) => {
if guest_mem.address_in_range(offset) {
Ok(offset.raw_value())
} else {
Err(Error::InitrdAddress)
}
}
None => Err(Error::InitrdAddress),
}
}
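// Illustrative sketch only (not used by the code above): the closure in
// `initrd_load_addr` rounds a size up to the next page boundary with the usual
// power-of-two mask trick. Assuming a 4096-byte page (the real value comes from
// `super::PAGE_SIZE`), 1 -> 4096, 4096 -> 4096 and 4097 -> 8192.
#[allow(dead_code)]
fn round_up_to_page_sketch(size: usize, page_size: usize) -> usize {
    (size + (page_size - 1)) & !(page_size - 1)
}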
// Auxiliary function to get the address where the device tree blob is loaded.
fn get_fdt_addr(mem: &GuestMemoryMmap) -> u64 {
    // If the allocated guest memory is smaller than the space reserved for the FDT,
    // fall back to the start of DRAM so the caller can still attempt to load the
    // FDT there.
if let Some(addr) = mem.last_addr().checked_sub(layout::FDT_MAX_SIZE as u64 - 1) {
if mem.address_in_range(addr) {
return addr.raw_value();
}
}
layout::DRAM_MEM_START
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_regions_lt_1024gb() {
let regions = arch_memory_regions(1usize << 29);
assert_eq!(1, regions.len());
assert_eq!(GuestAddress(super::layout::DRAM_MEM_START), regions[0].0);
assert_eq!(1usize << 29, regions[0].1);
}
#[test]
fn test_regions_gt_1024gb() {
let regions = arch_memory_regions(1usize << 41);
assert_eq!(1, regions.len());
assert_eq!(GuestAddress(super::layout::DRAM_MEM_START), regions[0].0);
assert_eq!(super::layout::DRAM_MEM_MAX_SIZE, regions[0].1 as u64);
}
#[test]
fn test_get_fdt_addr() {
let regions = arch_memory_regions(layout::FDT_MAX_SIZE - 0x1000);
let mem = vm_memory::test_utils::create_anon_guest_memory(®ions, false)
.expect("Cannot initialize memory");
assert_eq!(get_fdt_addr(&mem), layout::DRAM_MEM_START);
let regions = arch_memory_regions(layout::FDT_MAX_SIZE);
let mem = vm_memory::test_utils::create_anon_guest_memory(®ions, false)
.expect("Cannot initialize memory");
assert_eq!(get_fdt_addr(&mem), layout::DRAM_MEM_START);
let regions = arch_memory_regions(layout::FDT_MAX_SIZE + 0x1000);
let mem = vm_memory::test_utils::create_anon_guest_memory(®ions, false)
.expect("Cannot initialize memory");
assert_eq!(get_fdt_addr(&mem), 0x1000 + layout::DRAM_MEM_START);
}
}
| 35.344595 | 99 | 0.673103 |
e2154f58494cbf460b1c8472b9e5e3bdd70234ef | 1,194 | use crate::spec::{LinkerFlavor, LldFlavor, PanicStrategy, RelocModel};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
Target {
data_layout: "e-m:e-p:32:32-i64:64-n32-S128".to_string(),
llvm_target: "riscv32".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "32".to_string(),
target_c_int_width: "32".to_string(),
target_os: "none".to_string(),
target_env: String::new(),
target_vendor: "unknown".to_string(),
arch: "riscv32".to_string(),
linker_flavor: LinkerFlavor::Lld(LldFlavor::Ld),
options: TargetOptions {
linker: Some("rust-lld".to_string()),
cpu: "generic-rv32".to_string(),
max_atomic_width: Some(32),
atomic_cas: true,
features: "+m,+a,+c".to_string(),
executables: true,
panic_strategy: PanicStrategy::Abort,
relocation_model: RelocModel::Static,
emit_debug_gdb_scripts: false,
unsupported_abis: super::riscv_base::unsupported_abis(),
eh_frame_header: false,
..Default::default()
},
}
}
| 36.181818 | 70 | 0.59129 |
dd06f40a68249ae9b0b01d177f3674ce93f89b9d | 4,629 | #[doc = "Register `ENABLE` reader"]
pub struct R(crate::R<ENABLE_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<ENABLE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<ENABLE_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<ENABLE_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `ENABLE` writer"]
pub struct W(crate::W<ENABLE_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<ENABLE_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<ENABLE_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<ENABLE_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Enable I2S module.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENABLE_A {
#[doc = "0: Disable"]
DISABLED = 0,
#[doc = "1: Enable"]
ENABLED = 1,
}
impl From<ENABLE_A> for bool {
#[inline(always)]
fn from(variant: ENABLE_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `ENABLE` reader - Enable I2S module."]
pub struct ENABLE_R(crate::FieldReader<bool, ENABLE_A>);
impl ENABLE_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
ENABLE_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ENABLE_A {
match self.bits {
false => ENABLE_A::DISABLED,
true => ENABLE_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == ENABLE_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == ENABLE_A::ENABLED
}
}
impl core::ops::Deref for ENABLE_R {
type Target = crate::FieldReader<bool, ENABLE_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ENABLE` writer - Enable I2S module."]
pub struct ENABLE_W<'a> {
w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ENABLE_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(ENABLE_A::DISABLED)
}
#[doc = "Enable"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(ENABLE_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
impl R {
#[doc = "Bit 0 - Enable I2S module."]
#[inline(always)]
pub fn enable(&self) -> ENABLE_R {
ENABLE_R::new((self.bits & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Enable I2S module."]
#[inline(always)]
pub fn enable(&mut self) -> ENABLE_W {
ENABLE_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Enable I2S module.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [enable](index.html) module"]
pub struct ENABLE_SPEC;
impl crate::RegisterSpec for ENABLE_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [enable::R](R) reader structure"]
impl crate::Readable for ENABLE_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [enable::W](W) writer structure"]
impl crate::Writable for ENABLE_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets ENABLE to value 0"]
impl crate::Resettable for ENABLE_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
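// Hypothetical usage sketch: the peripheral binding name `i2s` below is an
// assumption for illustration and is not defined in this file. The generated
// accessors above are normally driven through the closure-based API documented
// on `ENABLE_SPEC`, e.g.
//
//     i2s.enable.write(|w| w.enable().enabled());
//     let running = i2s.enable.read().enable().is_enabled();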
| 28.751553 | 405 | 0.578743 |
dd4cee5a4476ae1474954769045cd5eae94b67eb | 1,055 | use crate::opcode::Number;
pub(crate) fn sum_of_divisors(number: Number) -> Number {
let mut sum = 0;
let mut divisor = 1;
while divisor <= number {
if number % divisor == 0 {
sum += divisor;
}
divisor += 1;
}
sum
}
#[cfg(test)]
mod tests {
use super::*;
use crate::device::Device;
use line_reader::{read_file_to_lines, read_str_to_lines};
const EXAMPLE_PROGRAM: &str = "\
#ip 0
seti 5 0 1
seti 6 0 2
addi 0 1 0
addr 1 2 3
setr 1 0 0
seti 8 0 4
seti 9 0 5";
#[test]
fn run_example_program() {
let program = read_str_to_lines(EXAMPLE_PROGRAM);
assert_eq!(6, Device::default().run_program(&program));
}
#[test]
fn part1() {
let program = read_file_to_lines("input/day19.txt");
assert_eq!(1872, Device::default().run_program(&program));
}
#[test]
fn part2() {
assert_eq!(
1 + 2 + 5 + 10 + 1_055_143 + 2_110_286 + 5_275_715 + 10_551_430,
sum_of_divisors(10_551_430)
);
}
}
| 20.686275 | 76 | 0.56872 |
1d62b52ea7d52307060ed360289447c739257340 | 37,293 | #[doc = "Reader of register chen"]
pub type R = crate::R<u64, super::CHEN>;
#[doc = "Writer for register chen"]
pub type W = crate::W<u64, super::CHEN>;
#[doc = "Register chen `reset()`'s with value 0"]
impl crate::ResetValue for super::CHEN {
type Type = u64;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `ch1_en`"]
pub type CH1_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_en`"]
pub struct CH1_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u64) & 0x01);
self.w
}
}
#[doc = "Reader of field `ch2_en`"]
pub type CH2_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_en`"]
pub struct CH2_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u64) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `ch3_en`"]
pub type CH3_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_en`"]
pub struct CH3_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u64) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `ch4_en`"]
pub type CH4_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_en`"]
pub struct CH4_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u64) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `ch5_en`"]
pub type CH5_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_en`"]
pub struct CH5_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u64) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `ch6_en`"]
pub type CH6_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_en`"]
pub struct CH6_EN_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u64) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `ch1_en_we`"]
pub type CH1_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_en_we`"]
pub struct CH1_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u64) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `ch2_en_we`"]
pub type CH2_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_en_we`"]
pub struct CH2_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u64) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `ch3_en_we`"]
pub type CH3_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_en_we`"]
pub struct CH3_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u64) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `ch4_en_we`"]
pub type CH4_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_en_we`"]
pub struct CH4_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u64) & 0x01) << 11);
self.w
}
}
#[doc = "Reader of field `ch5_en_we`"]
pub type CH5_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_en_we`"]
pub struct CH5_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u64) & 0x01) << 12);
self.w
}
}
#[doc = "Reader of field `ch6_en_we`"]
pub type CH6_EN_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_en_we`"]
pub struct CH6_EN_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_EN_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u64) & 0x01) << 13);
self.w
}
}
#[doc = "Reader of field `ch1_susp`"]
pub type CH1_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_susp`"]
pub struct CH1_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u64) & 0x01) << 16);
self.w
}
}
#[doc = "Reader of field `ch2_susp`"]
pub type CH2_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_susp`"]
pub struct CH2_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u64) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `ch3_susp`"]
pub type CH3_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_susp`"]
pub struct CH3_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u64) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `ch4_susp`"]
pub type CH4_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_susp`"]
pub struct CH4_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u64) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `ch5_susp`"]
pub type CH5_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_susp`"]
pub struct CH5_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u64) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `ch6_susp`"]
pub type CH6_SUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_susp`"]
pub struct CH6_SUSP_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_SUSP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u64) & 0x01) << 21);
self.w
}
}
#[doc = "Reader of field `ch1_susp_we`"]
pub type CH1_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_susp_we`"]
pub struct CH1_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u64) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `ch2_susp_we`"]
pub type CH2_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_susp_we`"]
pub struct CH2_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u64) & 0x01) << 25);
self.w
}
}
#[doc = "Reader of field `ch3_susp_we`"]
pub type CH3_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_susp_we`"]
pub struct CH3_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u64) & 0x01) << 26);
self.w
}
}
#[doc = "Reader of field `ch4_susp_we`"]
pub type CH4_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_susp_we`"]
pub struct CH4_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u64) & 0x01) << 27);
self.w
}
}
#[doc = "Reader of field `ch5_susp_we`"]
pub type CH5_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_susp_we`"]
pub struct CH5_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u64) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `ch6_susp_we`"]
pub type CH6_SUSP_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_susp_we`"]
pub struct CH6_SUSP_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_SUSP_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u64) & 0x01) << 29);
self.w
}
}
#[doc = "Reader of field `ch1_abort`"]
pub type CH1_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_abort`"]
pub struct CH1_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 32)) | (((value as u64) & 0x01) << 32);
self.w
}
}
#[doc = "Reader of field `ch2_abort`"]
pub type CH2_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_abort`"]
pub struct CH2_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 33)) | (((value as u64) & 0x01) << 33);
self.w
}
}
#[doc = "Reader of field `ch3_abort`"]
pub type CH3_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_abort`"]
pub struct CH3_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 34)) | (((value as u64) & 0x01) << 34);
self.w
}
}
#[doc = "Reader of field `ch4_abort`"]
pub type CH4_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_abort`"]
pub struct CH4_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 35)) | (((value as u64) & 0x01) << 35);
self.w
}
}
#[doc = "Reader of field `ch5_abort`"]
pub type CH5_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_abort`"]
pub struct CH5_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 36)) | (((value as u64) & 0x01) << 36);
self.w
}
}
#[doc = "Reader of field `ch6_abort`"]
pub type CH6_ABORT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_abort`"]
pub struct CH6_ABORT_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_ABORT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 37)) | (((value as u64) & 0x01) << 37);
self.w
}
}
#[doc = "Reader of field `ch1_abort_we`"]
pub type CH1_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch1_abort_we`"]
pub struct CH1_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 40)) | (((value as u64) & 0x01) << 40);
self.w
}
}
#[doc = "Reader of field `ch2_abort_we`"]
pub type CH2_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch2_abort_we`"]
pub struct CH2_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH2_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 41)) | (((value as u64) & 0x01) << 41);
self.w
}
}
#[doc = "Reader of field `ch3_abort_we`"]
pub type CH3_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch3_abort_we`"]
pub struct CH3_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 42)) | (((value as u64) & 0x01) << 42);
self.w
}
}
#[doc = "Reader of field `ch4_abort_we`"]
pub type CH4_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch4_abort_we`"]
pub struct CH4_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 43)) | (((value as u64) & 0x01) << 43);
self.w
}
}
#[doc = "Reader of field `ch5_abort_we`"]
pub type CH5_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch5_abort_we`"]
pub struct CH5_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 44)) | (((value as u64) & 0x01) << 44);
self.w
}
}
#[doc = "Reader of field `ch6_abort_we`"]
pub type CH6_ABORT_WE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ch6_abort_we`"]
pub struct CH6_ABORT_WE_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_ABORT_WE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 45)) | (((value as u64) & 0x01) << 45);
self.w
}
}
impl R {
#[doc = "Bit 0 - Enable channel 1"]
#[inline(always)]
pub fn ch1_en(&self) -> CH1_EN_R {
CH1_EN_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Enable channel 2"]
#[inline(always)]
pub fn ch2_en(&self) -> CH2_EN_R {
CH2_EN_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Enable channel 3"]
#[inline(always)]
pub fn ch3_en(&self) -> CH3_EN_R {
CH3_EN_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Enable channel 4"]
#[inline(always)]
pub fn ch4_en(&self) -> CH4_EN_R {
CH4_EN_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Enable channel 5"]
#[inline(always)]
pub fn ch5_en(&self) -> CH5_EN_R {
CH5_EN_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Enable channel 6"]
#[inline(always)]
pub fn ch6_en(&self) -> CH6_EN_R {
CH6_EN_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 8 - Write enable channel 1"]
#[inline(always)]
pub fn ch1_en_we(&self) -> CH1_EN_WE_R {
CH1_EN_WE_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - Write enable channel 2"]
#[inline(always)]
pub fn ch2_en_we(&self) -> CH2_EN_WE_R {
CH2_EN_WE_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Write enable channel 3"]
#[inline(always)]
pub fn ch3_en_we(&self) -> CH3_EN_WE_R {
CH3_EN_WE_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - Write enable channel 4"]
#[inline(always)]
pub fn ch4_en_we(&self) -> CH4_EN_WE_R {
CH4_EN_WE_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - Write enable channel 5"]
#[inline(always)]
pub fn ch5_en_we(&self) -> CH5_EN_WE_R {
CH5_EN_WE_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 13 - Write enable channel 6"]
#[inline(always)]
pub fn ch6_en_we(&self) -> CH6_EN_WE_R {
CH6_EN_WE_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 16 - Suspend request channel 1"]
#[inline(always)]
pub fn ch1_susp(&self) -> CH1_SUSP_R {
CH1_SUSP_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - Suspend request channel 2"]
#[inline(always)]
pub fn ch2_susp(&self) -> CH2_SUSP_R {
CH2_SUSP_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - Suspend request channel 3"]
#[inline(always)]
pub fn ch3_susp(&self) -> CH3_SUSP_R {
CH3_SUSP_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - Suspend request channel 4"]
#[inline(always)]
pub fn ch4_susp(&self) -> CH4_SUSP_R {
CH4_SUSP_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 20 - Suspend request channel 5"]
#[inline(always)]
pub fn ch5_susp(&self) -> CH5_SUSP_R {
CH5_SUSP_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 21 - Suspend request channel 6"]
#[inline(always)]
pub fn ch6_susp(&self) -> CH6_SUSP_R {
CH6_SUSP_R::new(((self.bits >> 21) & 0x01) != 0)
}
#[doc = "Bit 24 - Enable write to ch1_susp bit"]
#[inline(always)]
pub fn ch1_susp_we(&self) -> CH1_SUSP_WE_R {
CH1_SUSP_WE_R::new(((self.bits >> 24) & 0x01) != 0)
}
#[doc = "Bit 25 - Enable write to ch2_susp bit"]
#[inline(always)]
pub fn ch2_susp_we(&self) -> CH2_SUSP_WE_R {
CH2_SUSP_WE_R::new(((self.bits >> 25) & 0x01) != 0)
}
#[doc = "Bit 26 - Enable write to ch3_susp bit"]
#[inline(always)]
pub fn ch3_susp_we(&self) -> CH3_SUSP_WE_R {
CH3_SUSP_WE_R::new(((self.bits >> 26) & 0x01) != 0)
}
#[doc = "Bit 27 - Enable write to ch4_susp bit"]
#[inline(always)]
pub fn ch4_susp_we(&self) -> CH4_SUSP_WE_R {
CH4_SUSP_WE_R::new(((self.bits >> 27) & 0x01) != 0)
}
#[doc = "Bit 28 - Enable write to ch5_susp bit"]
#[inline(always)]
pub fn ch5_susp_we(&self) -> CH5_SUSP_WE_R {
CH5_SUSP_WE_R::new(((self.bits >> 28) & 0x01) != 0)
}
#[doc = "Bit 29 - Enable write to ch6_susp bit"]
#[inline(always)]
pub fn ch6_susp_we(&self) -> CH6_SUSP_WE_R {
CH6_SUSP_WE_R::new(((self.bits >> 29) & 0x01) != 0)
}
#[doc = "Bit 32 - Abort request channel 1"]
#[inline(always)]
pub fn ch1_abort(&self) -> CH1_ABORT_R {
CH1_ABORT_R::new(((self.bits >> 32) & 0x01) != 0)
}
#[doc = "Bit 33 - Abort request channel 2"]
#[inline(always)]
pub fn ch2_abort(&self) -> CH2_ABORT_R {
CH2_ABORT_R::new(((self.bits >> 33) & 0x01) != 0)
}
#[doc = "Bit 34 - Abort request channel 3"]
#[inline(always)]
pub fn ch3_abort(&self) -> CH3_ABORT_R {
CH3_ABORT_R::new(((self.bits >> 34) & 0x01) != 0)
}
#[doc = "Bit 35 - Abort request channel 4"]
#[inline(always)]
pub fn ch4_abort(&self) -> CH4_ABORT_R {
CH4_ABORT_R::new(((self.bits >> 35) & 0x01) != 0)
}
#[doc = "Bit 36 - Abort request channel 5"]
#[inline(always)]
pub fn ch5_abort(&self) -> CH5_ABORT_R {
CH5_ABORT_R::new(((self.bits >> 36) & 0x01) != 0)
}
#[doc = "Bit 37 - Abort request channel 6"]
#[inline(always)]
pub fn ch6_abort(&self) -> CH6_ABORT_R {
CH6_ABORT_R::new(((self.bits >> 37) & 0x01) != 0)
}
#[doc = "Bit 40 - Enable write to ch1_abort bit"]
#[inline(always)]
pub fn ch1_abort_we(&self) -> CH1_ABORT_WE_R {
CH1_ABORT_WE_R::new(((self.bits >> 40) & 0x01) != 0)
}
#[doc = "Bit 41 - Enable write to ch2_abort bit"]
#[inline(always)]
pub fn ch2_abort_we(&self) -> CH2_ABORT_WE_R {
CH2_ABORT_WE_R::new(((self.bits >> 41) & 0x01) != 0)
}
#[doc = "Bit 42 - Enable write to ch3_abort bit"]
#[inline(always)]
pub fn ch3_abort_we(&self) -> CH3_ABORT_WE_R {
CH3_ABORT_WE_R::new(((self.bits >> 42) & 0x01) != 0)
}
#[doc = "Bit 43 - Enable write to ch4_abort bit"]
#[inline(always)]
pub fn ch4_abort_we(&self) -> CH4_ABORT_WE_R {
CH4_ABORT_WE_R::new(((self.bits >> 43) & 0x01) != 0)
}
#[doc = "Bit 44 - Enable write to ch5_abort bit"]
#[inline(always)]
pub fn ch5_abort_we(&self) -> CH5_ABORT_WE_R {
CH5_ABORT_WE_R::new(((self.bits >> 44) & 0x01) != 0)
}
#[doc = "Bit 45 - Enable write to ch6_abort bit"]
#[inline(always)]
pub fn ch6_abort_we(&self) -> CH6_ABORT_WE_R {
CH6_ABORT_WE_R::new(((self.bits >> 45) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Enable channel 1"]
#[inline(always)]
pub fn ch1_en(&mut self) -> CH1_EN_W {
CH1_EN_W { w: self }
}
#[doc = "Bit 1 - Enable channel 2"]
#[inline(always)]
pub fn ch2_en(&mut self) -> CH2_EN_W {
CH2_EN_W { w: self }
}
#[doc = "Bit 2 - Enable channel 3"]
#[inline(always)]
pub fn ch3_en(&mut self) -> CH3_EN_W {
CH3_EN_W { w: self }
}
#[doc = "Bit 3 - Enable channel 4"]
#[inline(always)]
pub fn ch4_en(&mut self) -> CH4_EN_W {
CH4_EN_W { w: self }
}
#[doc = "Bit 4 - Enable channel 5"]
#[inline(always)]
pub fn ch5_en(&mut self) -> CH5_EN_W {
CH5_EN_W { w: self }
}
#[doc = "Bit 5 - Enable channel 6"]
#[inline(always)]
pub fn ch6_en(&mut self) -> CH6_EN_W {
CH6_EN_W { w: self }
}
#[doc = "Bit 8 - Write enable channel 1"]
#[inline(always)]
pub fn ch1_en_we(&mut self) -> CH1_EN_WE_W {
CH1_EN_WE_W { w: self }
}
#[doc = "Bit 9 - Write enable channel 2"]
#[inline(always)]
pub fn ch2_en_we(&mut self) -> CH2_EN_WE_W {
CH2_EN_WE_W { w: self }
}
#[doc = "Bit 10 - Write enable channel 3"]
#[inline(always)]
pub fn ch3_en_we(&mut self) -> CH3_EN_WE_W {
CH3_EN_WE_W { w: self }
}
#[doc = "Bit 11 - Write enable channel 4"]
#[inline(always)]
pub fn ch4_en_we(&mut self) -> CH4_EN_WE_W {
CH4_EN_WE_W { w: self }
}
#[doc = "Bit 12 - Write enable channel 5"]
#[inline(always)]
pub fn ch5_en_we(&mut self) -> CH5_EN_WE_W {
CH5_EN_WE_W { w: self }
}
#[doc = "Bit 13 - Write enable channel 6"]
#[inline(always)]
pub fn ch6_en_we(&mut self) -> CH6_EN_WE_W {
CH6_EN_WE_W { w: self }
}
#[doc = "Bit 16 - Suspend request channel 1"]
#[inline(always)]
pub fn ch1_susp(&mut self) -> CH1_SUSP_W {
CH1_SUSP_W { w: self }
}
#[doc = "Bit 17 - Suspend request channel 2"]
#[inline(always)]
pub fn ch2_susp(&mut self) -> CH2_SUSP_W {
CH2_SUSP_W { w: self }
}
#[doc = "Bit 18 - Suspend request channel 3"]
#[inline(always)]
pub fn ch3_susp(&mut self) -> CH3_SUSP_W {
CH3_SUSP_W { w: self }
}
#[doc = "Bit 19 - Suspend request channel 4"]
#[inline(always)]
pub fn ch4_susp(&mut self) -> CH4_SUSP_W {
CH4_SUSP_W { w: self }
}
#[doc = "Bit 20 - Suspend request channel 5"]
#[inline(always)]
pub fn ch5_susp(&mut self) -> CH5_SUSP_W {
CH5_SUSP_W { w: self }
}
#[doc = "Bit 21 - Suspend request channel 6"]
#[inline(always)]
pub fn ch6_susp(&mut self) -> CH6_SUSP_W {
CH6_SUSP_W { w: self }
}
#[doc = "Bit 24 - Enable write to ch1_susp bit"]
#[inline(always)]
pub fn ch1_susp_we(&mut self) -> CH1_SUSP_WE_W {
CH1_SUSP_WE_W { w: self }
}
#[doc = "Bit 25 - Enable write to ch2_susp bit"]
#[inline(always)]
pub fn ch2_susp_we(&mut self) -> CH2_SUSP_WE_W {
CH2_SUSP_WE_W { w: self }
}
#[doc = "Bit 26 - Enable write to ch3_susp bit"]
#[inline(always)]
pub fn ch3_susp_we(&mut self) -> CH3_SUSP_WE_W {
CH3_SUSP_WE_W { w: self }
}
#[doc = "Bit 27 - Enable write to ch4_susp bit"]
#[inline(always)]
pub fn ch4_susp_we(&mut self) -> CH4_SUSP_WE_W {
CH4_SUSP_WE_W { w: self }
}
#[doc = "Bit 28 - Enable write to ch5_susp bit"]
#[inline(always)]
pub fn ch5_susp_we(&mut self) -> CH5_SUSP_WE_W {
CH5_SUSP_WE_W { w: self }
}
#[doc = "Bit 29 - Enable write to ch6_susp bit"]
#[inline(always)]
pub fn ch6_susp_we(&mut self) -> CH6_SUSP_WE_W {
CH6_SUSP_WE_W { w: self }
}
#[doc = "Bit 32 - Abort request channel 1"]
#[inline(always)]
pub fn ch1_abort(&mut self) -> CH1_ABORT_W {
CH1_ABORT_W { w: self }
}
#[doc = "Bit 33 - Abort request channel 2"]
#[inline(always)]
pub fn ch2_abort(&mut self) -> CH2_ABORT_W {
CH2_ABORT_W { w: self }
}
#[doc = "Bit 34 - Abort request channel 3"]
#[inline(always)]
pub fn ch3_abort(&mut self) -> CH3_ABORT_W {
CH3_ABORT_W { w: self }
}
#[doc = "Bit 35 - Abort request channel 4"]
#[inline(always)]
pub fn ch4_abort(&mut self) -> CH4_ABORT_W {
CH4_ABORT_W { w: self }
}
#[doc = "Bit 36 - Abort request channel 5"]
#[inline(always)]
pub fn ch5_abort(&mut self) -> CH5_ABORT_W {
CH5_ABORT_W { w: self }
}
#[doc = "Bit 37 - Abort request channel 6"]
#[inline(always)]
pub fn ch6_abort(&mut self) -> CH6_ABORT_W {
CH6_ABORT_W { w: self }
}
#[doc = "Bit 40 - Enable write to ch1_abort bit"]
#[inline(always)]
pub fn ch1_abort_we(&mut self) -> CH1_ABORT_WE_W {
CH1_ABORT_WE_W { w: self }
}
#[doc = "Bit 41 - Enable write to ch2_abort bit"]
#[inline(always)]
pub fn ch2_abort_we(&mut self) -> CH2_ABORT_WE_W {
CH2_ABORT_WE_W { w: self }
}
#[doc = "Bit 42 - Enable write to ch3_abort bit"]
#[inline(always)]
pub fn ch3_abort_we(&mut self) -> CH3_ABORT_WE_W {
CH3_ABORT_WE_W { w: self }
}
#[doc = "Bit 43 - Enable write to ch4_abort bit"]
#[inline(always)]
pub fn ch4_abort_we(&mut self) -> CH4_ABORT_WE_W {
CH4_ABORT_WE_W { w: self }
}
#[doc = "Bit 44 - Enable write to ch5_abort bit"]
#[inline(always)]
pub fn ch5_abort_we(&mut self) -> CH5_ABORT_WE_W {
CH5_ABORT_WE_W { w: self }
}
#[doc = "Bit 45 - Enable write to ch6_abort bit"]
#[inline(always)]
pub fn ch6_abort_we(&mut self) -> CH6_ABORT_WE_W {
CH6_ABORT_WE_W { w: self }
}
}
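// Hypothetical usage sketch: `dmac` below stands for whatever peripheral instance
// exposes this register and is not defined in this file. As the field docs above
// note, each channel bit has a paired write-enable bit, so a channel bit only takes
// effect if its matching `_we` bit is set in the same write, e.g. to enable channel 1:
//
//     dmac.chen.write(|w| w.ch1_en().set_bit().ch1_en_we().set_bit());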
| 30.050766 | 86 | 0.546349 |
ac7f24e68f13633636c4bf3bf61df4e491518970 | 1,953 | #[derive(Debug)]
pub struct CustomSet<T> {
collection: Vec<T>,
}
impl<T: Ord + Clone> PartialEq for CustomSet<T> {
fn eq(&self, other: &Self) -> bool {
self.collection.iter().all(|x| other.contains(&x)) &&
other.collection.iter().all(|x| self.contains(&x))
}
}
impl<T: Ord + Clone> CustomSet<T> {
pub fn new(inputs: &[T]) -> CustomSet<T> {
let mut s = CustomSet { collection: Vec::new() };
for input in inputs {
s.add(input.clone());
}
s
}
pub fn add(&mut self, element: T) {
if !self.contains(&element) {
self.collection.push(element)
}
}
pub fn contains(&self, other: &T) -> bool {
self.collection.contains(other)
}
pub fn is_empty(&self) -> bool {
self.collection.is_empty()
}
pub fn is_subset(&self, other: &Self) -> bool {
self.collection.iter().all(|x| other.contains(x))
}
pub fn is_disjoint(&self, other: &Self) -> bool {
!self.collection.iter().any(|x| other.contains(x))
}
pub fn intersection(&self, other: &Self) -> CustomSet<T> {
CustomSet::new(&self.collection
.iter()
.cloned()
.filter(|c| other.contains(c))
.collect::<Vec<_>>())
}
pub fn union(&self, other: &Self) -> CustomSet<T> {
CustomSet::new(&self.collection
.iter()
.cloned()
.chain(other.collection.iter().cloned())
.collect::<Vec<_>>())
}
pub fn difference(&self, other: &Self) -> CustomSet<T> {
CustomSet::new(&self.collection
.iter()
.cloned()
.filter(|c| !other.contains(c))
.collect::<Vec<_>>())
}
}
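// Illustrative usage sketch (not part of the original exercise solution):
// duplicates are ignored on construction and the binary operations behave like
// the usual mathematical set operations.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn union_intersection_difference() {
        let a = CustomSet::new(&[1, 2, 2, 3]);
        let b = CustomSet::new(&[3, 4]);
        assert_eq!(a.union(&b), CustomSet::new(&[1, 2, 3, 4]));
        assert_eq!(a.intersection(&b), CustomSet::new(&[3]));
        assert_eq!(a.difference(&b), CustomSet::new(&[1, 2]));
        assert!(a.is_subset(&a.union(&b)));
    }
}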
| 28.720588 | 68 | 0.465438 |
0816b0e56533c93df0eaf354e34cff420d2f7f0b | 28,115 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::APB1ENR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `TIM2EN`"]
pub type TIM2ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `TIM3EN`"]
pub type TIM3ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `TIM4EN`"]
pub type TIM4ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `TIM6EN`"]
pub type TIM6ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `TIM7EN`"]
pub type TIM7ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `WWDGEN`"]
pub type WWDGENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `SPI2EN`"]
pub type SPI2ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `SPI3EN`"]
pub type SPI3ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `USART2EN`"]
pub type USART2ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `USART3EN`"]
pub type USART3ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `I2C1EN`"]
pub type I2C1ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `I2C2EN`"]
pub type I2C2ENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `USBEN`"]
pub type USBENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `CANEN`"]
pub type CANENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `PWREN`"]
pub type PWRENR = super::ahbenr::DMAENR;
#[doc = "Possible values of the field `DACEN`"]
pub type DACENR = super::ahbenr::DMAENR;
#[doc = "Values that can be written to the field `TIM2EN`"]
pub type TIM2ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _TIM2ENW<'a> {
w: &'a mut W,
}
impl<'a> _TIM2ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TIM2ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `TIM3EN`"]
pub type TIM3ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _TIM3ENW<'a> {
w: &'a mut W,
}
impl<'a> _TIM3ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TIM3ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `TIM4EN`"]
pub type TIM4ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _TIM4ENW<'a> {
w: &'a mut W,
}
impl<'a> _TIM4ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TIM4ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `TIM6EN`"]
pub type TIM6ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _TIM6ENW<'a> {
w: &'a mut W,
}
impl<'a> _TIM6ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TIM6ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `TIM7EN`"]
pub type TIM7ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _TIM7ENW<'a> {
w: &'a mut W,
}
impl<'a> _TIM7ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: TIM7ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `WWDGEN`"]
pub type WWDGENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _WWDGENW<'a> {
w: &'a mut W,
}
impl<'a> _WWDGENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: WWDGENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `SPI2EN`"]
pub type SPI2ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _SPI2ENW<'a> {
w: &'a mut W,
}
impl<'a> _SPI2ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: SPI2ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `SPI3EN`"]
pub type SPI3ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _SPI3ENW<'a> {
w: &'a mut W,
}
impl<'a> _SPI3ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: SPI3ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `USART2EN`"]
pub type USART2ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _USART2ENW<'a> {
w: &'a mut W,
}
impl<'a> _USART2ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: USART2ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `USART3EN`"]
pub type USART3ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _USART3ENW<'a> {
w: &'a mut W,
}
impl<'a> _USART3ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: USART3ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `I2C1EN`"]
pub type I2C1ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _I2C1ENW<'a> {
w: &'a mut W,
}
impl<'a> _I2C1ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: I2C1ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `I2C2EN`"]
pub type I2C2ENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _I2C2ENW<'a> {
w: &'a mut W,
}
impl<'a> _I2C2ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: I2C2ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `USBEN`"]
pub type USBENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _USBENW<'a> {
w: &'a mut W,
}
impl<'a> _USBENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: USBENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 23;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `CANEN`"]
pub type CANENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _CANENW<'a> {
w: &'a mut W,
}
impl<'a> _CANENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: CANENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PWREN`"]
pub type PWRENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _PWRENW<'a> {
w: &'a mut W,
}
impl<'a> _PWRENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PWRENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `DACEN`"]
pub type DACENW = super::ahbenr::DMAENW;
#[doc = r" Proxy"]
pub struct _DACENW<'a> {
w: &'a mut W,
}
impl<'a> _DACENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: DACENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Disabled."]
#[inline]
pub fn disabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::DISABLED)
}
#[doc = "Enabled."]
#[inline]
pub fn enabled(self) -> &'a mut W {
self.variant(super::ahbenr::DMAENW::ENABLED)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Timer 2 clock enable"]
#[inline]
pub fn tim2en(&self) -> TIM2ENR {
TIM2ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 1 - Timer 3 clock enable"]
#[inline]
pub fn tim3en(&self) -> TIM3ENR {
TIM3ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 2 - Timer 4 clock enable"]
#[inline]
pub fn tim4en(&self) -> TIM4ENR {
TIM4ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 4 - Timer 6 clock enable"]
#[inline]
pub fn tim6en(&self) -> TIM6ENR {
TIM6ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 5 - Timer 7 clock enable"]
#[inline]
pub fn tim7en(&self) -> TIM7ENR {
TIM7ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 11 - Window watchdog clock enable"]
#[inline]
pub fn wwdgen(&self) -> WWDGENR {
WWDGENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 14 - SPI 2 clock enable"]
#[inline]
pub fn spi2en(&self) -> SPI2ENR {
SPI2ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 15 - SPI 3 clock enable"]
#[inline]
pub fn spi3en(&self) -> SPI3ENR {
SPI3ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 17 - USART 2 clock enable"]
#[inline]
pub fn usart2en(&self) -> USART2ENR {
USART2ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 18 - USART 3 clock enable"]
#[inline]
pub fn usart3en(&self) -> USART3ENR {
USART3ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 21 - I2C 1 clock enable"]
#[inline]
pub fn i2c1en(&self) -> I2C1ENR {
I2C1ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 22 - I2C 2 clock enable"]
#[inline]
pub fn i2c2en(&self) -> I2C2ENR {
I2C2ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 23 - USB clock enable"]
#[inline]
pub fn usben(&self) -> USBENR {
USBENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 25 - CAN clock enable"]
#[inline]
pub fn canen(&self) -> CANENR {
CANENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 28 - Power interface clock enable"]
#[inline]
pub fn pwren(&self) -> PWRENR {
PWRENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 29 - DAC interface clock enable"]
#[inline]
pub fn dacen(&self) -> DACENR {
DACENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Timer 2 clock enable"]
#[inline]
pub fn tim2en(&mut self) -> _TIM2ENW {
_TIM2ENW { w: self }
}
#[doc = "Bit 1 - Timer 3 clock enable"]
#[inline]
pub fn tim3en(&mut self) -> _TIM3ENW {
_TIM3ENW { w: self }
}
#[doc = "Bit 2 - Timer 4 clock enable"]
#[inline]
pub fn tim4en(&mut self) -> _TIM4ENW {
_TIM4ENW { w: self }
}
#[doc = "Bit 4 - Timer 6 clock enable"]
#[inline]
pub fn tim6en(&mut self) -> _TIM6ENW {
_TIM6ENW { w: self }
}
#[doc = "Bit 5 - Timer 7 clock enable"]
#[inline]
pub fn tim7en(&mut self) -> _TIM7ENW {
_TIM7ENW { w: self }
}
#[doc = "Bit 11 - Window watchdog clock enable"]
#[inline]
pub fn wwdgen(&mut self) -> _WWDGENW {
_WWDGENW { w: self }
}
#[doc = "Bit 14 - SPI 2 clock enable"]
#[inline]
pub fn spi2en(&mut self) -> _SPI2ENW {
_SPI2ENW { w: self }
}
#[doc = "Bit 15 - SPI 3 clock enable"]
#[inline]
pub fn spi3en(&mut self) -> _SPI3ENW {
_SPI3ENW { w: self }
}
#[doc = "Bit 17 - USART 2 clock enable"]
#[inline]
pub fn usart2en(&mut self) -> _USART2ENW {
_USART2ENW { w: self }
}
#[doc = "Bit 18 - USART 3 clock enable"]
#[inline]
pub fn usart3en(&mut self) -> _USART3ENW {
_USART3ENW { w: self }
}
#[doc = "Bit 21 - I2C 1 clock enable"]
#[inline]
pub fn i2c1en(&mut self) -> _I2C1ENW {
_I2C1ENW { w: self }
}
#[doc = "Bit 22 - I2C 2 clock enable"]
#[inline]
pub fn i2c2en(&mut self) -> _I2C2ENW {
_I2C2ENW { w: self }
}
#[doc = "Bit 23 - USB clock enable"]
#[inline]
pub fn usben(&mut self) -> _USBENW {
_USBENW { w: self }
}
#[doc = "Bit 25 - CAN clock enable"]
#[inline]
pub fn canen(&mut self) -> _CANENW {
_CANENW { w: self }
}
#[doc = "Bit 28 - Power interface clock enable"]
#[inline]
pub fn pwren(&mut self) -> _PWRENW {
_PWRENW { w: self }
}
#[doc = "Bit 29 - DAC interface clock enable"]
#[inline]
pub fn dacen(&mut self) -> _DACENW {
_DACENW { w: self }
}
}
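// A minimal usage sketch (assuming this register is the RCC APB1ENR generated by
// svd2rust; the `rcc.apb1enr` path below is illustrative, not taken from this crate):
//
//     // Enable the USART2 and PWR peripheral clocks in one read-modify-write.
//     rcc.apb1enr.modify(|_, w| w.usart2en().enabled().pwren().enabled());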
| 28.313192 | 61 | 0.517731 |
e93c81cac9f90f46c7e3b61e7ed373547cd75b62 | 2,170 | use std::convert::TryInto;
use std::sync::Arc;
use liblumen_alloc::erts::exception::Alloc;
use liblumen_alloc::erts::process::code::stack::frame::{Frame, Placement};
use liblumen_alloc::erts::process::{code, Process};
use liblumen_alloc::erts::term::prelude::*;
use liblumen_alloc::ModuleFunctionArity;
use web_sys::Element;
use super::label_5;
pub fn place_frame_with_arguments(
process: &Process,
placement: Placement,
body: Term,
) -> Result<(), Alloc> {
assert!(body.is_boxed_resource_reference());
process.stack_push(body)?;
process.place_frame(frame(), placement);
Ok(())
}
// Private
// ```elixir
// # label 3
// # pushed to stack: (body)
// # returned from call: {:ok, child}
// # full stack: ({:ok, child}, body)
// # returns: :ok
// :ok = Lumen.Web.Node.append_child(body, child);
// remove_ok = Lumen.Web.Element.remove(child);
// Lumen.Web.Wait.with_return(remove_ok)
// ```
fn code(arc_process: &Arc<Process>) -> code::Result {
arc_process.reduce();
let ok_child = arc_process.stack_pop().unwrap();
assert!(
ok_child.is_boxed_tuple(),
"ok_child ({:?}) is not a tuple",
ok_child
);
let ok_child_tuple: Boxed<Tuple> = ok_child.try_into().unwrap();
assert_eq!(ok_child_tuple.len(), 2);
assert_eq!(ok_child_tuple[0], Atom::str_to_term("ok"));
let child = ok_child_tuple[1];
let child_ref_boxed: Boxed<Resource> = child.try_into().unwrap();
let child_reference: Resource = child_ref_boxed.into();
let _: &Element = child_reference.downcast_ref().unwrap();
let body = arc_process.stack_pop().unwrap();
assert!(body.is_boxed_resource_reference());
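    // `label_5` replaces the current frame while `append_child_2` is pushed on top of it,
    // so (per the Elixir sketch above) `append_child_2` should run first and `label_5`
    // should then consume its result.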
label_5::place_frame_with_arguments(arc_process, Placement::Replace, child)?;
liblumen_web::node::append_child_2::place_frame_with_arguments(
arc_process,
Placement::Push,
body,
child,
)?;
Process::call_code(arc_process)
}
fn frame() -> Frame {
let module_function_arity = Arc::new(ModuleFunctionArity {
module: super::module(),
function: super::function(),
arity: 0,
});
Frame::new(module_function_arity, code)
}
| 27.468354 | 81 | 0.664055 |
bb24f5eef67af1e8f727cfc24c6ae83f8201b5ba | 1,421 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
pub mod tls_server;
//pub mod tls_server_client;
pub mod test_http;
pub mod proxy;
pub mod router;
pub const BASE_URL: &str = "127.0.0.1:8443";
pub const BASE_LOCALHOST_URL: &str = "localhost:8443";
pub const HTTPS_BASE_URL: &str = "https://localhost:8443";
pub const TCS_NUM: &u8 = &8;
use sgx_tstd as std;
use std::thread;
use std::time::Duration;
pub fn start_tls_server() {
//test_http::start_tcp_listener();
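    // Background thread: invoke proxy::cookie_validator() every 15 minutes.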
thread::spawn(|| {
loop {
            thread::sleep(Duration::from_secs(15 * 60));
proxy::cookie_validator();
}
});
    tls_server::prep_server(1024 * 1024);
} | 32.295455 | 63 | 0.708656 |
14d2d0a6c8dff104951728fb790cf880bcea3f8b | 184 | // run-pass
#![deny(dead_code)]
const LOW_RANGE: char = '0';
const HIGH_RANGE: char = '9';
fn main() {
match '5' {
LOW_RANGE..=HIGH_RANGE => (),
_ => ()
};
}
| 14.153846 | 37 | 0.494565 |
91c79241c28cec048ca9c38e4ea0271708614934 | 5,607 | use crate::{
event::{self, Event},
topology::config::{DataType, GlobalOptions, SourceConfig},
};
use bytes::Bytes;
use codec::BytesDelimitedCodec;
use futures::{future, sync::mpsc, Future, Sink, Stream};
use serde::{Deserialize, Serialize};
use std::{io, net::SocketAddr};
use string_cache::DefaultAtom as Atom;
use tokio::net::udp::{UdpFramed, UdpSocket};
/// UDP processes messages per packet, where messages are separated by newline.
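///
/// A hypothetical TOML configuration for this source (field names taken from the
/// struct below) might look like:
///
/// ```toml
/// [sources.my_udp_source]
/// type = "udp"
/// address = "0.0.0.0:9000"
/// # host_key = "host" # optional
/// ```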
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct UdpConfig {
pub address: SocketAddr,
pub host_key: Option<Atom>,
}
impl UdpConfig {
pub fn new(address: SocketAddr) -> Self {
Self {
address,
host_key: None,
}
}
}
#[typetag::serde(name = "udp")]
impl SourceConfig for UdpConfig {
fn build(
&self,
_name: &str,
_globals: &GlobalOptions,
out: mpsc::Sender<Event>,
) -> crate::Result<super::Source> {
let host_key = self.host_key.clone().unwrap_or(event::HOST.clone());
Ok(udp(self.address, host_key, out))
}
fn output_type(&self) -> DataType {
DataType::Log
}
}
pub fn udp(address: SocketAddr, host_key: Atom, out: mpsc::Sender<Event>) -> super::Source {
let out = out.sink_map_err(|e| error!("error sending event: {:?}", e));
Box::new(
future::lazy(move || {
let socket = UdpSocket::bind(&address).expect("failed to bind to udp listener socket");
info!(message = "listening.", %address);
Ok(socket)
})
.and_then(move |socket| {
let host_key = host_key.clone();
            // UDP processes messages per packet, where messages are separated by newlines;
            // the final message stretches to the end of the packet.
UdpFramed::with_decode(socket, BytesDelimitedCodec::new(b'\n'), true)
.map(move |(line, addr): (Bytes, _)| {
let mut event = Event::from(line);
event
.as_mut_log()
.insert_implicit(host_key.clone(), addr.to_string().into());
trace!(message = "Received one event.", ?event);
event
})
// Error from Decoder or UdpSocket
.map_err(|error: io::Error| error!(message = "error reading datagram.", %error))
.forward(out)
// Done with listening and sending
.map(|_| ())
}),
)
}
#[cfg(test)]
mod test {
use super::UdpConfig;
use crate::event;
use crate::test_util::{collect_n, next_addr};
use crate::topology::config::{GlobalOptions, SourceConfig};
use futures::sync::mpsc;
use std::{
net::{SocketAddr, UdpSocket},
thread,
time::Duration,
};
fn send_lines<'a>(addr: SocketAddr, lines: impl IntoIterator<Item = &'a str>) -> SocketAddr {
let bind = next_addr();
let socket = UdpSocket::bind(bind)
.map_err(|e| panic!("{:}", e))
.ok()
.unwrap();
for line in lines {
assert_eq!(
socket
.send_to(line.as_bytes(), addr)
.map_err(|e| panic!("{:}", e))
.ok()
.unwrap(),
line.as_bytes().len()
);
// Space things out slightly to try to avoid dropped packets
thread::sleep(Duration::from_millis(1));
}
// Give packets some time to flow through
thread::sleep(Duration::from_millis(10));
// Done
bind
}
fn init_udp(sender: mpsc::Sender<event::Event>) -> (SocketAddr, tokio::runtime::Runtime) {
let addr = next_addr();
let server = UdpConfig::new(addr)
.build("default", &GlobalOptions::default(), sender)
.unwrap();
let mut rt = tokio::runtime::Runtime::new().unwrap();
rt.spawn(server);
// Wait for udp to start listening
thread::sleep(Duration::from_millis(100));
(addr, rt)
}
#[test]
fn udp_message() {
let (tx, rx) = mpsc::channel(2);
let (address, mut rt) = init_udp(tx);
send_lines(address, vec!["test"]);
let events = rt.block_on(collect_n(rx, 1)).ok().unwrap();
assert_eq!(events[0].as_log()[&event::MESSAGE], "test".into());
}
#[test]
fn udp_multiple_messages() {
let (tx, rx) = mpsc::channel(10);
let (address, mut rt) = init_udp(tx);
send_lines(address, vec!["test\ntest2"]);
let events = rt.block_on(collect_n(rx, 2)).ok().unwrap();
assert_eq!(events[0].as_log()[&event::MESSAGE], "test".into());
assert_eq!(events[1].as_log()[&event::MESSAGE], "test2".into());
}
#[test]
fn udp_multiple_packets() {
let (tx, rx) = mpsc::channel(10);
let (address, mut rt) = init_udp(tx);
send_lines(address, vec!["test", "test2"]);
let events = rt.block_on(collect_n(rx, 2)).ok().unwrap();
assert_eq!(events[0].as_log()[&event::MESSAGE], "test".into());
assert_eq!(events[1].as_log()[&event::MESSAGE], "test2".into());
}
#[test]
fn udp_it_includes_host() {
let (tx, rx) = mpsc::channel(2);
let (address, mut rt) = init_udp(tx);
let from = send_lines(address, vec!["test"]);
let events = rt.block_on(collect_n(rx, 1)).ok().unwrap();
assert_eq!(events[0].as_log()[&event::HOST], format!("{}", from).into());
}
}
| 29.824468 | 99 | 0.545033 |
9bc2f56fa79d975ba07d32623b5798104679782a | 773 | use language::operations::{make_param_doc, Operation, ParamInfo};
pub struct AgentSetIsAlarmedOp;
const DOC: &str = "Sets agent's status as alarmed (value = 1) or peaceful (value = 0).";
pub const OP_CODE: u32 = 1807;
pub const IDENT: &str = "agent_set_is_alarmed";
impl Operation for AgentSetIsAlarmedOp {
fn op_code(&self) -> u32 {
OP_CODE
}
fn documentation(&self) -> &'static str {
DOC
}
fn identifier(&self) -> &'static str {
IDENT
}
fn param_info(&self) -> ParamInfo {
ParamInfo {
num_required: 2,
num_optional: 0,
param_docs: vec![
make_param_doc("<agent_id>", ""),
make_param_doc("<value>", ""),
],
}
}
}
| 22.085714 | 88 | 0.556274 |
cc69c18f7b552dc22800cfbffc221f0869a5dae1 | 656 | // Copyright 2018-2020 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub use super::socket::TcpTransport as RawTransport;
| 41 | 75 | 0.754573 |
f5793cdb2dcd0d84e460680f48299278ffb427a0 | 3,279 | //! This module implements configuration related stuff.
use std::fmt;
use std::env;
use std::io::Read;
use std::fs::File;
use std::path::Path;
use std::collections::BTreeMap;
use rustc_serialize::json::{Object, Json};
/// The pencil `Config` type, We provide ways to fill it from JSON files:
///
/// ```rust,no_run
/// let mut app = pencil::Pencil::new("/demo");
/// app.config.from_jsonfile("yourconfig.json")
/// ```
///
/// You can also load configurations from an environment variable
/// pointing to a file:
///
/// ```rust,no_run
/// let mut app = pencil::Pencil::new("/demo");
/// app.config.from_envvar("YOURAPPLICATION_SETTINGS")
/// ```
///
/// In this case, you have to set this environment variable to the file
/// you want to use. On Linux and OS X you can use the export statement:
///
/// ```bash
/// export YOURAPPLICATION_SETTINGS="/path/to/config/file"
/// ```
#[derive(Clone)]
pub struct Config {
config: Object,
}
impl Default for Config {
fn default() -> Config {
Config::new()
}
}
impl Config {
    /// Creates a new `Config` object.
pub fn new() -> Config {
let json_object: Object = BTreeMap::new();
Config {
config: json_object,
}
}
    /// Sets a value for the given key.
pub fn set(&mut self, key: &str, value: Json) {
self.config.insert(key.to_string(), value);
}
/// Returns a reference to the value corresponding to the key.
pub fn get(&self, key: &str) -> Option<&Json> {
self.config.get(&key.to_string())
}
/// Get a boolean configuration value. If the key doesn't exist
/// or the value is not a `Json::Boolean`, the default value
/// will be returned.
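    ///
    /// A minimal sketch (the key names are just examples):
    ///
    /// ```rust,no_run
    /// extern crate rustc_serialize;
    /// extern crate pencil;
    /// use rustc_serialize::json::Json;
    ///
    /// fn main() {
    ///     let mut app = pencil::Pencil::new("/demo");
    ///     app.config.set("DEBUG", Json::Boolean(true));
    ///     // The stored value is returned when present...
    ///     assert!(app.config.get_boolean("DEBUG", false));
    ///     // ...and the default is returned for missing keys.
    ///     assert!(app.config.get_boolean("MISSING", true));
    /// }
    /// ```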
pub fn get_boolean(&self, key: &str, default: bool) -> bool {
match self.get(key) {
Some(value) => {
match *value {
Json::Boolean(value) => value,
_ => default
}
},
None => default
}
}
/// Loads a configuration from an environment variable pointing to
/// a JSON configuration file.
pub fn from_envvar(&mut self, variable_name: &str) {
match env::var(variable_name) {
Ok(value) => self.from_jsonfile(&value),
Err(_) => panic!("The environment variable {} is not set.", variable_name),
}
}
/// Updates the values in the config from a JSON file.
pub fn from_jsonfile(&mut self, filepath: &str) {
let path = Path::new(filepath);
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let object: Json = Json::from_str(&content).unwrap();
match object {
Json::Object(object) => { self.from_object(object); },
_ => { panic!("The configuration file is not an JSON object."); }
}
}
/// Updates the values from the given `Object`.
pub fn from_object(&mut self, object: Object) {
for (key, value) in &object {
self.set(&key, value.clone());
}
}
}
impl fmt::Debug for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<Pencil Config {:?}>", self.config)
}
}
| 29.017699 | 87 | 0.57731 |
50ce8ba1f8cc380b706f39c047a573709323b5e8 | 15,107 | // Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
// The structure of a Debian package looks something like this:
//
// foobar_1.2.3_i386.deb # Actually an ar archive
// debian-binary # Specifies deb format version (2.0 in our case)
// control.tar.gz # Contains files controlling the installation:
// control # Basic package metadata
// md5sums # Checksums for files in data.tar.gz below
// postinst # Post-installation script (optional)
// prerm # Pre-uninstallation script (optional)
// data.tar.gz # Contains files to be installed:
// usr/bin/foobar # Binary executable file
// usr/share/applications/foobar.desktop # Desktop file (for apps)
// usr/share/icons/hicolor/... # Icon files (for apps)
// usr/lib/foobar/... # Other resource files
//
// For cargo-bundle, we put bundle resource files under /usr/lib/package_name/,
// and then generate the desktop file and control file from the bundle
// metadata, as well as generating the md5sums file. Currently we do not
// generate postinst or prerm files.
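//
// In the code below, `generate_data` assembles the tree that becomes data.tar.gz,
// `generate_control_file` and `generate_md5sums` produce the contents of
// control.tar.gz, and `create_archive` joins `debian-binary`, control.tar.gz and
// data.tar.gz into the final `ar` archive.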
use super::super::common;
use crate::Settings;
use anyhow::Context;
use heck::ToKebabCase;
use image::{self, codecs::png::PngDecoder, GenericImageView, ImageDecoder};
use libflate::gzip;
use walkdir::WalkDir;
use std::{
collections::BTreeSet,
ffi::OsStr,
fs::{self, File},
io::{self, Write},
path::{Path, PathBuf},
};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct DebIcon {
pub width: u32,
pub height: u32,
pub is_high_density: bool,
pub path: PathBuf,
}
/// Bundles the project.
/// Returns a vector of PathBuf that shows where the DEB was created.
pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
let arch = match settings.binary_arch() {
"x86" => "i386",
"x86_64" => "amd64",
// ARM64 is detected differently, armel isn't supported, so armhf is the only reasonable choice here.
"arm" => "armhf",
other => other,
};
let package_base_name = format!(
"{}_{}_{}",
settings.main_binary_name(),
settings.version_string(),
arch
);
let package_name = format!("{}.deb", package_base_name);
common::print_bundling(&package_name)?;
let base_dir = settings.project_out_directory().join("bundle/deb");
let package_dir = base_dir.join(&package_base_name);
if package_dir.exists() {
fs::remove_dir_all(&package_dir)
.with_context(|| format!("Failed to remove old {}", package_base_name))?;
}
let package_path = base_dir.join(package_name);
let (data_dir, _) = generate_data(settings, &package_dir)
.with_context(|| "Failed to build data folders and files")?;
copy_custom_files(settings, &data_dir).with_context(|| "Failed to copy custom files")?;
// Generate control files.
let control_dir = package_dir.join("control");
generate_control_file(settings, arch, &control_dir, &data_dir)
.with_context(|| "Failed to create control file")?;
generate_md5sums(&control_dir, &data_dir).with_context(|| "Failed to create md5sums file")?;
// Generate `debian-binary` file; see
// http://www.tldp.org/HOWTO/Debian-Binary-Package-Building-HOWTO/x60.html#AEN66
let debian_binary_path = package_dir.join("debian-binary");
create_file_with_data(&debian_binary_path, "2.0\n")
.with_context(|| "Failed to create debian-binary file")?;
// Apply tar/gzip/ar to create the final package file.
let control_tar_gz_path =
tar_and_gzip_dir(control_dir).with_context(|| "Failed to tar/gzip control directory")?;
let data_tar_gz_path =
tar_and_gzip_dir(data_dir).with_context(|| "Failed to tar/gzip data directory")?;
create_archive(
vec![debian_binary_path, control_tar_gz_path, data_tar_gz_path],
&package_path,
)
.with_context(|| "Failed to create package archive")?;
Ok(vec![package_path])
}
/// Generate the debian data folders and files.
pub fn generate_data(
settings: &Settings,
package_dir: &Path,
) -> crate::Result<(PathBuf, BTreeSet<DebIcon>)> {
// Generate data files.
let data_dir = package_dir.join("data");
let bin_dir = data_dir.join("usr/bin");
for bin in settings.binaries() {
let bin_path = settings.binary_path(bin);
common::copy_file(&bin_path, &bin_dir.join(bin.name()))
.with_context(|| format!("Failed to copy binary from {:?}", bin_path))?;
}
copy_resource_files(settings, &data_dir).with_context(|| "Failed to copy resource files")?;
settings
.copy_binaries(&bin_dir)
.with_context(|| "Failed to copy external binaries")?;
let icons =
generate_icon_files(settings, &data_dir).with_context(|| "Failed to create icon files")?;
generate_desktop_file(settings, &data_dir).with_context(|| "Failed to create desktop file")?;
Ok((data_dir, icons))
}
/// Generate the application desktop file and store it under the `data_dir`.
fn generate_desktop_file(settings: &Settings, data_dir: &Path) -> crate::Result<()> {
let bin_name = settings.main_binary_name();
let desktop_file_name = format!("{}.desktop", bin_name);
let desktop_file_path = data_dir
.join("usr/share/applications")
.join(desktop_file_name);
let file = &mut common::create_file(&desktop_file_path)?;
// For more information about the format of this file, see
// https://developer.gnome.org/integration-guide/stable/desktop-files.html.en
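  //
  // For a hypothetical product named "Foobar" with binary name "foobar", the
  // generated file would contain roughly:
  //
  //     [Desktop Entry]
  //     Categories=...
  //     Comment=<short description>
  //     Exec=foobar
  //     Icon=foobar
  //     Name=Foobar
  //     Terminal=false
  //     Type=Application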
writeln!(file, "[Desktop Entry]")?;
if let Some(category) = settings.app_category() {
writeln!(file, "Categories={}", category.gnome_desktop_categories())?;
} else {
writeln!(file, "Categories=")?;
}
if !settings.short_description().is_empty() {
writeln!(file, "Comment={}", settings.short_description())?;
}
writeln!(file, "Exec={}", bin_name)?;
writeln!(file, "Icon={}", bin_name)?;
writeln!(file, "Name={}", settings.product_name())?;
writeln!(file, "Terminal=false")?;
writeln!(file, "Type=Application")?;
Ok(())
}
/// Generates the debian control file and stores it under the `control_dir`.
fn generate_control_file(
settings: &Settings,
arch: &str,
control_dir: &Path,
data_dir: &Path,
) -> crate::Result<()> {
// For more information about the format of this file, see
// https://www.debian.org/doc/debian-policy/ch-controlfields.html
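  //
  // An illustrative control file produced by this function (all field values are
  // hypothetical and come from the bundle settings) might look like:
  //
  //     Package: my-app
  //     Version: 1.2.3
  //     Architecture: amd64
  //     Installed-Size: 4096
  //     Maintainer: Jane Doe
  //     Description: A short summary
  //      A longer description line.
  //     Priority: optional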
let dest_path = control_dir.join("control");
let mut file = common::create_file(&dest_path)?;
writeln!(
file,
"Package: {}",
settings.product_name().to_kebab_case().to_ascii_lowercase()
)?;
writeln!(file, "Version: {}", settings.version_string())?;
writeln!(file, "Architecture: {}", arch)?;
// Installed-Size must be divided by 1024, see https://www.debian.org/doc/debian-policy/ch-controlfields.html#installed-size
writeln!(file, "Installed-Size: {}", total_dir_size(data_dir)? / 1024)?;
let authors = settings.authors_comma_separated().unwrap_or_default();
writeln!(file, "Maintainer: {}", authors)?;
if !settings.homepage_url().is_empty() {
writeln!(file, "Homepage: {}", settings.homepage_url())?;
}
let dependencies = settings.deb().depends.as_ref().cloned().unwrap_or_default();
if !dependencies.is_empty() {
writeln!(file, "Depends: {}", dependencies.join(", "))?;
}
let mut short_description = settings.short_description().trim();
if short_description.is_empty() {
short_description = "(none)";
}
let mut long_description = settings.long_description().unwrap_or("").trim();
if long_description.is_empty() {
long_description = "(none)";
}
writeln!(file, "Description: {}", short_description)?;
for line in long_description.lines() {
let line = line.trim();
if line.is_empty() {
writeln!(file, " .")?;
} else {
writeln!(file, " {}", line)?;
}
}
writeln!(file, "Priority: optional")?;
file.flush()?;
Ok(())
}
/// Create an `md5sums` file in the `control_dir` containing the MD5 checksums
/// for each file within the `data_dir`.
fn generate_md5sums(control_dir: &Path, data_dir: &Path) -> crate::Result<()> {
let md5sums_path = control_dir.join("md5sums");
let mut md5sums_file = common::create_file(&md5sums_path)?;
for entry in WalkDir::new(data_dir) {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
continue;
}
let mut file = File::open(path)?;
let mut hash = md5::Context::new();
io::copy(&mut file, &mut hash)?;
for byte in hash.compute().iter() {
write!(md5sums_file, "{:02x}", byte)?;
}
let rel_path = path.strip_prefix(data_dir)?;
let path_str = rel_path.to_str().ok_or_else(|| {
let msg = format!("Non-UTF-8 path: {:?}", rel_path);
io::Error::new(io::ErrorKind::InvalidData, msg)
})?;
writeln!(md5sums_file, " {}", path_str)?;
}
Ok(())
}
/// Copy the bundle's resource files into an appropriate directory under the
/// `data_dir`.
fn copy_resource_files(settings: &Settings, data_dir: &Path) -> crate::Result<()> {
let resource_dir = data_dir.join("usr/lib").join(settings.main_binary_name());
settings.copy_resources(&resource_dir)
}
/// Copies user-defined files to the deb package.
fn copy_custom_files(settings: &Settings, data_dir: &Path) -> crate::Result<()> {
for (deb_path, path) in settings.deb().files.iter() {
let deb_path = if deb_path.is_absolute() {
deb_path.strip_prefix("/").unwrap()
} else {
deb_path
};
if path.is_file() {
common::copy_file(path, data_dir.join(deb_path))?;
} else {
let out_dir = data_dir.join(deb_path);
for entry in walkdir::WalkDir::new(&path) {
let entry_path = entry?.into_path();
if entry_path.is_file() {
let without_prefix = entry_path.strip_prefix(&path).unwrap();
common::copy_file(&entry_path, out_dir.join(without_prefix))?;
}
}
}
}
Ok(())
}
/// Generate the icon files and store them under the `data_dir`.
fn generate_icon_files(settings: &Settings, data_dir: &Path) -> crate::Result<BTreeSet<DebIcon>> {
let base_dir = data_dir.join("usr/share/icons/hicolor");
let get_dest_path = |width: u32, height: u32, is_high_density: bool| {
base_dir.join(format!(
"{}x{}{}/apps/{}.png",
width,
height,
if is_high_density { "@2x" } else { "" },
settings.main_binary_name()
))
};
let mut icons = BTreeSet::new();
// Prefer PNG files.
for icon_path in settings.icon_files() {
let icon_path = icon_path?;
if icon_path.extension() != Some(OsStr::new("png")) {
continue;
}
let decoder = PngDecoder::new(File::open(&icon_path)?)?;
let width = decoder.dimensions().0;
let height = decoder.dimensions().1;
let is_high_density = common::is_retina(&icon_path);
let dest_path = get_dest_path(width, height, is_high_density);
let deb_icon = DebIcon {
width,
height,
is_high_density,
path: dest_path,
};
if !icons.contains(&deb_icon) {
common::copy_file(&icon_path, &deb_icon.path)?;
icons.insert(deb_icon);
}
}
// Fall back to non-PNG files for any missing sizes.
for icon_path in settings.icon_files() {
let icon_path = icon_path?;
if icon_path.extension() == Some(OsStr::new("png")) {
continue;
} else if icon_path.extension() == Some(OsStr::new("icns")) {
let icon_family = icns::IconFamily::read(File::open(&icon_path)?)?;
for icon_type in icon_family.available_icons() {
let width = icon_type.screen_width();
let height = icon_type.screen_height();
let is_high_density = icon_type.pixel_density() > 1;
let dest_path = get_dest_path(width, height, is_high_density);
let deb_icon = DebIcon {
width,
height,
is_high_density,
path: dest_path,
};
if !icons.contains(&deb_icon) {
let icon = icon_family.get_icon_with_type(icon_type)?;
icon.write_png(common::create_file(&deb_icon.path)?)?;
icons.insert(deb_icon);
}
}
} else {
let icon = image::open(&icon_path)?;
let (width, height) = icon.dimensions();
let is_high_density = common::is_retina(&icon_path);
let dest_path = get_dest_path(width, height, is_high_density);
let deb_icon = DebIcon {
width,
height,
is_high_density,
path: dest_path,
};
if !icons.contains(&deb_icon) {
icon.write_to(
&mut common::create_file(&deb_icon.path)?,
image::ImageOutputFormat::Png,
)?;
icons.insert(deb_icon);
}
}
}
Ok(icons)
}
/// Create an empty file at the given path, creating any parent directories as
/// needed, then write `data` into the file.
fn create_file_with_data<P: AsRef<Path>>(path: P, data: &str) -> crate::Result<()> {
let mut file = common::create_file(path.as_ref())?;
file.write_all(data.as_bytes())?;
file.flush()?;
Ok(())
}
/// Computes the total size, in bytes, of the given directory and all of its
/// contents.
fn total_dir_size(dir: &Path) -> crate::Result<u64> {
let mut total: u64 = 0;
for entry in WalkDir::new(&dir) {
total += entry?.metadata()?.len();
}
Ok(total)
}
/// Writes a tar file to the given writer containing the given directory.
fn create_tar_from_dir<P: AsRef<Path>, W: Write>(src_dir: P, dest_file: W) -> crate::Result<W> {
let src_dir = src_dir.as_ref();
let mut tar_builder = tar::Builder::new(dest_file);
for entry in WalkDir::new(&src_dir) {
let entry = entry?;
let src_path = entry.path();
if src_path == src_dir {
continue;
}
let dest_path = src_path.strip_prefix(&src_dir)?;
if entry.file_type().is_dir() {
tar_builder.append_dir(dest_path, src_path)?;
} else {
let mut src_file = fs::File::open(src_path)?;
tar_builder.append_file(dest_path, &mut src_file)?;
}
}
let dest_file = tar_builder.into_inner()?;
Ok(dest_file)
}
/// Creates a `.tar.gz` file from the given directory (placing the new file
/// within the given directory's parent directory), then deletes the original
/// directory and returns the path to the new file.
fn tar_and_gzip_dir<P: AsRef<Path>>(src_dir: P) -> crate::Result<PathBuf> {
let src_dir = src_dir.as_ref();
let dest_path = src_dir.with_extension("tar.gz");
let dest_file = common::create_file(&dest_path)?;
let gzip_encoder = gzip::Encoder::new(dest_file)?;
let gzip_encoder = create_tar_from_dir(src_dir, gzip_encoder)?;
let mut dest_file = gzip_encoder.finish().into_result()?;
dest_file.flush()?;
Ok(dest_path)
}
/// Creates an `ar` archive from the given source files and writes it to the
/// given destination path.
fn create_archive(srcs: Vec<PathBuf>, dest: &Path) -> crate::Result<()> {
let mut builder = ar::Builder::new(common::create_file(dest)?);
for path in &srcs {
builder.append_path(path)?;
}
builder.into_inner()?.flush()?;
Ok(())
}
| 36.314904 | 126 | 0.658701 |
569f04d18ffd16cb689f58e8344b89a7cba71bd8 | 337 | // aux-build:lint-plugin-test.rs
// ignore-stage1
#![feature(plugin)]
#![plugin(lint_plugin_test)]
//~^ WARN use of deprecated attribute `plugin`
#![forbid(test_lint)]
fn lintme() { } //~ ERROR item is named 'lintme'
#[allow(test_lint)]
//~^ ERROR allow(test_lint) overruled by outer forbid(test_lint)
pub fn main() {
lintme();
}
| 21.0625 | 64 | 0.679525 |
bf7d55d6d111d36dd863fd61e51df1244eaca02c | 2,773 | use hir::db::HirDatabase;
use ra_syntax::{
ast::{self, AstNode, AttrsOwner},
SyntaxKind::{COMMENT, WHITESPACE},
TextUnit,
};
use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let nominal = ctx.node_at_offset::<ast::NominalDef>()?;
let node_start = derive_insertion_offset(nominal)?;
ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
let derive_attr = nominal
.attrs()
.filter_map(|x| x.as_call())
.filter(|(name, _arg)| name == "derive")
.map(|(_name, arg)| arg)
.next();
let offset = match derive_attr {
None => {
edit.insert(node_start, "#[derive()]\n");
node_start + TextUnit::of_str("#[derive(")
}
Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'),
};
edit.target(nominal.syntax().range());
edit.set_cursor(offset)
});
ctx.build()
}
// Insert `derive` after doc comments.
fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextUnit> {
let non_ws_child = nominal
.syntax()
.children_with_tokens()
.find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
Some(non_ws_child.range().start())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_target};
#[test]
fn add_derive_new() {
check_assist(
add_derive,
"struct Foo { a: i32, <|>}",
"#[derive(<|>)]\nstruct Foo { a: i32, }",
);
check_assist(
add_derive,
"struct Foo { <|> a: i32, }",
"#[derive(<|>)]\nstruct Foo { a: i32, }",
);
}
#[test]
fn add_derive_existing() {
check_assist(
add_derive,
"#[derive(Clone)]\nstruct Foo { a: i32<|>, }",
"#[derive(Clone<|>)]\nstruct Foo { a: i32, }",
);
}
#[test]
fn add_derive_new_with_doc_comment() {
check_assist(
add_derive,
"
/// `Foo` is a pretty important struct.
/// It does stuff.
struct Foo { a: i32<|>, }
",
"
/// `Foo` is a pretty important struct.
/// It does stuff.
#[derive(<|>)]
struct Foo { a: i32, }
",
);
}
#[test]
fn add_derive_target() {
check_assist_target(
add_derive,
"
struct SomeThingIrrelevant;
/// `Foo` is a pretty important struct.
/// It does stuff.
struct Foo { a: i32<|>, }
struct EvenMoreIrrelevant;
",
"/// `Foo` is a pretty important struct.
/// It does stuff.
struct Foo { a: i32, }",
);
}
}
| 26.160377 | 82 | 0.518572 |
727b70d936cd309bcf9a33df2582ad682c1ab4e1 | 9,988 | use super::{generate_deterministic_keypairs, KeypairsFile};
use crate::test_utils::TestingPendingAttestationBuilder;
use crate::*;
use bls::get_withdrawal_credentials;
use dirs;
use log::debug;
use rayon::prelude::*;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
pub const KEYPAIRS_FILE: &str = "keypairs.raw_keypairs";
/// Returns the path to the file where the generated keypairs should be stored.
///
/// It is either `$HOME/.lighthouse/keypairs.raw_keypairs` or, if `$HOME` is not available,
/// `./keypairs.raw_keypairs`.
pub fn keypairs_path() -> PathBuf {
let dir = dirs::home_dir()
        .map(|home| home.join(".lighthouse"))
.unwrap_or_else(|| PathBuf::from(""));
dir.join(KEYPAIRS_FILE)
}
/// Builds a beacon state to be used for testing purposes.
///
/// This struct should **never be used for production purposes.**
#[derive(Clone)]
pub struct TestingBeaconStateBuilder<T: EthSpec> {
state: BeaconState<T>,
keypairs: Vec<Keypair>,
}
impl<T: EthSpec> TestingBeaconStateBuilder<T> {
/// Attempts to load validators from a file in `$HOME/.lighthouse/keypairs.raw_keypairs`. If
/// the file is unavailable, it generates the keys at runtime.
///
/// If the `$HOME` environment variable is not set, the local directory is used.
///
/// See the `Self::from_keypairs_file` method for more info.
///
/// # Panics
///
/// If the file does not contain enough keypairs or is invalid.
pub fn from_default_keypairs_file_if_exists(validator_count: usize, spec: &ChainSpec) -> Self {
let dir = dirs::home_dir()
            .map(|home| home.join(".lighthouse"))
.unwrap_or_else(|| PathBuf::from(""));
let file = dir.join(KEYPAIRS_FILE);
if file.exists() {
TestingBeaconStateBuilder::from_keypairs_file(validator_count, &file, spec)
} else {
TestingBeaconStateBuilder::from_deterministic_keypairs(validator_count, spec)
}
}
/// Loads the initial validator keypairs from a file on disk.
///
/// Loading keypairs from file is ~10x faster than generating them. Use the `gen_keys` command
/// on the `test_harness` binary to generate the keys. In the `test_harness` dir, run `cargo
/// run -- gen_keys -h` for help.
///
/// # Panics
///
/// If the file does not exist, is invalid or does not contain enough keypairs.
pub fn from_keypairs_file(validator_count: usize, path: &Path, spec: &ChainSpec) -> Self {
debug!("Loading {} keypairs from file...", validator_count);
let keypairs = Vec::from_raw_file(path, validator_count).unwrap();
TestingBeaconStateBuilder::from_keypairs(keypairs, spec)
}
/// Generates the validator keypairs deterministically.
pub fn from_deterministic_keypairs(validator_count: usize, spec: &ChainSpec) -> Self {
debug!("Generating {} deterministic keypairs...", validator_count);
let keypairs = generate_deterministic_keypairs(validator_count);
TestingBeaconStateBuilder::from_keypairs(keypairs, spec)
}
/// Uses the given keypair for all validators.
pub fn from_single_keypair(
validator_count: usize,
keypair: &Keypair,
spec: &ChainSpec,
) -> Self {
debug!("Generating {} cloned keypairs...", validator_count);
let mut keypairs = Vec::with_capacity(validator_count);
for _ in 0..validator_count {
keypairs.push(keypair.clone())
}
TestingBeaconStateBuilder::from_keypairs(keypairs, spec)
}
/// Creates the builder from an existing set of keypairs.
pub fn from_keypairs(keypairs: Vec<Keypair>, spec: &ChainSpec) -> Self {
let validator_count = keypairs.len();
debug!(
"Building {} Validator objects from keypairs...",
validator_count
);
let validators = keypairs
.par_iter()
.map(|keypair| {
let withdrawal_credentials = Hash256::from_slice(&get_withdrawal_credentials(
&keypair.pk,
spec.bls_withdrawal_prefix_byte,
));
Validator {
pubkey: keypair.pk.clone(),
withdrawal_credentials,
// All validators start active.
activation_epoch: spec.genesis_epoch,
exit_epoch: spec.far_future_epoch,
withdrawable_epoch: spec.far_future_epoch,
initiated_exit: false,
slashed: false,
}
})
.collect();
// TODO: Testing only. Burn with fire later.
        // Set genesis to the start of the most recent 30-minute period. This is used for
        // testing only: it allows multiple nodes started within the same 30-minute window
        // to agree on a genesis time.
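        // For example, if `now` corresponds to 12:47:10 UTC, `secs_after_last_period`
        // is 1030 and the computed genesis time rounds down to 12:30:00 UTC.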
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let secs_after_last_period = now.checked_rem(30 * 60).unwrap_or(0);
// genesis is now the last 30 minute block.
let genesis_time = now - secs_after_last_period;
let mut state = BeaconState::genesis(
genesis_time,
Eth1Data {
deposit_root: Hash256::zero(),
block_hash: Hash256::zero(),
},
spec,
);
let balances = vec![32_000_000_000; validator_count];
debug!("Importing {} existing validators...", validator_count);
state.validator_registry = validators;
state.validator_balances = balances;
debug!("BeaconState initialized.");
Self { state, keypairs }
}
/// Consume the builder and return the `BeaconState` and the keypairs for each validator.
pub fn build(self) -> (BeaconState<T>, Vec<Keypair>) {
(self.state, self.keypairs)
}
/// Ensures that the state returned from `Self::build(..)` has all caches pre-built.
///
/// Note: this performs the build when called. Ensure that no changes are made that would
/// invalidate this cache.
pub fn build_caches(&mut self, spec: &ChainSpec) -> Result<(), BeaconStateError> {
let state = &mut self.state;
state.build_epoch_cache(RelativeEpoch::Previous, &spec)?;
state.build_epoch_cache(RelativeEpoch::Current, &spec)?;
state.build_epoch_cache(RelativeEpoch::NextWithRegistryChange, &spec)?;
state.build_epoch_cache(RelativeEpoch::NextWithoutRegistryChange, &spec)?;
state.update_pubkey_cache()?;
Ok(())
}
/// Sets the `BeaconState` to be in a slot, calling `teleport_to_epoch` to update the epoch.
pub fn teleport_to_slot(&mut self, slot: Slot, spec: &ChainSpec) {
self.teleport_to_epoch(slot.epoch(spec.slots_per_epoch), spec);
self.state.slot = slot;
}
/// Sets the `BeaconState` to be in the first slot of the given epoch.
///
/// Sets all justification/finalization parameters to be be as "perfect" as possible (i.e.,
/// highest justified and finalized slots, full justification bitfield, etc).
fn teleport_to_epoch(&mut self, epoch: Epoch, spec: &ChainSpec) {
let state = &mut self.state;
let slot = epoch.start_slot(spec.slots_per_epoch);
state.slot = slot;
state.previous_shuffling_epoch = epoch - 1;
state.current_shuffling_epoch = epoch;
state.previous_shuffling_seed = Hash256::from_low_u64_le(0);
state.current_shuffling_seed = Hash256::from_low_u64_le(1);
state.previous_justified_epoch = epoch - 3;
state.current_justified_epoch = epoch - 2;
state.justification_bitfield = u64::max_value();
state.finalized_epoch = epoch - 3;
state.validator_registry_update_epoch = epoch - 3;
}
/// Creates a full set of attestations for the `BeaconState`. Each attestation has full
/// participation from its committee and references the expected beacon_block hashes.
///
/// These attestations should be fully conducive to justification and finalization.
pub fn insert_attestations(&mut self, spec: &ChainSpec) {
let state = &mut self.state;
state
.build_epoch_cache(RelativeEpoch::Previous, spec)
.unwrap();
state
.build_epoch_cache(RelativeEpoch::Current, spec)
.unwrap();
let current_epoch = state.current_epoch(spec);
let previous_epoch = state.previous_epoch(spec);
let first_slot = previous_epoch.start_slot(spec.slots_per_epoch).as_u64();
let last_slot = current_epoch.end_slot(spec.slots_per_epoch).as_u64()
- spec.min_attestation_inclusion_delay;
let last_slot = std::cmp::min(state.slot.as_u64(), last_slot);
for slot in first_slot..=last_slot {
let slot = Slot::from(slot);
let committees = state
.get_crosslink_committees_at_slot(slot, spec)
.unwrap()
.clone();
for crosslink_committee in committees {
let mut builder = TestingPendingAttestationBuilder::new(
state,
crosslink_committee.shard,
slot,
spec,
);
// The entire committee should have signed the pending attestation.
let signers = vec![true; crosslink_committee.committee.len()];
builder.add_committee_participation(signers);
let attestation = builder.build();
if attestation.data.slot.epoch(spec.slots_per_epoch) < state.current_epoch(spec) {
state.previous_epoch_attestations.push(attestation)
} else {
state.current_epoch_attestations.push(attestation)
}
}
}
}
}
| 38.415385 | 99 | 0.625451 |
e8efc105e9f3cbbde40b27932596704d9c43c56a | 17,380 | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub use self::types::*;
use exonum::{
crypto::{Hash, PublicKey, PUBLIC_KEY_LENGTH},
helpers::{Height, Round, ValidatorId},
merkledb::{BinaryValue, ObjectHash},
messages::{AnyTx, Precommit, SignedMessage, Verified, SIGNED_MESSAGE_MIN_SIZE},
};
use std::borrow::Cow;
mod types;
/// Size of an empty `TransactionsResponse`.
pub const TX_RES_EMPTY_SIZE: usize = SIGNED_MESSAGE_MIN_SIZE + PUBLIC_KEY_LENGTH + 8;
/// When we add transaction to `TransactionResponse` message we will add some overhead
/// to the message size due to Protobuf. This is the higher bound on this overhead.
///
/// ```text
/// Tx response message size <= TX_RES_EMPTY_SIZE
/// + (tx1 size + TX_RES_PB_OVERHEAD_PAYLOAD)
/// + (tx2 size + TX_RES_PB_OVERHEAD_PAYLOAD)
/// + ...
/// ```
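///
/// For example, a response carrying two transactions of 100 and 200 bytes is at most
/// `TX_RES_EMPTY_SIZE + (100 + 8) + (200 + 8)` bytes long.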
pub const TX_RES_PB_OVERHEAD_PAYLOAD: usize = 8;
/// Service messages.
#[derive(Debug, Clone, PartialEq)]
pub enum Service {
/// Transaction message.
AnyTx(Verified<AnyTx>),
/// Connect message.
Connect(Verified<Connect>),
/// Status message.
Status(Verified<Status>),
}
impl Service {
fn as_raw(&self) -> &SignedMessage {
match self {
Self::AnyTx(ref msg) => msg.as_raw(),
Self::Connect(ref msg) => msg.as_raw(),
Self::Status(ref msg) => msg.as_raw(),
}
}
}
/// Consensus messages.
#[derive(Debug, Clone, PartialEq)]
pub enum Consensus {
/// `Precommit` message.
Precommit(Verified<Precommit>),
/// `Propose` message.
Propose(Verified<Propose>),
/// `Prevote` message.
Prevote(Verified<Prevote>),
}
impl Consensus {
fn as_raw(&self) -> &SignedMessage {
match self {
Self::Precommit(msg) => msg.as_raw(),
Self::Propose(msg) => msg.as_raw(),
Self::Prevote(msg) => msg.as_raw(),
}
}
}
/// Response messages.
#[derive(Debug, Clone, PartialEq)]
pub enum Responses {
/// Transactions response message.
TransactionsResponse(Verified<TransactionsResponse>),
/// Block response message.
BlockResponse(Verified<BlockResponse>),
}
impl Responses {
fn as_raw(&self) -> &SignedMessage {
match self {
Self::TransactionsResponse(msg) => msg.as_raw(),
Self::BlockResponse(msg) => msg.as_raw(),
}
}
}
impl From<Verified<TransactionsResponse>> for Responses {
fn from(msg: Verified<TransactionsResponse>) -> Self {
Self::TransactionsResponse(msg)
}
}
impl From<Verified<BlockResponse>> for Responses {
fn from(msg: Verified<BlockResponse>) -> Self {
Self::BlockResponse(msg)
}
}
/// Request messages.
#[derive(Debug, Clone, PartialEq)]
pub enum Requests {
/// Propose request message.
ProposeRequest(Verified<ProposeRequest>),
/// Transactions request message.
TransactionsRequest(Verified<TransactionsRequest>),
/// Prevotes request message.
PrevotesRequest(Verified<PrevotesRequest>),
/// Peers request message.
PeersRequest(Verified<PeersRequest>),
/// Block request message.
BlockRequest(Verified<BlockRequest>),
/// Request of uncommitted transactions.
PoolTransactionsRequest(Verified<PoolTransactionsRequest>),
}
impl Requests {
fn as_raw(&self) -> &SignedMessage {
match self {
Self::ProposeRequest(msg) => msg.as_raw(),
Self::TransactionsRequest(msg) => msg.as_raw(),
Self::PrevotesRequest(msg) => msg.as_raw(),
Self::PeersRequest(msg) => msg.as_raw(),
Self::BlockRequest(msg) => msg.as_raw(),
Self::PoolTransactionsRequest(msg) => msg.as_raw(),
}
}
}
/// Representation of the Exonum message which is divided into categories.
#[derive(Debug, Clone, PartialEq)]
pub enum Message {
/// Service messages.
Service(Service),
/// Consensus messages.
Consensus(Consensus),
/// Responses messages.
Responses(Responses),
/// Requests messages.
Requests(Requests),
}
impl Message {
/// Deserialize message from signed message.
pub fn from_signed(signed: SignedMessage) -> anyhow::Result<Self> {
signed.into_verified::<ExonumMessage>().map(From::from)
}
/// Checks buffer and returns instance of `Message`.
pub fn from_raw_buffer(buffer: Vec<u8>) -> anyhow::Result<Self> {
SignedMessage::from_bytes(buffer.into()).and_then(Self::from_signed)
}
/// Get inner `SignedMessage`.
pub fn as_raw(&self) -> &SignedMessage {
match self {
Self::Service(ref msg) => msg.as_raw(),
Self::Consensus(ref msg) => msg.as_raw(),
Self::Requests(ref msg) => msg.as_raw(),
Self::Responses(ref msg) => msg.as_raw(),
}
}
}
impl PartialEq<SignedMessage> for Message {
fn eq(&self, other: &SignedMessage) -> bool {
self.as_raw() == other
}
}
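// For every `$concrete: $category` pair this macro generates a `From<Verified<$concrete>>`
// impl for `Message` and a `TryFrom<Message>` impl for `Verified<$concrete>`, plus a single
// `From<Verified<ExonumMessage>>` impl that dispatches on the payload variant.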
macro_rules! impl_message_from_verified {
( $($concrete:ident: $category:ident),* ) => {
$(
impl From<Verified<$concrete>> for Message {
fn from(msg: Verified<$concrete>) -> Self {
Self::$category($category::$concrete(msg))
}
}
impl std::convert::TryFrom<Message> for Verified<$concrete> {
type Error = anyhow::Error;
fn try_from(msg: Message) -> Result<Self, Self::Error> {
if let Message::$category($category::$concrete(msg)) = msg {
Ok(msg)
} else {
Err(anyhow::format_err!(
"Given message is not a {}::{}",
stringify!($category),
stringify!($concrete)
))
}
}
}
)*
impl From<Verified<ExonumMessage>> for Message {
fn from(msg: Verified<ExonumMessage>) -> Self {
match msg.payload() {
$(
ExonumMessage::$concrete(_) => {
let inner = msg.downcast_map(|payload| match payload {
ExonumMessage::$concrete(payload) => payload,
_ => unreachable!(),
});
Self::from(inner)
}
)*
}
}
}
};
}
impl_message_from_verified! {
AnyTx: Service,
Connect: Service,
Status: Service,
Precommit: Consensus,
Prevote: Consensus,
Propose: Consensus,
BlockResponse: Responses,
TransactionsResponse: Responses,
BlockRequest: Requests,
PeersRequest: Requests,
PrevotesRequest: Requests,
ProposeRequest: Requests,
TransactionsRequest: Requests,
PoolTransactionsRequest: Requests
}
impl Requests {
    /// Returns the public key of the message recipient.
pub fn to(&self) -> PublicKey {
match self {
Self::ProposeRequest(msg) => msg.payload().to,
Self::TransactionsRequest(msg) => msg.payload().to,
Self::PrevotesRequest(msg) => msg.payload().to,
Self::PeersRequest(msg) => msg.payload().to,
Self::BlockRequest(msg) => msg.payload().to,
Self::PoolTransactionsRequest(msg) => msg.payload().to,
}
}
    /// Returns the public key of the message author.
pub fn author(&self) -> PublicKey {
match self {
Self::ProposeRequest(msg) => msg.author(),
Self::TransactionsRequest(msg) => msg.author(),
Self::PrevotesRequest(msg) => msg.author(),
Self::PeersRequest(msg) => msg.author(),
Self::BlockRequest(msg) => msg.author(),
Self::PoolTransactionsRequest(msg) => msg.author(),
}
}
}
impl Consensus {
    /// Returns the public key of the message author.
pub fn author(&self) -> PublicKey {
match self {
Self::Propose(msg) => msg.author(),
Self::Prevote(msg) => msg.author(),
Self::Precommit(msg) => msg.author(),
}
}
/// Returns validator id of the message sender.
pub fn validator(&self) -> ValidatorId {
match self {
Self::Propose(msg) => msg.payload().validator,
Self::Prevote(msg) => msg.payload().validator,
Self::Precommit(msg) => msg.payload().validator,
}
}
    /// Returns the epoch the message belongs to.
pub fn epoch(&self) -> Height {
match self {
Self::Propose(msg) => msg.payload().epoch,
Self::Prevote(msg) => msg.payload().epoch,
Self::Precommit(msg) => msg.payload().epoch,
}
}
/// Returns round of the message.
pub fn round(&self) -> Round {
match self {
Self::Propose(msg) => msg.payload().round,
Self::Prevote(msg) => msg.payload().round,
Self::Precommit(msg) => msg.payload().round,
}
}
}
impl BinaryValue for Message {
fn to_bytes(&self) -> Vec<u8> {
self.as_raw().to_bytes()
}
fn from_bytes(value: Cow<'_, [u8]>) -> anyhow::Result<Self> {
let message = SignedMessage::from_bytes(value)?;
Self::from_signed(message)
}
}
impl ObjectHash for Message {
fn object_hash(&self) -> Hash {
self.as_raw().object_hash()
}
}
#[cfg(test)]
mod tests {
use chrono::Utc;
use exonum::{
blockchain::{AdditionalHeaders, Block, BlockProof},
crypto::{self, KeyPair},
merkledb::ObjectHash,
};
use pretty_assertions::assert_eq;
use super::*;
#[test]
fn test_verified_from_signed_correct_signature() {
let keypair = KeyPair::random();
let msg = Status {
epoch: Height(0),
blockchain_height: Height(0),
last_hash: Hash::zero(),
pool_size: 0,
};
let protocol_message = ExonumMessage::from(msg.clone());
let signed = SignedMessage::new(
protocol_message.clone(),
keypair.public_key(),
keypair.secret_key(),
);
let verified_protocol = signed.clone().into_verified::<ExonumMessage>().unwrap();
assert_eq!(*verified_protocol.payload(), protocol_message);
let verified_status = signed.clone().into_verified::<Status>().unwrap();
assert_eq!(*verified_status.payload(), msg);
// Wrong variant
let err = signed.into_verified::<Precommit>().unwrap_err();
assert_eq!(err.to_string(), "Failed to decode message from payload.");
}
#[test]
fn test_verified_from_signed_incorrect_signature() {
let keypair = KeyPair::random();
let msg = Status {
epoch: Height(0),
blockchain_height: Height(0),
last_hash: Hash::zero(),
pool_size: 0,
};
let protocol_message = ExonumMessage::from(msg);
let mut signed =
SignedMessage::new(protocol_message, keypair.public_key(), keypair.secret_key());
// Update author
signed.author = KeyPair::random().public_key();
let err = signed.into_verified::<ExonumMessage>().unwrap_err();
assert_eq!(err.to_string(), "Failed to verify signature.");
}
#[test]
fn test_verified_status_binary_value() {
let keypair = KeyPair::random();
let msg = Verified::from_value(
Status {
epoch: Height(0),
blockchain_height: Height(0),
last_hash: Hash::zero(),
pool_size: 0,
},
keypair.public_key(),
keypair.secret_key(),
);
assert_eq!(msg.object_hash(), msg.as_raw().object_hash());
let bytes = msg.to_bytes();
let msg2 = Verified::<Status>::from_bytes(bytes.into()).unwrap();
assert_eq!(msg, msg2);
}
#[test]
fn test_tx_response_empty_size() {
let keys = KeyPair::random();
let msg = TransactionsResponse::new(keys.public_key(), vec![]);
let msg = Verified::from_value(msg, keys.public_key(), keys.secret_key());
assert_eq!(TX_RES_EMPTY_SIZE, msg.into_bytes().len())
}
#[test]
fn test_tx_response_with_txs_size() {
let keys = KeyPair::random();
let txs = vec![
vec![1_u8; 8],
vec![2_u8; 16],
vec![3_u8; 64],
vec![4_u8; 256],
vec![5_u8; 4096],
];
let txs_size = txs.iter().fold(0, |acc, tx| acc + tx.len());
let pb_max_overhead = TX_RES_PB_OVERHEAD_PAYLOAD * txs.len();
let msg = TransactionsResponse::new(keys.public_key(), txs);
let msg = Verified::from_value(msg, keys.public_key(), keys.secret_key());
assert!(TX_RES_EMPTY_SIZE + txs_size + pb_max_overhead >= msg.into_bytes().len())
}
#[test]
fn test_block() {
let keys = KeyPair::random();
let ts = Utc::now();
let txs = [2];
let tx_count = txs.len() as u32;
let content = Block {
height: Height(500),
tx_count,
prev_hash: crypto::hash(&[1]),
tx_hash: crypto::hash(&txs),
state_hash: crypto::hash(&[3]),
error_hash: crypto::hash(&[4]),
additional_headers: AdditionalHeaders::new(),
};
let precommits = vec![
Verified::from_value(
Precommit::new(
ValidatorId(123),
Height(15),
Round(25),
crypto::hash(&[1, 2, 3]),
crypto::hash(&[3, 2, 1]),
ts,
),
keys.public_key(),
keys.secret_key(),
),
Verified::from_value(
Precommit::new(
ValidatorId(13),
Height(25),
Round(35),
crypto::hash(&[4, 2, 3]),
crypto::hash(&[3, 3, 1]),
ts,
),
keys.public_key(),
keys.secret_key(),
),
Verified::from_value(
Precommit::new(
ValidatorId(323),
Height(15),
Round(25),
crypto::hash(&[1, 1, 3]),
crypto::hash(&[5, 2, 1]),
ts,
),
keys.public_key(),
keys.secret_key(),
),
];
let transactions = [
Verified::from_value(
Status::new(Height(2), Height(2), crypto::hash(&[]), 0),
keys.public_key(),
keys.secret_key(),
),
Verified::from_value(
Status::new(Height(4), Height(4), crypto::hash(&[2]), 0),
keys.public_key(),
keys.secret_key(),
),
Verified::from_value(
Status::new(Height(7), Height(7), crypto::hash(&[3]), 0),
keys.public_key(),
keys.secret_key(),
),
]
.iter()
.map(ObjectHash::object_hash)
.collect::<Vec<_>>();
let precommits_buf: Vec<_> = precommits.iter().map(BinaryValue::to_bytes).collect();
let block = Verified::from_value(
BlockResponse::new(
keys.public_key(),
content.clone(),
precommits_buf.clone(),
transactions.iter().cloned(),
),
keys.public_key(),
keys.secret_key(),
);
assert_eq!(block.author(), keys.public_key());
assert_eq!(block.payload().to, keys.public_key());
assert_eq!(block.payload().block, content);
assert_eq!(block.payload().precommits, precommits_buf);
assert_eq!(block.payload().transactions, transactions);
let block2: Verified<BlockResponse> = SignedMessage::from_bytes(block.to_bytes().into())
.unwrap()
.into_verified()
.unwrap();
assert_eq!(block2.author(), keys.public_key());
assert_eq!(block2.payload().to, keys.public_key());
assert_eq!(block2.payload().block, content);
assert_eq!(block2.payload().precommits, precommits_buf);
assert_eq!(block2.payload().transactions, transactions);
let block_proof = BlockProof::new(content, precommits);
let json_str = serde_json::to_string(&block_proof).unwrap();
let block_proof_1: BlockProof = serde_json::from_str(&json_str).unwrap();
assert_eq!(block_proof, block_proof_1);
}
}
| 32.066421 | 96 | 0.553222 |
390ad255f442fa6ad208f546af45a2ee78751172 | 1,594 | use std::{convert::TryInto, path::Path};
use crate::data;
/// Instantiation
impl data::File {
/// Try opening a data file at the given `path`.
///
/// The `object_hash` is a way to read (and write) the same file format with different hashes, as the hash kind
/// isn't stored within the file format itself.
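    ///
    /// # Example
    ///
    /// A hedged sketch; the pack path below is a placeholder, not taken from this
    /// repository.
    ///
    /// ```ignore
    /// let pack = data::File::at("objects/pack/pack-deadbeef.pack", git_hash::Kind::Sha1)?;
    /// ```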
pub fn at(path: impl AsRef<Path>, object_hash: git_hash::Kind) -> Result<data::File, data::header::decode::Error> {
Self::at_inner(path.as_ref(), object_hash)
}
fn at_inner(path: &Path, object_hash: git_hash::Kind) -> Result<data::File, data::header::decode::Error> {
use crate::data::header::N32_SIZE;
let hash_len = object_hash.len_in_bytes();
let data = crate::mmap::read_only(path).map_err(|e| data::header::decode::Error::Io {
source: e,
path: path.to_owned(),
})?;
let pack_len = data.len();
if pack_len < N32_SIZE * 3 + hash_len {
return Err(data::header::decode::Error::Corrupt(format!(
"Pack data of size {} is too small for even an empty pack with shortest hash",
pack_len
)));
}
let (kind, num_objects) =
data::header::decode(&data[..12].try_into().expect("enough data after previous check"))?;
Ok(data::File {
data,
path: path.to_owned(),
id: git_features::hash::crc32(path.as_os_str().to_string_lossy().as_bytes()),
version: kind,
num_objects,
hash_len,
object_hash,
})
}
}
| 37.069767 | 119 | 0.574655 |
fee2df0dc6d52375abcfa6a3352cc291cf4d5caf | 235 | // This test checks that #[ockam::test] causes a compile time error
// if the function is passed a param that is not of type `ockam_node::Context`
#[ockam::test]
async fn my_test(ctx: std::string::String) -> ockam_core::Result<()> {}
| 39.166667 | 78 | 0.706383 |
f81aff858c6142b7e15105ebc0eb859005d5a102 | 34,304 | /* ----------------------------------------------------------------------------
Copyright (c) 2018, Microsoft Research, Daan Leijen
This is free software; you can redistribute it and/or modify it under the
terms of the MIT license. A copy of the license can be found in the file
"LICENSE" at the root of this distribution.
-----------------------------------------------------------------------------*/
// memcpy
// Empty page used to initialize the small free pages array
pub static _mi_page_empty: mi_page_t =
mi_page_t{_0: 0,
_1: false,
_2: false,
_3: [0],
_4: 0,
_5: 0,
_6: ptr::null(),
_7: 0,
_8: 0, // free, used, cookie
_9: ptr::null(),
_10: 0,
_11: [0],
_12: 0,
_13: ptr::null(),
_14: ptr::null(),
_15: ptr::null(),};
pub static MI_SMALL_PAGES_EMPTY: [*mut mi_page_t; 130] =
// Empty page queues for every bin
/*131072, Huge queue */
/* Full queue */
// Empty statistics
// --------------------------------------------------------
// Statically allocate an empty heap as the initial
// thread local value for the default heap,
// and statically allocate the backing heap for the main
// thread so it can function without doing any allocation
// itself (as accessing a thread local for the first time
// may lead to allocation itself on some platforms)
// --------------------------------------------------------
[(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t), (&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t),
(&_mi_page_empty as *mut mi_page_t)];
pub static MI_PAGE_QUEUES_EMPTY: [mi_page_queue_t; 66] =
[mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 2 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 3 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 4 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 5 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 6 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 7 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 8 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 10 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 12 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 14 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 16 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 20 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 24 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 28 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 32 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 40 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 48 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 56 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 64 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 80 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 96 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 112 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 128 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 160 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 192 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 224 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 256 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 320 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 384 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 448 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 512 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 640 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 768 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 896 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1024 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1280 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1536 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 1792 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 2048 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 2560 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 3072 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 3584 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 4096 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 5120 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 6144 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 7168 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 8192 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 10240 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 12288 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 14336 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 16384 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 20480 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 24576 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 28672 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 32768 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 40960 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 49152 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 57344 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 65536 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 81920 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 98304 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2: 114688 * std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2:
((((1 << (6 + (13 + 3))) / 8) >> 3) + 1) *
std::mem::size_of::<usize>(),},
mi_page_queue_t{_0: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_1: (ptr::null_mut() as *mut c_void) as *mut mi_page_t,
_2:
((((1 << (6 + (13 + 3))) / 8) >> 3) + 2) *
std::mem::size_of::<usize>(),}];
pub static _mi_heap_empty: mi_heap_t =
mi_heap_t{_0: ptr::null(),
_1: MI_SMALL_PAGES_EMPTY,
_2: MI_PAGE_QUEUES_EMPTY,
_3: ptr::null(),
_4: 0,
_5: 0,
_6: 0,
_7: 0,
_8: false,};
pub static mut _mi_heap_default: *mut mi_heap_t =
&_mi_heap_empty as *mut mi_heap_t;
pub static tld_main_stats: *mut mi_stats_t =
(&mut tld_main as *mut u8).offset(offsetof::<mi_tld_t>("stats")) as
*mut mi_stats_t;
pub static MI_STATS_NULL: mi_stat_count_t =
// segments
// os
mi_stat_count_t{_0: 0, _1: 0, _2: 0, _3: 0,};
pub static mut tld_main: mi_tld_t =
mi_tld_t{_0: 0u64,
_1: &mut _mi_heap_main,
_2:
[[ptr::null_mut(), ptr::null_mut()], 0, 0, 0,
ptr::null_mut(), tld_main_stats],
_3: [0, ptr::null_mut(), ptr::null_mut(), 0, tld_main_stats],
_4:
[MI_STATS_NULL, [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0]]],};
// stats
pub static mut _mi_heap_main: mi_heap_t =
mi_heap_t{_0: &mut tld_main,
_1: MI_SMALL_PAGES_EMPTY,
_2: MI_PAGE_QUEUES_EMPTY,
_3: ptr::null_mut(),
_4: 0,
_5: 0,
_6: 0,
_7: 0,
_8: false,};
// can reclaim
pub static mut _mi_process_is_initialized: bool = false;
// set to `true` in `mi_process_init`.
pub static mut _mi_stats_main: mi_stats_t =
mi_stats_t{_0: MI_STATS_NULL,
_1: [0, 0, 0, 0],
_2: [0, 0, 0, 0],
_3: [0, 0, 0, 0],
_4: [0, 0, 0, 0],
_5: [0, 0, 0, 0],
_6: [0, 0, 0, 0],
_7: [0, 0, 0, 0],
_8: [0, 0, 0, 0],
_9: [0, 0, 0, 0],
_10: [0, 0, 0, 0],
_11: [0, 0, 0, 0],
_12: [0, 0, 0, 0],
_13: [0, 0, 0, 0],
_14: [0, 0],
_15:
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0],
[0, 0, 0, 0]],};
/* -----------------------------------------------------------
Initialization of random numbers
----------------------------------------------------------- */
#[no_mangle]
pub unsafe extern "C" fn _mi_random_shuffle(mut x: usize) -> usize {
    // by Sebastiano Vigna, see: <http://xoshiro.di.unimi.it/splitmix64.c>
    // by Chris Wellons, see: <https://nullprogram.com/blog/2018/07/31/>
    x ^= x >> 30;
x *= 13787848793156543929; /* can be zero */
x ^= x >> 27;
x *= 10723151780598845931;
x ^= x >> 31;
return x;
}
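// `_mi_random_shuffle` is a splitmix64-style finalizer: each call scrambles the
// previous value, so callers chain it to step a simple PRNG (see `_mi_random_init`
// below and `mi_process_init`).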
// Hopefully, ASLR makes our function address random
// xor with high res time
pub static CLOCK_MONOTONIC: c_int = 1;
#[no_mangle]
pub unsafe extern "C" fn _mi_random_init(mut seed: usize) -> usize {
let mut x = (&mut _mi_random_init as *mut c_void) as usize;
x ^= seed;
    let mut time: timespec = std::mem::zeroed();
clock_gettime(CLOCK_MONOTONIC, &mut time);
x ^= time.tv_sec as usize;
x ^= time.tv_nsec as usize;
// and do a few randomization steps
let mut max = ((x ^ (x >> 7)) & 15) + 1;
    for _ in 0..max { x = _mi_random_shuffle(x); }
return x;
}
#[no_mangle]
pub unsafe extern "C" fn _mi_ptr_cookie(mut p: *const c_void) -> usize {
return ((p as usize) ^ _mi_heap_main.cookie);
}
/* -----------------------------------------------------------
Initialization and freeing of the thread local heaps
----------------------------------------------------------- */
pub struct mi_thread_data_s {
    pub heap: mi_heap_t, // must come first due to cast in `_mi_heap_done`
    pub tld: mi_tld_t,
}
// Initialize the thread local default heap, called from `mi_thread_init`
unsafe fn _mi_heap_init() -> bool {
    if mi_heap_is_initialized(_mi_heap_default) {
        return true;
    }
    if _mi_is_main_thread() {
        // the main heap is statically allocated
        _mi_heap_default = &mut _mi_heap_main;
    } else {
        // use `_mi_os_alloc` to allocate directly from the OS
let mut td =
_mi_os_alloc(std::mem::size_of::<mi_thread_data_t>(),
&mut _mi_stats_main) as *mut mi_thread_data_t;
// Todo: more efficient allocation?
if td.is_null() {
_mi_error_message("failed to allocate thread local heap memory\n");
return false;
}
let mut tld = &mut td.tld;
let mut heap = &mut td.heap;
memcpy(heap as *mut _, &_mi_heap_empty,
std::mem::size_of::<mi_heap_t>());
heap.thread_id = _mi_thread_id();
heap.random = _mi_random_init(heap.thread_id);
heap.cookie = ((heap as usize) ^ _mi_heap_random(heap)) | 1;
heap.tld = tld;
memset(tld as *mut _, 0, std::mem::size_of::<mi_tld_t>());
tld.heap_backing = heap;
tld.segments.stats = &mut tld.stats;
tld.os.stats = &mut tld.stats;
_mi_heap_default = heap;
}
return false;
}
// Free the thread local default heap (called from `mi_thread_done`)
// reset default heap
// todo: delete all non-backing heaps?
// switch to backing heap and free it
// free if not the main thread (or in debug mode)
pub static MI_DEBUG: c_int = 1;
unsafe fn _mi_heap_done() -> bool {
let mut heap = _mi_heap_default;
if !mi_heap_is_initialized(heap) { return true; }
_mi_heap_default =
        if _mi_is_main_thread() {
&mut _mi_heap_main
} else { &_mi_heap_empty as *mut mi_heap_t };
heap = heap.tld.heap_backing;
if !mi_heap_is_initialized(heap) { return false; }
_mi_stats_done(&mut heap.tld.stats);
if heap != &mut _mi_heap_main {
if heap.page_count > 0 { _mi_heap_collect_abandon(heap); }
_mi_os_free(heap as *mut _, std::mem::size_of::<mi_thread_data_t>(),
&mut _mi_stats_main);
} else if MI_DEBUG > 0 { _mi_heap_destroy_pages(heap); }
return false;
}
// --------------------------------------------------------
// Try to run `mi_thread_done()` automatically so any memory
// owned by the thread but not yet released can be abandoned
// and re-owned by another thread.
//
// 1. windows dynamic library:
// call from DllMain on DLL_THREAD_DETACH
// 2. windows static library:
// use `FlsAlloc` to call a destructor when the thread is done
// 3. unix, pthreads:
// use a pthread key to call a destructor when a pthread is done
//
// In the last two cases we also need to call `mi_process_init`
// to set up the thread local keys.
// --------------------------------------------------------
// nothing to do as it is done in DllMain
// use thread local storage keys to detect thread ending
// use pthread local storage keys to detect thread ending
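// (Only the pthread case applies in this translation: the key is created once in
// `mi_process_setup_auto_thread_done` below and set per thread in `mi_thread_init`,
// so `mi_pthread_done` runs when the thread exits.)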
pub static mut mi_pthread_key: pthread_key_t = 0;
unsafe fn mi_pthread_done(mut value: *mut c_void) {
if !value.is_null() { mi_thread_done(); };
}
// Set up handlers so `mi_thread_done` is called automatically
unsafe fn mi_process_setup_auto_thread_done() {
    let mut tls_initialized = false; // fine if it races
    if tls_initialized {
        return; // already initialized
    }
tls_initialized = true;
pthread_key_create(&mut mi_pthread_key, &mut mi_pthread_done);
}
#[no_mangle]
pub unsafe extern "C" fn _mi_is_main_thread() -> bool {
return (_mi_heap_main.thread_id == 0 ||
_mi_heap_main.thread_id == _mi_thread_id());
}
// This is called from the `mi_malloc_generic`
#[no_mangle]
pub unsafe extern "C" fn mi_thread_init() {
// ensure our process has started already
mi_process_init();
// initialize the thread local default heap
if _mi_heap_init() {
return; // returns true if already initialized
}
// don't further initialize for the main thread
if _mi_is_main_thread() { return; }
_mi_stat_increase(&mut (mi_get_default_heap().tld.stats.threads), 1);
// set hooks so our mi_thread_done() will be called
// nothing to do as it is done in DllMain
// set to a dummy value so that `mi_fls_done` is called
pthread_setspecific(mi_pthread_key,
(_mi_thread_id() | 1) as *mut c_void as
*const c_void);
// set to a dummy value so that `mi_pthread_done` is called
_mi_verbose_message("thread init: 0x%zx\n", _mi_thread_id());
}
#[no_mangle]
pub unsafe extern "C" fn mi_thread_done() {
// stats
let mut heap = mi_get_default_heap();
    if !_mi_is_main_thread() && mi_heap_is_initialized(heap) {
_mi_stat_decrease(&mut (heap.tld.stats.threads), 1);
}
// abandon the thread local heap
if _mi_heap_done() {
return; // returns true if already ran
}
if !_mi_is_main_thread() {
_mi_verbose_message("thread done: 0x%zx\n", _mi_thread_id());
};
}
// --------------------------------------------------------
// Run functions on process init/done, and thread init/done
// --------------------------------------------------------
#[no_mangle]
pub unsafe extern "C" fn mi_process_init() {
// ensure we are called once
    if _mi_process_is_initialized { return; }
_mi_process_is_initialized = true;
_mi_heap_main.thread_id = _mi_thread_id();
_mi_verbose_message("process init: 0x%zx\n", _mi_heap_main.thread_id);
let mut random = _mi_random_init(_mi_heap_main.thread_id);
_mi_heap_main.cookie = (&mut _mi_heap_main as usize) ^ random;
_mi_heap_main.random = _mi_random_shuffle(random);
_mi_verbose_message("debug level : %d\n", MI_DEBUG);
atexit(&mut mi_process_done);
mi_process_setup_auto_thread_done();
mi_stats_reset();
}
unsafe fn mi_process_done() {
// only shutdown if we were initialized
    if !_mi_process_is_initialized { return; }
// ensure we are called once
let mut process_done = false;
    if process_done { return; }
process_done = true;
mi_collect(true);
if mi_option_is_enabled(mi_option_show_stats) != 0 ||
mi_option_is_enabled(mi_option_verbose) != 0 {
mi_stats_print(ptr::null_mut());
}
_mi_verbose_message("process done: 0x%zx\n", _mi_heap_main.thread_id);
}
// Windows DLL: easy to hook into process_init and thread_done
// C++: use static initialization to detect process start
// GCC,Clang: use the constructor attribute
unsafe fn _mi_process_init() { mi_process_init(); }
| 56.144026 | 84 | 0.543406 |
de52bfab9f091e790807ebf67b602be4a2a6b5e3 | 6,477 | use std::collections::btree_map::Iter;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::ops::{Add, AddAssign};
use crate::AHashMap;
use crate::soc::bdd::differential::wd::{NcWDistribution, NWDistribution, WDCountV2};
use crate::soc::bdd::differential::wd::distribution::Node2NodeDistribution;
use crate::soc::Id;
use super::PathCount;
#[derive(Hash, Debug, Clone, Eq, PartialEq)]
pub struct EndNodeDist<W: NcWDistribution = WDCountV2> {
    // For some reason, the compiler claims that Hash is not satisfied when AHashMap is used here. TODO: check whether this has been fixed.
map: BTreeMap<Id, W>,
}
impl<W: NcWDistribution> EndNodeDist<W> {
#[inline]
pub fn iter(&self) -> Iter<'_, Id, W> {
self.map.iter()
}
#[inline]
pub fn nr_of_end_nodes(&self) -> usize {
self.map.len()
}
}
impl<W: NcWDistribution> NWDistribution for EndNodeDist<W> {
const SUPPORTED_DISTRIBUTION_LEN: usize = W::SUPPORTED_DISTRIBUTION_LEN;
#[inline]
fn new_zeroed() -> Self {
Self {
map: BTreeMap::new(),
}
}
#[inline]
fn new_trivial(id: &Id) -> Self {
let mut map = BTreeMap::new();
map.insert(*id, W::new_trivial(id));
Self {
map,
}
}
#[inline]
fn increment_distribution(&mut self) {
for wdc in self.map.iter_mut() {
wdc.1.increment_distribution();
}
}
#[inline]
fn lowest_existing_weight(&self) -> u32 {
let mut lews: Vec<u32> = self.map.iter()
.map(|(_, dist)| (dist.lowest_existing_weight()))
// Would be nice if I could short-circuit immediately if 0 is seen...
.collect();
lews.sort();
lews[0]
}
#[inline]
fn lowest_existing_non_trivial_weight(&self) -> Option<u32> {
let mut lews: Vec<u32> = self.map.iter()
.filter_map(|(_, dist)| dist.lowest_existing_non_trivial_weight())
.collect();
        // Old code, in case I've misunderstood this one. TODO: bug check!
// .map(|(_, dist)| (dist.lowest_existing_non_trivial_weight()))
// .filter_map()
// .filter(|lew| lew.is_some())
// .map(|lew| lew.unwrap())
// .collect();
if lews.is_empty() {
None
} else {
lews.sort();
Some(lews[0])
}
}
#[inline]
fn contains_trivial_lew(&self) -> bool {
for (_, dist) in self.map.iter() {
            if dist.contains_trivial_lew() {
return true
}
}
false
}
#[inline]
fn existing_weights(&self) -> BTreeSet<u32> {
self.map.iter()
.map(|(_, dist)| dist.existing_weights())
.fold(BTreeSet::new(),
|mut acc, dist| { acc.extend(dist); acc }
)
}
}
impl<W: NcWDistribution> Node2NodeDistribution for EndNodeDist<W> {
type W = W;
#[inline]
fn nt_lew_and_e_ids(&self) -> Option<(u32, Vec<Id>)> {
let mut lew_id = BTreeMap::new();
for (id, dist) in self.map.iter() {
let lew = dist.lowest_existing_weight();
let ids = lew_id.entry(lew).or_insert(Vec::new());
ids.push(id.clone());
}
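        // Worked illustration (hypothetical ids): if `lew_id` ends up as {0: [a]},
        // only the trivial path exists and the result is None; if it is
        // {0: [a], 3: [b, c]}, the trivial weight is skipped and the result is
        // Some((3, vec![b, c])).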
let mut iter = lew_id.iter();
        // Get the lowest existing weight; it may or may not be the trivial (zero-weight) path.
let maybe_trivial = iter.next()
.expect("A weight distribution should never be completely empty!");
return if maybe_trivial.0 == &0 {
if lew_id.len() == 1 {
                // The lew is the trivial lew, and it's the only weight present, i.e. only the
                // trivial lew is present.
None
} else {
// The lew is trivial, but more weights are present. Return the lowest.
let (nt_lew, e_ids) = iter.next().unwrap();
Some((nt_lew.clone(), e_ids.clone()))
}
} else {
            // The lew is non-trivial, return it.
Some((maybe_trivial.0.clone(), maybe_trivial.1.clone()))
}
}
#[inline]
fn paths_for_weight_in_id(&self, weight: u32, id: &Id) -> Option<&PathCount> {
self.map.get(id)?.paths_for_weight(weight)
}
#[inline]
fn paths_for_weight(&self, weight: u32) -> Option<AHashMap<Id, &PathCount>> {
let hm: AHashMap<Id, &PathCount> = self.map.iter()
.map(|(id, dist)| (id, dist.paths_for_weight(weight)))
.filter(|(id, dist)| dist.is_some())
.map(|(id, dist)| (id.clone(), dist.unwrap()))
.collect();
if hm.is_empty() {
None
} else {
Some(hm)
}
}
#[inline]
fn lew_with_paths_per_connection(&self) -> AHashMap<Id, (u32, &PathCount)> {
self.map.iter()
.map(|(id, dist)| (id.clone(), dist.lew_with_paths()))
.collect()
}
#[inline]
fn nt_lew_with_paths_per_connection(&self) -> Option<AHashMap<Id, (u32, &PathCount)>> {
let hm: AHashMap<Id, (u32, &PathCount)> =
self.map.iter()
.map(|(id, dist)| (id, dist.nt_lew_with_paths()))
.filter(|(id, dist)| dist.is_some())
.map(|(id, dist)| (id.clone(), dist.unwrap()) )
.collect();
if hm.is_empty() {
None
} else {
Some(hm)
}
}
#[inline]
fn existing_weights_with_paths_per_connection(&self) -> AHashMap<Id, BTreeMap<u32, &PathCount>> {
self.map.iter()
.map(|(id, dist)| (id.clone(), dist.existing_weights_with_counts()))
.collect()
}
#[inline]
fn other_node(&self, other_id: &Id) -> Option<&Self::W> {
self.map.get(other_id)
}
}
impl<W: NcWDistribution> Add<Self> for EndNodeDist<W> {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self::Output {
let mut res = self;
for (id, weight) in rhs.map.iter() {
let tot_weight = res.map.entry(*id).or_insert(W::new_zeroed());
*tot_weight += weight.clone();
}
res
}
}
impl<W: NcWDistribution> AddAssign<Self> for EndNodeDist<W> {
#[inline]
fn add_assign(&mut self, rhs: Self) {
for (id, weight) in rhs.map.iter() {
let tot_weight = self.map.entry(*id).or_insert(W::new_zeroed());
*tot_weight += weight.clone();
}
}
} | 28.786667 | 105 | 0.536359 |
267f49919057550a61e49b0e80f64de2bb82fb8b | 10,817 | #[doc = "Register `sdm1` reader"]
pub struct R(crate::R<SDM1_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SDM1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<SDM1_SPEC>> for R {
fn from(reader: crate::R<SDM1_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `sdm1` writer"]
pub struct W(crate::W<SDM1_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SDM1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<SDM1_SPEC>> for W {
fn from(writer: crate::W<SDM1_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `lo_sdm_flag` reader - "]
pub struct LO_SDM_FLAG_R(crate::FieldReader<bool, bool>);
impl LO_SDM_FLAG_R {
pub(crate) fn new(bits: bool) -> Self {
LO_SDM_FLAG_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_FLAG_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_flag` writer - "]
pub struct LO_SDM_FLAG_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_FLAG_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | ((value as u32 & 0x01) << 20);
self.w
}
}
#[doc = "Field `lo_sdm_rstb_hw` reader - "]
pub struct LO_SDM_RSTB_HW_R(crate::FieldReader<bool, bool>);
impl LO_SDM_RSTB_HW_R {
pub(crate) fn new(bits: bool) -> Self {
LO_SDM_RSTB_HW_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_RSTB_HW_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_rstb_hw` writer - "]
pub struct LO_SDM_RSTB_HW_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_RSTB_HW_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | ((value as u32 & 0x01) << 17);
self.w
}
}
#[doc = "Field `lo_sdm_rstb` reader - "]
pub struct LO_SDM_RSTB_R(crate::FieldReader<bool, bool>);
impl LO_SDM_RSTB_R {
pub(crate) fn new(bits: bool) -> Self {
LO_SDM_RSTB_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_RSTB_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_rstb` writer - "]
pub struct LO_SDM_RSTB_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_RSTB_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16);
self.w
}
}
#[doc = "Field `lo_sdm_bypass` reader - "]
pub struct LO_SDM_BYPASS_R(crate::FieldReader<bool, bool>);
impl LO_SDM_BYPASS_R {
pub(crate) fn new(bits: bool) -> Self {
LO_SDM_BYPASS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_BYPASS_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_bypass` writer - "]
pub struct LO_SDM_BYPASS_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_BYPASS_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | ((value as u32 & 0x01) << 12);
self.w
}
}
#[doc = "Field `lo_sdm_dither_sel` reader - "]
pub struct LO_SDM_DITHER_SEL_R(crate::FieldReader<u8, u8>);
impl LO_SDM_DITHER_SEL_R {
pub(crate) fn new(bits: u8) -> Self {
LO_SDM_DITHER_SEL_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_DITHER_SEL_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_dither_sel` writer - "]
pub struct LO_SDM_DITHER_SEL_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_DITHER_SEL_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | ((value as u32 & 0x03) << 8);
self.w
}
}
#[doc = "Field `lo_sdm_bypass_hw` reader - "]
pub struct LO_SDM_BYPASS_HW_R(crate::FieldReader<bool, bool>);
impl LO_SDM_BYPASS_HW_R {
pub(crate) fn new(bits: bool) -> Self {
LO_SDM_BYPASS_HW_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_BYPASS_HW_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_bypass_hw` writer - "]
pub struct LO_SDM_BYPASS_HW_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_BYPASS_HW_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
#[doc = "Field `lo_sdm_dither_sel_hw` reader - "]
pub struct LO_SDM_DITHER_SEL_HW_R(crate::FieldReader<u8, u8>);
impl LO_SDM_DITHER_SEL_HW_R {
pub(crate) fn new(bits: u8) -> Self {
LO_SDM_DITHER_SEL_HW_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for LO_SDM_DITHER_SEL_HW_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `lo_sdm_dither_sel_hw` writer - "]
pub struct LO_SDM_DITHER_SEL_HW_W<'a> {
w: &'a mut W,
}
impl<'a> LO_SDM_DITHER_SEL_HW_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03) | (value as u32 & 0x03);
self.w
}
}
impl R {
#[doc = "Bit 20"]
#[inline(always)]
pub fn lo_sdm_flag(&self) -> LO_SDM_FLAG_R {
LO_SDM_FLAG_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 17"]
#[inline(always)]
pub fn lo_sdm_rstb_hw(&self) -> LO_SDM_RSTB_HW_R {
LO_SDM_RSTB_HW_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 16"]
#[inline(always)]
pub fn lo_sdm_rstb(&self) -> LO_SDM_RSTB_R {
LO_SDM_RSTB_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 12"]
#[inline(always)]
pub fn lo_sdm_bypass(&self) -> LO_SDM_BYPASS_R {
LO_SDM_BYPASS_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bits 8:9"]
#[inline(always)]
pub fn lo_sdm_dither_sel(&self) -> LO_SDM_DITHER_SEL_R {
LO_SDM_DITHER_SEL_R::new(((self.bits >> 8) & 0x03) as u8)
}
#[doc = "Bit 4"]
#[inline(always)]
pub fn lo_sdm_bypass_hw(&self) -> LO_SDM_BYPASS_HW_R {
LO_SDM_BYPASS_HW_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bits 0:1"]
#[inline(always)]
pub fn lo_sdm_dither_sel_hw(&self) -> LO_SDM_DITHER_SEL_HW_R {
LO_SDM_DITHER_SEL_HW_R::new((self.bits & 0x03) as u8)
}
}
impl W {
#[doc = "Bit 20"]
#[inline(always)]
pub fn lo_sdm_flag(&mut self) -> LO_SDM_FLAG_W {
LO_SDM_FLAG_W { w: self }
}
#[doc = "Bit 17"]
#[inline(always)]
pub fn lo_sdm_rstb_hw(&mut self) -> LO_SDM_RSTB_HW_W {
LO_SDM_RSTB_HW_W { w: self }
}
#[doc = "Bit 16"]
#[inline(always)]
pub fn lo_sdm_rstb(&mut self) -> LO_SDM_RSTB_W {
LO_SDM_RSTB_W { w: self }
}
#[doc = "Bit 12"]
#[inline(always)]
pub fn lo_sdm_bypass(&mut self) -> LO_SDM_BYPASS_W {
LO_SDM_BYPASS_W { w: self }
}
#[doc = "Bits 8:9"]
#[inline(always)]
pub fn lo_sdm_dither_sel(&mut self) -> LO_SDM_DITHER_SEL_W {
LO_SDM_DITHER_SEL_W { w: self }
}
#[doc = "Bit 4"]
#[inline(always)]
pub fn lo_sdm_bypass_hw(&mut self) -> LO_SDM_BYPASS_HW_W {
LO_SDM_BYPASS_HW_W { w: self }
}
#[doc = "Bits 0:1"]
#[inline(always)]
pub fn lo_sdm_dither_sel_hw(&mut self) -> LO_SDM_DITHER_SEL_HW_W {
LO_SDM_DITHER_SEL_HW_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "sdm1.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sdm1](index.html) module"]
pub struct SDM1_SPEC;
impl crate::RegisterSpec for SDM1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [sdm1::R](R) reader structure"]
impl crate::Readable for SDM1_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [sdm1::W](W) writer structure"]
impl crate::Writable for SDM1_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets sdm1 to value 0"]
impl crate::Resettable for SDM1_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 29.554645 | 390 | 0.583618 |
2f10a9cc15a41560bdcb1c2f4bd2221c0429b7bf | 20,919 | use crate::attr::Attributes;
use crate::utils::err;
use proc_macro2::Span;
use proc_macro2::TokenStream;
use proc_macro2::TokenTree;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::AttributeArgs;
use syn::Error;
use syn::Expr;
use syn::Item;
use syn::ItemFn;
use syn::Result;
use syn::Token;
use syn::Type;
#[allow(unused_macros)]
macro_rules! ugly_prefix {
() => {
__crayfish_macro_helper_function__
};
}
fn prepend_ugly_prefix(suffix: &str) -> TokenStream {
let ident: TokenStream = format!("__crayfish_d81432540815a7cb_{}", suffix)
.parse()
.unwrap();
quote!(#ident)
}
fn context_arg_name() -> TokenStream {
prepend_ugly_prefix("arg_ctx")
}
fn at_async_fn_name(fn_name: &TokenStream) -> TokenStream {
prepend_ugly_prefix(&format!("at_async_{}", fn_name))
}
fn at_ff_fn_name(fn_name: &TokenStream) -> TokenStream {
prepend_ugly_prefix(&format!("at_ff_{}", fn_name))
}
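// Generates the helper items emitted for every annotated activity function: an
// `execute_*` wrapper, a serialization/deserialization `handler_*`, and the
// `at_async_*` / `at_ff_*` spawners used at call sites.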
struct HelperFunctionsGenerator {
crayfish_path: TokenStream,
fn_id: TokenStream,
fn_name: TokenStream,
params: Vec<(String, Type)>,
pub ret_type: TokenStream,
}
impl HelperFunctionsGenerator {
fn infer_ret_by_future(ret_ty: &Type) -> Result<TokenStream> {
const INFER_ERR_MSG: &str =
"can not infer return type. Please use 'async' keyword or set a attribute: #[activity(ret = \"Type\")]";
let ret = match ret_ty {
syn::Type::Path(p) => {
// only match BoxFuture<'a, ret_type>
if p.qself.is_none() && p.path.segments.len() == 1 {
let box_fut_t = p.path.segments.last().unwrap();
if &format!("{}", box_fut_t.ident) == "BoxFuture" {
if let syn::PathArguments::AngleBracketed(ref pargs) = box_fut_t.arguments {
if pargs.args.len() == 2 {
if let syn::GenericArgument::Type(t) = pargs.args.last().unwrap() {
return Ok(quote!(#t));
}
}
}
}
}
err(ret_ty, INFER_ERR_MSG)?
}
syn::Type::ImplTrait(p) => {
// only match impl Future<Output=ret>
if p.bounds.len() == 1 {
if let syn::TypeParamBound::Trait(t) = p.bounds.last().unwrap() {
if t.path.segments.len() == 1 {
let output = t.path.segments.last().unwrap();
let trait_ident = &output.ident;
if "e!(#trait_ident).to_string() == "Future" {
if let syn::PathArguments::AngleBracketed(ref pargs) =
output.arguments
{
if pargs.args.len() == 1 {
if let syn::GenericArgument::Binding(b) =
pargs.args.last().unwrap()
{
let ty = &b.ty;
return Ok(quote!(#ty));
}
}
}
}
}
}
}
err(ret_ty, INFER_ERR_MSG)?
}
_ => err(ret_ty, INFER_ERR_MSG)?,
};
Ok(ret)
}
fn infer_ret(function: &ItemFn) -> Result<TokenStream> {
let ItemFn {
sig: syn::Signature {
output, asyncness, ..
},
..
} = function;
let tk = match asyncness {
Some(_) => match output {
syn::ReturnType::Default => quote!(()),
syn::ReturnType::Type(_, t) => quote!(#t),
},
None => match output {
syn::ReturnType::Default => err(output, "should return a future")?,
syn::ReturnType::Type(_, t) => Self::infer_ret_by_future(&**t)?,
},
};
Ok(tk)
}
fn new(function: &ItemFn, crayfish_path: &TokenStream, attrs: &Attributes) -> Result<Self> {
let crayfish_path = crayfish_path.clone();
let ItemFn {
sig: syn::Signature { ident, inputs, .. },
..
} = function;
let fn_name: TokenStream = quote!(#ident);
let file: TokenStream = file!().parse().unwrap();
let line: TokenStream = line!().to_string().parse().unwrap();
let path: TokenStream = module_path!().parse().unwrap();
let fn_id: TokenStream = fn_hash(&fn_name, &file, &line, &path)
.to_string()
.parse()
.unwrap();
let ret_type: TokenStream = match &attrs.ret_type {
Some(t) => quote!(#t),
None => Self::infer_ret(function)?,
};
// first param is impl Context
let params = inputs.clone().into_iter();
let params: Vec<(String, Type)> = params
.enumerate()
.map(|(i, p)| match p {
syn::FnArg::Typed(pt) => (format!("__crayfish_arg{}", i), *pt.ty),
_ => panic!("method not implemented"),
})
.collect();
Ok(HelperFunctionsGenerator {
crayfish_path,
fn_id,
fn_name,
params,
ret_type,
})
}
fn punctuated_params(&self) -> TokenStream {
let ps: Vec<_> = self
.params
.iter()
.map(|(ref ident, ref ty)| {
format!("{}:{}", ident, quote!(#ty))
.parse::<TokenStream>()
.unwrap()
})
.collect();
quote!(#(#ps),*)
}
fn param_ident_list(&self) -> Vec<TokenStream> {
self.params
.iter()
.map(|(ref ident, _)| ident.parse::<TokenStream>().unwrap())
.collect()
}
fn handler_fn_name(&self) -> TokenStream {
prepend_ugly_prefix(&format!("handler_{}", self.fn_name))
}
fn execute_fn_name(&self) -> TokenStream {
prepend_ugly_prefix(&format!("execute_{}", self.fn_name))
}
fn gen_at_ff(&self) -> TokenStream {
let crayfish_path = &self.crayfish_path;
let fn_id = &self.fn_id;
let at_ff_fn_name = at_ff_fn_name(&self.fn_name);
let execute_fn_name = self.execute_fn_name();
let punctuated_params = self.punctuated_params();
let param_ident_list = self.param_ident_list();
let profiling_label =
syn::LitStr::new(&format!("{}_ff_serialize", self.fn_name), Span::call_site());
quote! {
fn #at_ff_fn_name(
a_id: #crayfish_path::activity::ActivityId,
dst_place: #crayfish_path::place::Place,
#punctuated_params
) {
let fn_id = #fn_id; // macro
if dst_place == #crayfish_path::place::here() {
#crayfish_path::spawn(#execute_fn_name(a_id, true, #(#param_ident_list),*)); // macro
} else {
// trace!("spawn activity:{} at place: {}", a_id, dst_place);
let mut builder = #crayfish_path::activity::TaskItemBuilder::new(fn_id, dst_place, a_id);
#crayfish_path::profiling_start!(#profiling_label);
#(builder.arg(#param_ident_list);)*
#crayfish_path::profiling_stop!();
let item = builder.build_box();
use #crayfish_path::runtime::ApgasContext;
#crayfish_path::runtime::ConcreteContext::send(item);
}
}
}
}
fn gen_at_async(&self) -> TokenStream {
let crayfish_path = &self.crayfish_path;
let fn_id = &self.fn_id;
let ret_type = &self.ret_type;
let at_async_fn_name = at_async_fn_name(&self.fn_name);
let execute_fn_name = self.execute_fn_name();
let punctuated_params = self.punctuated_params();
let param_ident_list = self.param_ident_list();
let profiling_label = syn::LitStr::new(
&format!("{}_async_serialize", self.fn_name),
Span::call_site(),
);
quote! {
fn #at_async_fn_name(
a_id: #crayfish_path::activity::ActivityId,
dst_place: #crayfish_path::place::Place,
#punctuated_params
) -> impl #crayfish_path::re_export::futures::Future<Output = #ret_type > {
let fn_id = #fn_id; // macro
let f = #crayfish_path::runtime::wait_single::<#ret_type>(a_id); // macro
if dst_place == #crayfish_path::place::here() {
#crayfish_path::spawn(#execute_fn_name(a_id, true, #(#param_ident_list),*)); // macro
} else {
// trace!("spawn activity:{} at place: {}", a_id, dst_place);
let mut builder = #crayfish_path::activity::TaskItemBuilder::new(fn_id, dst_place, a_id);
#crayfish_path::profiling_start!(#profiling_label);
#(builder.arg(#param_ident_list);)*
#crayfish_path::profiling_stop!();
builder.waited();
let item = builder.build_box();
use #crayfish_path::runtime::ApgasContext;
#crayfish_path::runtime::ConcreteContext::send(item);
}
f
}
}
}
fn gen_execute(&self) -> TokenStream {
let crayfish_path = &self.crayfish_path;
let fn_id = &self.fn_id;
let fn_name = &self.fn_name;
let execute_fn_name = self.execute_fn_name();
let punctuated_params = self.punctuated_params();
let param_ident_list = self.param_ident_list();
quote! {
async fn #execute_fn_name(a_id: #crayfish_path::activity::ActivityId, waited: ::std::primitive::bool, #punctuated_params) {
let fn_id = #fn_id; // macro
use #crayfish_path::global_id::ActivityIdMethods;
use #crayfish_path::re_export::futures::FutureExt;
let finish_id = a_id.get_finish_id();
use #crayfish_path::runtime::ApgasContext;
let mut ctx = #crayfish_path::runtime::ConcreteContext::inherit(finish_id);
// ctx seems to be unwind safe
let future = ::std::panic::AssertUnwindSafe(#fn_name(&mut ctx, #(#param_ident_list),* )); //macro
let result = future.catch_unwind().await;
#crayfish_path::essence::send_activity_result(ctx, a_id, fn_id, waited, result);
}
}
}
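// Generates the message handler: it deserializes a received TaskItem back into the
// original arguments, awaits the execute wrapper, and registers itself under the
// function id via `inventory::submit!` so the runtime can dispatch incoming items.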
fn gen_handler(&self) -> TokenStream {
let crayfish_path = &self.crayfish_path;
let fn_id = &self.fn_id;
let handler_fn_name = self.handler_fn_name();
let execute_fn_name = self.execute_fn_name();
let extract_args =
(0..self.params.len()).map(|i| format!("arg{}", i).parse::<TokenStream>().unwrap());
let arg_stmts = (0..self.params.len())
.map(|i| syn::parse_str::<syn::Stmt>(&format!("let arg{} = e.arg();", i)).unwrap());
let profiling_label = syn::LitStr::new(
&format!("{}_deserialization", self.fn_name),
Span::call_site(),
);
quote! {
fn #handler_fn_name(item: #crayfish_path::activity::TaskItem) -> #crayfish_path::re_export::futures::future::BoxFuture<'static, ()> {
use #crayfish_path::re_export::futures::FutureExt;
async move {
let waited = item.is_waited();
let mut e = #crayfish_path::activity::TaskItemExtracter::new(item);
let a_id = e.activity_id();
// wait until the function returns
use #crayfish_path::global_id::ActivityIdMethods;
// #crayfish_path::logging::trace!(
// "Got activity:{} from {}", a_id, a_id.get_spawned_place()
// );
#crayfish_path::profiling_start!(#profiling_label);
#(#arg_stmts)*
#crayfish_path::profiling_stop!();
#execute_fn_name(a_id, waited, #(#extract_args),*).await;
}
.boxed()
}
// register function
const _:() = {
use #crayfish_path::inventory;
#crayfish_path::inventory::submit! {
#crayfish_path::runtime_meta::FunctionMetaData::new(
#fn_id,
#handler_fn_name,
::std::string::String::from("basic"),
::std::string::String::from(::std::file!()),
::std::line!(),
::std::string::String::from(::std::module_path!())
)
};
};
}
}
}
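// Core of the attribute expansion: rewrites the user's function to take a runtime context
// as its first argument (desugaring `async fn` into a fn returning a BoxFuture with the
// `'cfctxlt` lifetime), then appends the generated execute, handler, and "at" helpers.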
fn _expand_async_func(attrs: Attributes, function: ItemFn) -> Result<TokenStream> {
// TODO: support re-export crayfish
//
let crayfish_path: TokenStream = attrs.get_path();
let gen = HelperFunctionsGenerator::new(&function, &crayfish_path, &attrs)?;
let execute_fn = gen.gen_execute();
let handler_fn = gen.gen_handler();
let at_async_fn = gen.gen_at_async();
let at_ff_fn = gen.gen_at_ff();
let mut function = function;
// modify fn
let ItemFn {
ref mut sig,
ref mut block,
..
} = function;
let context_arg_name = context_arg_name();
let arg_token;
// change to boxed
if sig.asyncness.is_some() {
sig.asyncness = None;
let ret_type = &gen.ret_type;
sig.output = syn::parse2(
quote!( -> #crayfish_path::re_export::futures::future::BoxFuture<'cfctxlt, #ret_type> ),
)?;
arg_token =
quote!(#context_arg_name: &'cfctxlt mut impl #crayfish_path::runtime::ApgasContext);
sig.generics = syn::parse2(quote!(<'cfctxlt>))?;
*block = Box::new(syn::parse2(quote! {
{
use #crayfish_path::re_export::futures::FutureExt;
async move #block .boxed()
}
})?)
} else {
arg_token = quote!(#context_arg_name: &mut impl #crayfish_path::runtime::ApgasContext);
}
// insert context
let context_arg: syn::FnArg = syn::parse2(arg_token)?;
sig.inputs.insert(0, context_arg);
Ok(quote!(
#function
#execute_fn
#handler_fn
#at_async_fn
#at_ff_fn
))
}
pub(crate) fn expand_async_func(attrs: Attributes, item: Item) -> Result<TokenStream> {
if let Item::Fn(function) = item {
verify_func(&function)?;
_expand_async_func(attrs, function)
} else {
Err(Error::new_spanned(item, "only support function item"))
}
}
// TODO: support method
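// Rejects signatures the macro cannot handle: methods (a receiver argument), variadic
// functions, and generic functions.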
fn verify_func(func: &ItemFn) -> Result<()> {
let generics = &func.sig.generics;
if !func.sig.inputs.is_empty() {
let first_arg = &func.sig.inputs.first().unwrap();
if let syn::FnArg::Receiver(_) = first_arg {
return Err(Error::new_spanned(
first_arg,
"currently doesn't support method",
));
}
}
if func.sig.variadic.is_some() {
return Err(Error::new_spanned(
&func.sig.variadic,
"Crayfish doesn't support variadic functions",
));
}
if !generics.params.is_empty() {
Err(Error::new_spanned(
generics.clone(),
"Crayfish currently doesn't support generics",
))
} else {
Ok(())
}
}
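// Derives the function id used to register and dispatch handlers by hashing the function's
// name, file, line, and module path at macro-expansion time.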
fn fn_hash(
fn_name: &TokenStream,
file: &TokenStream,
line: &TokenStream,
path: &TokenStream,
) -> u64 {
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
let mut hasher = DefaultHasher::new();
let s = quote! { #fn_name #file #line #path }.to_string();
hasher.write(s.as_bytes());
hasher.finish()
}
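// Which spawn flavor an `at`-style macro expands to: `At` yields a future for the result,
// `FireAndForget` does not.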
pub enum SpawnMethod {
At,
FireAndForget,
}
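// Expands an `at`-style macro invocation. The input must be two arguments: the destination
// place and a plain call expression `func(args...)`. The callee's last path segment is
// rewritten to the generated helper for the chosen spawn flavor, and the call is re-emitted
// with a freshly spawned activity id and the place prepended to the original arguments.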
pub fn expand_at(input: proc_macro::TokenStream, spawn: SpawnMethod) -> Result<TokenStream> {
let parser = Punctuated::<Expr, Token![,]>::parse_separated_nonempty;
use syn::parse::Parser;
let args = parser.parse(input)?;
let mut args = args;
// check args num
let expected_arg_num: usize = 2;
if args.len() != expected_arg_num {
err(
&args,
format!(
"this macro takes {} argument but {} arguments were supplied",
expected_arg_num,
args.len()
),
)?;
}
// get func name & call args
let call = args.pop().unwrap().into_value();
let (async_func_name, call_args) =
match call {
Expr::Call(syn::ExprCall {
attrs, func, args, ..
}) => {
if !attrs.is_empty() {
err(&attrs[0], "Crayfish doesn't support attributes here")?;
}
let func_name = match *func {
Expr::Path(p) => {
let mut p = p;
let mut last = p.path.segments.pop().unwrap().into_value();
if !last.arguments.is_empty() {
err(&last, "Crayfish doesn't support generic function")?;
}
let last_ident = &last.ident;
let last_ident_str = match spawn {
SpawnMethod::At => at_async_fn_name,
SpawnMethod::FireAndForget => at_ff_fn_name,
}("e!(#last_ident))
.to_string();
last.ident = syn::Ident::new(last_ident_str.as_str(), last.ident.span());
p.path.segments.push(last);
p
}
thing => return err(thing, "must be a proper function name"),
};
(func_name, args)
}
Expr::MethodCall(_) => return err(&call, "method calls are not supported yet"),
_ => return err(
&call,
"the second argument must be call-like expression: \"func_name(arg0, arg1, ..)\"",
),
};
let place = args.pop();
let context_arg_name = context_arg_name();
let ret = quote! {#async_func_name(#context_arg_name.spawn(), #place #call_args)};
Ok(ret)
}
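// Expands a finish block: `return` and `?` are rejected because they would leave the block
// before the generated wait; the block runs inside a fresh context frame and its value is
// yielded only after `wait_all` on that frame completes.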
pub fn finish(args: Option<AttributeArgs>, input: proc_macro::TokenStream) -> Result<TokenStream> {
let attrs = match args {
Some(args) => Attributes::new(args)?,
None => Attributes::default(),
};
let block = TokenStream::from(input);
// error if return inside finish block
for tree in block.clone().into_iter() {
match tree {
TokenTree::Ident(id) => {
if &id.to_string() == "return" {
return err(id, "returning from a finish block is not allowed until async closures become stable in Rust");
}
}
TokenTree::Punct(p) => {
if p.as_char() == '?' {
return err(p, "the `?` operator is not allowed in a finish block. TODO: this check may produce false positives.");
}
}
_ => (),
}
}
let crayfish_path = attrs.get_path();
let context_arg_name = context_arg_name();
let ret = quote! {
{
use #crayfish_path::runtime::ApgasContext;
let mut #context_arg_name = #crayfish_path::runtime::ConcreteContext::new_frame();
let _block_ret = {
#block
};
#crayfish_path::runtime::wait_all(#context_arg_name).await;
_block_ret
}
};
Ok(ret)
}
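// Expands the main attribute: the user's main must be async; if it takes no arguments a
// `Vec<String>` parameter is inserted, the function is renamed with the internal prefix,
// and a synchronous `main` is generated that hands it to `essence::genesis`.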
pub fn main(args: AttributeArgs, main: ItemFn) -> Result<TokenStream> {
let attrs = Attributes::new(args)?;
let crayfish_path = attrs.get_path();
if main.sig.asyncness.is_none() {
return err(&main.sig, "Crayfish requires main function to be 'async'");
}
let mut main = main;
// change main ident
// check args. if empty, insert
if main.sig.inputs.is_empty() {
let arg = syn::parse2::<syn::FnArg>(quote!(_: ::std::vec::Vec<::std::string::String>))?;
let args: Punctuated<syn::FnArg, Token![,]> = vec![arg].into_iter().collect();
main.sig.inputs = args
}
// rename func
let user_main_name = &main.sig.ident;
let user_main_name = prepend_ugly_prefix(quote!(#user_main_name).to_string().as_str());
main.sig.ident = syn::Ident::new(&user_main_name.to_string(), main.sig.ident.span());
let output = &main.sig.output;
let ret = quote!(
#main
pub fn main() #output{
#crayfish_path::essence::genesis(#user_main_name)
}
);
Ok(ret)
}