hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
0a40bd9b2e57a7840fb8a16929becf6018e8d1f2 | 3,476 | use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
//serialization is used to "bundle" the contract so that it can be put on chain (allows compilation to wasm)
use near_sdk::collections::LookupMap;
use near_sdk::{env, near_bindgen};
near_sdk::setup_alloc!();
//by default, creating a new cargo package will include main.rs
//this must be renamed to lib.rs so that the file will be treated as a library
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
//Borsh serialization "bundles" the contract state so it can be stored on chain
/// Contract state: one status string per account id.
pub struct StatusMessage { //In Rust, the struct and its data fields are defined first, and methods are implemented later
    records: LookupMap<String, String>, // account id -> latest status message set by that account
}
/// Initial state used the first time the contract runs (a Default impl is not
/// required by near_sdk, but it provides a well-defined starting state).
impl Default for StatusMessage {
    fn default() -> Self {
        Self {
            // `b"r"` is a storage-key prefix: it namespaces this LookupMap's
            // entries in on-chain storage so they cannot collide with entries
            // of any other persistent collection in the contract.
            records: LookupMap::new(b"r".to_vec()),
        }
    }
}
#[near_bindgen]
impl StatusMessage {
    /// Records `message` as the status of the transaction's signer.
    ///
    /// `env::signer_account_id()` identifies the account that signed the
    /// transaction, so a caller can only ever overwrite their own entry.
    pub fn set_status(&mut self, message: String) {
        let account_id = env::signer_account_id();
        // LookupMap::insert takes references, so no data is copied here; it
        // inserts (or overwrites) the key-value pair in on-chain storage.
        self.records.insert(&account_id, &message);
    }

    /// Returns the stored status for `account_id`, or `None` if that account
    /// has never called `set_status`.
    pub fn get_status(&self, account_id: String) -> Option<String> {
        // Idiomatic Rust: the final expression is the return value, so the
        // explicit `return ...;` of the original is unnecessary.
        self.records.get(&account_id)
    }
}
#[cfg(not(target_arch = "wasm32"))] //only compile the tests for the native (non-wasm) target
#[cfg(test)] //test module, excluded from release builds
mod tests {
    use super::*;
    use near_sdk::MockedBlockchain;
    use near_sdk::{testing_env, VMContext};

    //used to set up a simulation environment
    /// Builds a mocked `VMContext` so contract methods can run off-chain.
    /// `is_view` marks the simulated call as read-only (no state changes).
    fn get_context(input: Vec<u8>, is_view: bool) -> VMContext {
        VMContext {
            current_account_id: "alice_near".to_string(),
            signer_account_id: "bob_near".to_string(),
            signer_account_pk: vec![0, 1, 2],
            predecessor_account_id: "carol_near".to_string(),
            input,
            block_index: 0,
            block_timestamp: 0,
            account_balance: 0,
            account_locked_balance: 0,
            storage_usage: 0,
            attached_deposit: 0,
            prepaid_gas: 10u64.pow(18),
            random_seed: vec![0, 1, 2],
            is_view,
            output_data_receivers: vec![],
            epoch_height: 0,
        }
    }

    #[test]
    fn set_get_message() {
        let context = get_context(vec![], false); //build the mocked VMContext
        testing_env!(context); //install it as the testing environment
        let mut contract = StatusMessage::default(); //instantiate default StatusMessage struct
        contract.set_status("hello".to_string()); //set status to "hello"
        assert_eq!(
            "hello".to_string(),
            contract.get_status("bob_near".to_string()).unwrap()
        ); //the signer in the mocked context is "bob_near", so its status must now be "hello"
    }

    #[test]
    fn get_nonexistent_message() {
        let context = get_context(vec![], true);
        testing_env!(context);
        let contract = StatusMessage::default();
        assert_eq!(None, contract.get_status("francis.near".to_string())); //important to note that None has a type in Rust
    }
    //Testing Process
    //1. set up context
    //2. instantiate struct
    //3. call methods
    //4. assert equality
}
| 37.782609 | 139 | 0.647871 |
1db88633dd8f242e07ae6021a7d330fca357be20 | 2,194 | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use anyhow::{Context, Error};
use fidl::endpoints::create_endpoints;
use fidl_fuchsia_game_tennis::{PaddleRequest, TennisServiceMarker};
use fuchsia_async as fasync;
use fuchsia_component::client::connect_to_service;
use futures::TryStreamExt;
use parking_lot::Mutex;
use std::io::{self, Read};
use std::sync::Arc;
/// Entry point for the human tennis-paddle client.
///
/// Registers a paddle named "Human" with the tennis service, listens for
/// `NewGame` notifications on a detached background task, and translates raw
/// keyboard bytes read from stdin into up/down/stop paddle commands.
fn main() -> Result<(), Error> {
    let mut executor = fasync::Executor::new().context("Error creating executor")?;
    let tennis_service = connect_to_service::<TennisServiceMarker>()?;
    let (client_end, paddle_controller) = create_endpoints()?;
    let (mut prs, paddle_control_handle) = paddle_controller.into_stream_and_control_handle()?;
    tennis_service.register_paddle("Human", client_end)?;
    let i_am_player_2 = Arc::new(Mutex::new(false));
    let i_am_player_2_clone = i_am_player_2.clone();
    println!("registering with game service");
    // Background task: each time a new game starts, record which side this
    // client was assigned.
    fasync::Task::spawn(async move {
        while let Some(PaddleRequest::NewGame { is_player_2, .. }) = prs.try_next().await.unwrap()
        {
            // TODO: remove unwrap
            if is_player_2 {
                println!("I am player 2");
            } else {
                println!("I am player 1");
            }
            *i_am_player_2_clone.lock() = is_player_2;
        }
    })
    .detach();
    // Input loop: read stdin byte by byte. Arrow keys arrive as the escape
    // sequence ESC '[' 'A'/'B', so matching the final byte is enough. The
    // byte literals below are exactly the numeric values the original code
    // matched (b'A' == 65, b'B' == 66, b' ' == 32) — just readable now.
    let resp: Result<(), Error> = executor.run_singlethreaded(async move {
        while let Some(input) = io::stdin().lock().bytes().next() {
            match input {
                Ok(b'A') => {
                    // final byte of the Up-arrow escape sequence
                    println!("moving up");
                    paddle_control_handle.send_up()?;
                }
                Ok(b'B') => {
                    // final byte of the Down-arrow escape sequence
                    println!("moving down");
                    paddle_control_handle.send_down()?;
                }
                Ok(b' ') => {
                    // spacebar stops the paddle
                    println!("stopping");
                    paddle_control_handle.send_stop()?;
                }
                Ok(_) => (),
                Err(e) => println!("Error is {:?}", e),
            };
        }
        Ok(())
    });
    resp
}
| 37.827586 | 100 | 0.569736 |
f8cdc9e8021129fc4fcc5a67f7414652ca961ef0 | 3,093 | use dominator::clone;
use dominator_helpers::futures::AsyncLoader;
use super::super::{base::state::*, state::GenericState, state::*};
use std::future::Future;
use std::{marker::PhantomData, rc::Rc};
use shared::domain::jig::module::body::{BodyExt, ModeExt, StepExt};
use utils::prelude::*;
/// The "choose a mode" phase of the module editor, generic over the module's
/// raw data, mode, and step types.
pub struct Choose<RawData, Mode, Step>
where
    RawData: BodyExt<Mode, Step> + 'static,
    Mode: ModeExt + 'static,
    Step: StepExt + 'static,
{
    //getting rid of this Box is probably more headache than it's worth
    /// Callback invoked when the user picks a mode; it kicks off the async
    /// transition into the base (editing) phase (see `Choose::new`).
    pub on_mode_change: Box<dyn Fn(Mode)>,
    /// Drives the async work started by `on_mode_change`.
    pub loader: Rc<AsyncLoader>,
    phantom: PhantomData<(RawData, Step)>, //TODO: might not need this once we derive the mode list from RawData
}
impl<RawData, Mode, Step> Choose<RawData, Mode, Step>
where
    RawData: BodyExt<Mode, Step> + 'static,
    Mode: ModeExt + 'static,
    Step: StepExt + 'static,
{
    /// Builds the mode chooser for `app`.
    ///
    /// `init_from_raw` is the async factory that turns raw module data into
    /// the base phase's init data; it is captured by the returned
    /// `on_mode_change` closure and invoked every time a mode is selected.
    pub fn new<
        BaseInitFromRawFn,
        BaseInitFromRawOutput,
        Base,
        Main,
        Sidebar,
        Header,
        Footer,
        Overlay,
    >(
        app: Rc<GenericState<Mode, Step, RawData, Base, Main, Sidebar, Header, Footer, Overlay>>,
        init_from_raw: BaseInitFromRawFn,
    ) -> Self
    where
        Mode: ModeExt + 'static,
        Step: StepExt + 'static,
        RawData: BodyExt<Mode, Step> + 'static,
        Base: BaseExt<Step> + 'static,
        Main: MainExt + 'static,
        Sidebar: SidebarExt + 'static,
        Header: HeaderExt + 'static,
        Footer: FooterExt + 'static,
        Overlay: OverlayExt + 'static,
        BaseInitFromRawFn:
            Fn(BaseInitFromRawArgs<RawData, Mode, Step>) -> BaseInitFromRawOutput + Clone + 'static,
        BaseInitFromRawOutput:
            Future<Output = BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay>>,
    {
        let loader = Rc::new(AsyncLoader::new());
        Self {
            phantom: PhantomData,
            loader: loader.clone(),
            // When a mode is chosen: asynchronously build fresh raw data for
            // that mode, record it in the module's edit history, then switch
            // the app into the base (editing) phase.
            on_mode_change: Box::new(move |mode| {
                loader.load(clone!(init_from_raw, app => async move {
                    let (jig_id, module_id, jig) = (
                        app.opts.jig_id.clone(),
                        app.opts.module_id.clone(),
                        app.jig.borrow().clone().unwrap_ji()
                    );
                    // Fresh raw data seeded from the selected mode.
                    let raw = RawData::new_mode(mode);
                    let history = app.history.borrow().as_ref().unwrap_ji().clone();
                    // Record the new raw data as a history entry.
                    history.push_modify(clone!(raw => |init| {
                        *init = raw;
                    }));
                    GenericState::change_phase_base(
                        app.clone(),
                        init_from_raw.clone(),
                        BaseInitFromRawArgs::new(
                            jig_id,
                            module_id,
                            jig,
                            raw,
                            InitSource::ChooseMode,
                            history
                        )
                    ).await;
                }))
            }),
        }
    }
}
| 32.21875 | 112 | 0.515681 |
9ba9d62dffb03ecf33401a4726f653bed9191d52 | 867 | use crossbeam::channel::{bounded, unbounded};
use pipeviewer::{args::Args, read, stats, write};
use std::io::Result;
use std::thread;
/// Wires up the three pipeline stages (read -> stats -> write) over channels,
/// runs each on its own thread, and surfaces the first I/O error only after
/// every thread has finished.
fn main() -> Result<()> {
    // Parse and destructure the CLI arguments in one step.
    let Args {
        infile,
        outfile,
        silent,
    } = Args::parse();

    // Stats updates flow over an unbounded channel; the data channel is
    // bounded so a slow writer applies back-pressure to the reader.
    let (stats_tx, stats_rx) = unbounded();
    let (write_tx, write_rx) = bounded(1024);

    let reader = thread::spawn(move || read::read_loop(&infile, stats_tx, write_tx));
    let stats = thread::spawn(move || stats::stats_loop(silent, stats_rx));
    let writer = thread::spawn(move || write::write_loop(&outfile, write_rx));

    // Join every thread first so none is abandoned, then propagate errors.
    let read_result = reader.join().unwrap();
    let stats_result = stats.join().unwrap();
    let write_result = writer.join().unwrap();
    read_result?;
    stats_result?;
    write_result?;
    Ok(())
}
| 27.967742 | 90 | 0.644752 |
dd6287878e69d19b2a669cb2e53ab86561ea6d28 | 1,312 | use std::io;
use actix_web::{web, App, HttpServer};
use tokio_postgres::NoTls;
use dotenv::dotenv;
use crate::db::check_init_db_conn;
use crate::handlers::*;
use std::io::{Error, ErrorKind};
mod config;
mod db;
mod handlers;
mod models;
#[actix_rt::main]
/// Boots the todo web service: loads configuration from the environment,
/// creates a Postgres connection pool, verifies connectivity, and starts the
/// actix-web HTTP server with the route table.
async fn main() -> io::Result<()> {
    // Load variables from a `.env` file, if one is present.
    dotenv().ok();
    let config = config::Config::from_env().unwrap();
    let pool = config.pg.create_pool(NoTls).unwrap();
    // Fail fast — and with a descriptive message instead of the previous
    // empty string — if the database is unreachable.
    if !check_init_db_conn(pool.clone()).await {
        return Err(Error::new(
            ErrorKind::Other,
            "failed to establish initial database connection",
        ));
    }
    // No `else` needed: the error branch above already returned.
    println!(">>> DB Connection is successful. ");
    println!(
        ">>> DB Pool init state: available/size = {}/{}",
        pool.status().available,
        pool.status().size
    );
    println!(
        ">>> Starting server listening at http://{}:{}",
        config.server.host, config.server.port
    );
    HttpServer::new(move || {
        App::new()
            .data(pool.clone())
            .route("/", web::get().to(status))
            .route("/todos{_:/?}", web::get().to(get_todos))
            .route("/todos{_:/?}", web::post().to(create_todo_list))
            .route("/todos/{id}/items{_:/?}", web::get().to(get_todo_items))
    })
    .bind(format!("{}:{}", config.server.host, config.server.port))?
    .run()
    .await
}
| 24.754717 | 76 | 0.560213 |
e475670f2d8821e30c110b8552f9383bde0fa13c | 2,418 | use crate::protocol::parts::{OptionValue, TaFlagId, TransactionFlags};
/// Session state mirrored on the client, kept in sync from the transaction
/// flags the server sends with its replies (see `SessionState::update`).
#[derive(Debug)]
pub(crate) struct SessionState {
    pub ta_state: TransactionState,
    pub isolation_level: u8,
    pub ddl_commit_mode: bool, // unclear — mirrors TaFlagId::DdlCommitmodeChanged; exact semantics TBD
    pub read_only_mode: bool,  // unclear — mirrors TaFlagId::ReadOnlyMode; exact semantics TBD
    pub dead: bool,            // set when the server reports a session-closing transaction error
}
impl Default for SessionState {
fn default() -> Self {
Self {
ta_state: TransactionState::Initial,
isolation_level: 0,
ddl_commit_mode: true,
read_only_mode: false,
dead: false,
}
}
}
impl SessionState {
    /// Applies the transaction flags from a server reply to this state.
    ///
    /// Every known `(flag id, value)` combination updates the matching field;
    /// anything unexpected is logged with a warning and otherwise ignored.
    pub fn update(&mut self, transaction_flags: TransactionFlags) {
        for (id, value) in transaction_flags {
            // The isolation level arrives as an INT; the narrowing cast to u8
            // below is deliberate, hence the two clippy allowances.
            #[allow(clippy::cast_sign_loss)]
            #[allow(clippy::cast_possible_truncation)]
            match (id, value) {
                (TaFlagId::RolledBack, OptionValue::BOOLEAN(true)) => {
                    self.ta_state = TransactionState::RolledBack
                }
                (TaFlagId::Committed, OptionValue::BOOLEAN(true)) => {
                    self.ta_state = TransactionState::Committed;
                }
                (TaFlagId::WriteTaStarted, OptionValue::BOOLEAN(true)) => {
                    self.ta_state = TransactionState::WriteTransaction;
                }
                (TaFlagId::NoWriteTaStarted, OptionValue::BOOLEAN(true)) => {
                    self.ta_state = TransactionState::ReadTransaction;
                }
                (TaFlagId::NewIsolationlevel, OptionValue::INT(i)) => {
                    self.isolation_level = i as u8;
                }
                (TaFlagId::SessionclosingTaError, OptionValue::BOOLEAN(b)) => {
                    // A session-closing transaction error marks the session dead.
                    self.dead = b;
                }
                (TaFlagId::DdlCommitmodeChanged, OptionValue::BOOLEAN(b)) => {
                    self.ddl_commit_mode = b;
                }
                (TaFlagId::ReadOnlyMode, OptionValue::BOOLEAN(b)) => {
                    self.read_only_mode = b;
                }
                (id, value) => {
                    // Unknown/unexpected combinations are tolerated by design.
                    warn!(
                        "unexpected transaction flag ignored: {:?} = {:?}",
                        id, value
                    );
                }
            }
        }
    }
}
/// Client-side view of the server's transaction state, as derived from the
/// transaction flags handled in `SessionState::update`.
#[derive(Debug)]
pub enum TransactionState {
    /// No transaction activity observed yet.
    Initial,
    /// The server reported the transaction was rolled back.
    RolledBack,
    /// The server reported the transaction was committed.
    Committed,
    /// The server reported a non-write transaction was started.
    ReadTransaction,
    /// The server reported a write transaction was started.
    WriteTransaction,
}
| 33.583333 | 79 | 0.514061 |
bbf814d448d0ecf9da788344cdab3eb93188b3b1 | 234,681 | /// An intended audience of the \[Product][google.cloud.retail.v2alpha.Product\]
/// for whom it's sold.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Audience {
/// The genders of the audience. Strongly encouraged to use the standard
/// values: "male", "female", "unisex".
///
/// At most 5 values are allowed. Each value must be a UTF-8 encoded string
/// with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error
/// is returned.
///
/// Google Merchant Center property
/// \[gender\](<https://support.google.com/merchants/answer/6324479>). Schema.org
/// property
/// \[Product.audience.suggestedGender\](<https://schema.org/suggestedGender>).
#[prost(string, repeated, tag = "1")]
pub genders: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// The age groups of the audience. Strongly encouraged to use the standard
/// values: "newborn" (up to 3 months old), "infant" (3–12 months old),
/// "toddler" (1–5 years old), "kids" (5–13 years old), "adult" (typically
/// teens or older).
///
/// At most 5 values are allowed. Each value must be a UTF-8 encoded string
/// with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error
/// is returned.
///
/// Google Merchant Center property
/// \[age_group\](<https://support.google.com/merchants/answer/6324463>).
/// Schema.org property
/// \[Product.audience.suggestedMinAge\](<https://schema.org/suggestedMinAge>) and
/// \[Product.audience.suggestedMaxAge\](<https://schema.org/suggestedMaxAge>).
#[prost(string, repeated, tag = "2")]
pub age_groups: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// The color information of a \[Product][google.cloud.retail.v2alpha.Product\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ColorInfo {
    /// The standard color families. Strongly recommended to use the following
    /// standard color groups: "Red", "Pink", "Orange", "Yellow", "Purple",
    /// "Green", "Cyan", "Blue", "Brown", "White", "Gray", "Black" and
    /// "Mixed". Normally it is expected to have only 1 color family. May consider
    /// using single "Mixed" instead of multiple values.
    ///
    /// A maximum of 5 values are allowed. Each value must be a UTF-8 encoded
    /// string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[color\](<https://support.google.com/merchants/answer/6324487>). Schema.org
    /// property \[Product.color\](<https://schema.org/color>).
    #[prost(string, repeated, tag = "1")]
    pub color_families: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The color display names, which may be different from standard color family
    /// names, such as the color aliases used in the website frontend. Normally
    /// it is expected to have only 1 color. May consider using single "Mixed"
    /// instead of multiple values.
    ///
    /// A maximum of 25 colors are allowed. Each value must be a UTF-8 encoded
    /// string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[color\](<https://support.google.com/merchants/answer/6324487>). Schema.org
    /// property \[Product.color\](<https://schema.org/color>).
    #[prost(string, repeated, tag = "2")]
    pub colors: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// A custom attribute that is not explicitly modeled in
/// \[Product][google.cloud.retail.v2alpha.Product\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CustomAttribute {
    /// The textual values of this custom attribute. For example, `["yellow",
    /// "green"]` when the key is "color".
    ///
    /// At most 400 values are allowed. Empty values are not allowed. Each value
    /// must be a UTF-8 encoded string with a length limit of 256 characters.
    /// Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Exactly one of \[text][google.cloud.retail.v2alpha.CustomAttribute.text\] or
    /// \[numbers][google.cloud.retail.v2alpha.CustomAttribute.numbers\] should be
    /// set. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, repeated, tag = "1")]
    pub text: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The numerical values of this custom attribute. For example, `[2.3, 15.4]`
    /// when the key is "lengths_cm".
    ///
    /// At most 400 values are allowed.Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    ///
    /// Exactly one of \[text][google.cloud.retail.v2alpha.CustomAttribute.text\] or
    /// \[numbers][google.cloud.retail.v2alpha.CustomAttribute.numbers\] should be
    /// set. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(double, repeated, tag = "2")]
    pub numbers: ::prost::alloc::vec::Vec<f64>,
    /// If true, custom attribute values are searchable by text queries in
    /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\].
    ///
    /// This field is ignored in a
    /// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\].
    ///
    /// Only set if type \[text][google.cloud.retail.v2alpha.CustomAttribute.text\]
    /// is set. Otherwise, a INVALID_ARGUMENT error is returned.
    #[prost(bool, optional, tag = "3")]
    pub searchable: ::core::option::Option<bool>,
    /// If true, custom attribute values are indexed, so that it can be filtered,
    /// faceted or boosted in
    /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\].
    ///
    /// This field is ignored in a
    /// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\].
    ///
    /// See
    /// \[SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter\],
    /// \[SearchRequest.facet_specs][google.cloud.retail.v2alpha.SearchRequest.facet_specs\]
    /// and
    /// \[SearchRequest.boost_spec][google.cloud.retail.v2alpha.SearchRequest.boost_spec\]
    /// for more details.
    #[prost(bool, optional, tag = "4")]
    pub indexable: ::core::option::Option<bool>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// Fulfillment information, such as the store IDs for in-store pickup or region
/// IDs for different shipping methods.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FulfillmentInfo {
    /// The fulfillment type, including commonly used types (such as pickup in
    /// store and same day delivery), and custom types. Customers have to map
    /// custom types to their display names before rendering UI.
    ///
    /// Supported values:
    ///
    /// * "pickup-in-store"
    /// * "ship-to-store"
    /// * "same-day-delivery"
    /// * "next-day-delivery"
    /// * "custom-type-1"
    /// * "custom-type-2"
    /// * "custom-type-3"
    /// * "custom-type-4"
    /// * "custom-type-5"
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "1")]
    pub r#type: ::prost::alloc::string::String,
    /// The IDs for this \[type][google.cloud.retail.v2alpha.FulfillmentInfo.type\],
    /// such as the store IDs for
    /// \[FulfillmentInfo.type.pickup-in-store][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// or the region IDs for
    /// \[FulfillmentInfo.type.same-day-delivery][google.cloud.retail.v2alpha.FulfillmentInfo.type\].
    ///
    /// A maximum of 3000 values are allowed. Each value must be a string with a
    /// length limit of 30 characters, matching the pattern `\[a-zA-Z0-9_-\]+`, such
    /// as "store1" or "REGION-2". Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    #[prost(string, repeated, tag = "2")]
    pub place_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// \[Product][google.cloud.retail.v2alpha.Product\] thumbnail/detail image.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Image {
    /// Required. URI of the image.
    ///
    /// This field must be a valid UTF-8 encoded URI with a length limit of 5,000
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[image_link\](<https://support.google.com/merchants/answer/6324350>).
    /// Schema.org property \[Product.image\](<https://schema.org/image>).
    #[prost(string, tag = "1")]
    pub uri: ::prost::alloc::string::String,
    /// Height of the image in number of pixels.
    ///
    /// This field must be nonnegative. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    #[prost(int32, tag = "2")]
    pub height: i32,
    /// Width of the image in number of pixels.
    ///
    /// This field must be nonnegative. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    #[prost(int32, tag = "3")]
    pub width: i32,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// A floating point interval.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Interval {
    /// The lower bound of the interval. If neither of the min fields are set, then
    /// the lower bound is negative infinity.
    ///
    /// This field must be not larger than
    /// \[max][google.cloud.retail.v2alpha.Interval.max\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(oneof = "interval::Min", tags = "1, 2")]
    pub min: ::core::option::Option<interval::Min>,
    /// The upper bound of the interval. If neither of the max fields are set, then
    /// the upper bound is positive infinity.
    ///
    /// This field must be not smaller than
    /// \[min][google.cloud.retail.v2alpha.Interval.min\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(oneof = "interval::Max", tags = "3, 4")]
    pub max: ::core::option::Option<interval::Max>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// Nested message and enum types in `Interval`.
pub mod interval {
    /// The lower bound of the interval. If neither of the min fields are set, then
    /// the lower bound is negative infinity.
    ///
    /// This field must be not larger than
    /// \[max][google.cloud.retail.v2alpha.Interval.max\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Min {
        /// Inclusive lower bound.
        #[prost(double, tag = "1")]
        Minimum(f64),
        /// Exclusive lower bound.
        #[prost(double, tag = "2")]
        ExclusiveMinimum(f64),
    }
    /// The upper bound of the interval. If neither of the max fields are set, then
    /// the upper bound is positive infinity.
    ///
    /// This field must be not smaller than
    /// \[min][google.cloud.retail.v2alpha.Interval.min\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Max {
        /// Inclusive upper bound.
        #[prost(double, tag = "3")]
        Maximum(f64),
        /// Exclusive upper bound.
        #[prost(double, tag = "4")]
        ExclusiveMaximum(f64),
    }
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// The price information of a \[Product][google.cloud.retail.v2alpha.Product\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PriceInfo {
    /// The 3-letter currency code defined in [ISO
    /// 4217](<https://www.iso.org/iso-4217-currency-codes.html>).
    ///
    /// If this field is an unrecognizable currency code, an INVALID_ARGUMENT
    /// error is returned.
    ///
    /// The
    /// \[Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s with the same
    /// \[Product.primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\]
    /// must share the same
    /// \[currency_code][google.cloud.retail.v2alpha.PriceInfo.currency_code\].
    /// Otherwise, a FAILED_PRECONDITION error is returned.
    #[prost(string, tag = "1")]
    pub currency_code: ::prost::alloc::string::String,
    /// Price of the product.
    ///
    /// Google Merchant Center property
    /// \[price\](<https://support.google.com/merchants/answer/6324371>). Schema.org
    /// property \[Offer.priceSpecification\](<https://schema.org/priceSpecification>).
    #[prost(float, tag = "2")]
    pub price: f32,
    /// Price of the product without any discount. If zero, by default set to be
    /// the \[price][google.cloud.retail.v2alpha.PriceInfo.price\].
    #[prost(float, tag = "3")]
    pub original_price: f32,
    /// The costs associated with the sale of a particular product. Used for gross
    /// profit reporting.
    ///
    /// * Profit = \[price][google.cloud.retail.v2alpha.PriceInfo.price\] -
    /// \[cost][google.cloud.retail.v2alpha.PriceInfo.cost\]
    ///
    /// Google Merchant Center property
    /// \[cost_of_goods_sold\](<https://support.google.com/merchants/answer/9017895>).
    #[prost(float, tag = "4")]
    pub cost: f32,
    /// The timestamp when the \[price][google.cloud.retail.v2alpha.PriceInfo.price\]
    /// starts to be effective. This can be set as a future timestamp, and the
    /// \[price][google.cloud.retail.v2alpha.PriceInfo.price\] is only used for
    /// search after
    /// \[price_effective_time][google.cloud.retail.v2alpha.PriceInfo.price_effective_time\].
    /// If so, the
    /// \[original_price][google.cloud.retail.v2alpha.PriceInfo.original_price\] must
    /// be set and
    /// \[original_price][google.cloud.retail.v2alpha.PriceInfo.original_price\] is
    /// used before
    /// \[price_effective_time][google.cloud.retail.v2alpha.PriceInfo.price_effective_time\].
    ///
    /// Do not set if \[price][google.cloud.retail.v2alpha.PriceInfo.price\] is
    /// always effective because it will cause additional latency during search.
    #[prost(message, optional, tag = "5")]
    pub price_effective_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The timestamp when the \[price][google.cloud.retail.v2alpha.PriceInfo.price\]
    /// stops to be effective. The
    /// \[price][google.cloud.retail.v2alpha.PriceInfo.price\] is used for search
    /// before
    /// \[price_expire_time][google.cloud.retail.v2alpha.PriceInfo.price_expire_time\].
    /// If this field is set, the
    /// \[original_price][google.cloud.retail.v2alpha.PriceInfo.original_price\] must
    /// be set and
    /// \[original_price][google.cloud.retail.v2alpha.PriceInfo.original_price\] is
    /// used after
    /// \[price_expire_time][google.cloud.retail.v2alpha.PriceInfo.price_expire_time\].
    ///
    /// Do not set if \[price][google.cloud.retail.v2alpha.PriceInfo.price\] is
    /// always effective because it will cause additional latency during search.
    #[prost(message, optional, tag = "6")]
    pub price_expire_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. The price range of all the child
    /// \[Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s grouped together on the
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Only populated for
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s.
    ///
    /// Note: This field is OUTPUT_ONLY for
    /// \[ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct\].
    /// Do not set this field in API requests.
    #[prost(message, optional, tag = "7")]
    pub price_range: ::core::option::Option<price_info::PriceRange>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// Nested message and enum types in `PriceInfo`.
pub mod price_info {
    /// The price range of all
    /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] having the same
    /// \[Product.primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\].
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct PriceRange {
        /// The inclusive
        /// \[Product.pricing_info.price][google.cloud.retail.v2alpha.PriceInfo.price\]
        /// interval of all
        /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
        /// \[Product][google.cloud.retail.v2alpha.Product\] having the same
        /// \[Product.primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\].
        #[prost(message, optional, tag = "1")]
        pub price: ::core::option::Option<super::Interval>,
        /// The inclusive
        /// \[Product.pricing_info.original_price][google.cloud.retail.v2alpha.PriceInfo.original_price\]
        /// internal of all
        /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
        /// \[Product][google.cloud.retail.v2alpha.Product\] having the same
        /// \[Product.primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\].
        #[prost(message, optional, tag = "2")]
        pub original_price: ::core::option::Option<super::Interval>,
    }
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// The rating of a \[Product][google.cloud.retail.v2alpha.Product\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Rating {
    /// The total number of ratings. This value is independent of the value of
    /// \[rating_histogram][google.cloud.retail.v2alpha.Rating.rating_histogram\].
    ///
    /// This value must be nonnegative. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    #[prost(int32, tag = "1")]
    pub rating_count: i32,
    /// The average rating of the \[Product][google.cloud.retail.v2alpha.Product\].
    ///
    /// The rating is scaled at 1-5. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    #[prost(float, tag = "2")]
    pub average_rating: f32,
    /// List of rating counts per rating value (index = rating - 1). The list is
    /// empty if there is no rating. If the list is non-empty, its size is
    /// always 5. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// For example, [41, 14, 13, 47, 303]. It means that the
    /// \[Product][google.cloud.retail.v2alpha.Product\] got 41 ratings with 1 star,
    /// 14 ratings with 2 star, and so on.
    #[prost(int32, repeated, tag = "3")]
    pub rating_histogram: ::prost::alloc::vec::Vec<i32>,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// Information of an end user.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserInfo {
    /// Highly recommended for logged-in users. Unique identifier for logged-in
    /// user, such as a user name.
    ///
    /// The field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "1")]
    pub user_id: ::prost::alloc::string::String,
    /// The end user's IP address. Required for getting
    /// \[SearchResponse.sponsored_results][google.cloud.retail.v2alpha.SearchResponse.sponsored_results\].
    /// This field is used to extract location information for personalization.
    ///
    /// This field must be either an IPv4 address (e.g. "104.133.9.80") or an IPv6
    /// address (e.g. "2001:0db8:85a3:0000:0000:8a2e:0370:7334"). Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// This should not be set when using the JavaScript tag in
    /// \[UserEventService.CollectUserEvent][google.cloud.retail.v2alpha.UserEventService.CollectUserEvent\]
    /// or if
    /// \[direct_user_request][google.cloud.retail.v2alpha.UserInfo.direct_user_request\]
    /// is set.
    #[prost(string, tag = "2")]
    pub ip_address: ::prost::alloc::string::String,
    /// User agent as included in the HTTP header. Required for getting
    /// \[SearchResponse.sponsored_results][google.cloud.retail.v2alpha.SearchResponse.sponsored_results\].
    ///
    /// The field must be a UTF-8 encoded string with a length limit of 1,000
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// This should not be set when using the client side event reporting with
    /// GTM or JavaScript tag in
    /// \[UserEventService.CollectUserEvent][google.cloud.retail.v2alpha.UserEventService.CollectUserEvent\]
    /// or if
    /// \[direct_user_request][google.cloud.retail.v2alpha.UserInfo.direct_user_request\]
    /// is set.
    #[prost(string, tag = "3")]
    pub user_agent: ::prost::alloc::string::String,
    /// True if the request is made directly from the end user, in which case the
    /// \[ip_address][google.cloud.retail.v2alpha.UserInfo.ip_address\] and
    /// \[user_agent][google.cloud.retail.v2alpha.UserInfo.user_agent\] can be
    /// populated from the HTTP request. This flag should be set only if the API
    /// request is made directly from the end user such as a mobile app (and not if
    /// a gateway or a server is processing and pushing the user events).
    ///
    /// This should not be set when using the JavaScript tag in
    /// \[UserEventService.CollectUserEvent][google.cloud.retail.v2alpha.UserEventService.CollectUserEvent\].
    #[prost(bool, tag = "4")]
    pub direct_user_request: bool,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// Promotion information.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Promotion {
    /// ID of the promotion. For example, "free gift".
    ///
    /// The value must be a UTF-8 encoded string with a length limit of 128
    /// characters, and match the pattern: `\[a-zA-Z][a-zA-Z0-9_\]*`. For example,
    /// id0LikeThis or ID_1_LIKE_THIS. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    ///
    /// Google Merchant Center property
    /// \[promotion\](<https://support.google.com/merchants/answer/7050148>).
    #[prost(string, tag = "1")]
    pub promotion_id: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated code; regenerate from the .proto instead of
// hand-editing.
/// The inventory information at a place (e.g. a store) identified
/// by a place ID.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct LocalInventory {
    /// The place ID for the current set of inventory information.
    #[prost(string, tag = "1")]
    pub place_id: ::prost::alloc::string::String,
    /// Product price and cost information.
    ///
    /// Google Merchant Center property
    /// \[price\](<https://support.google.com/merchants/answer/6324371>).
    #[prost(message, optional, tag = "2")]
    pub price_info: ::core::option::Option<PriceInfo>,
    /// Additional local inventory attributes, for example, store name, promotion
    /// tags, etc.
    /// * At most 5 values are allowed. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    /// * The key must be a UTF-8 encoded string with a length limit of 10
    /// characters.
    /// * The key must match the pattern: `\[a-zA-Z0-9][a-zA-Z0-9_\]*`. For example,
    /// key0LikeThis or KEY_1_LIKE_THIS.
    /// * The attribute values must be of the same type (text or number).
    /// * The max number of values per attribute is 10.
    /// * For text values, the length limit is 10 UTF-8 characters.
    /// * The attribute does not support search. The `searchable` field should be
    /// unset or set to false.
    #[prost(map = "string, message", tag = "3")]
    pub attributes: ::std::collections::HashMap<::prost::alloc::string::String, CustomAttribute>,
}
/// Product captures all metadata information of items to be recommended or
/// searched.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Product {
    /// Immutable. Full resource name of the product, such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/product_id`.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Immutable. \[Product][google.cloud.retail.v2alpha.Product\] identifier, which
    /// is the final component of \[name][google.cloud.retail.v2alpha.Product.name\].
    /// For example, this field is "id_1", if
    /// \[name][google.cloud.retail.v2alpha.Product.name\] is
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/id_1`.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[id\](<https://support.google.com/merchants/answer/6324405>). Schema.org
    /// Property \[Product.sku\](<https://schema.org/sku>).
    #[prost(string, tag = "2")]
    pub id: ::prost::alloc::string::String,
    /// Immutable. The type of the product. Default to
    /// \[Catalog.product_level_config.ingestion_product_type][google.cloud.retail.v2alpha.ProductLevelConfig.ingestion_product_type\]
    /// if unset.
    #[prost(enumeration = "product::Type", tag = "3")]
    pub r#type: i32,
    /// Variant group identifier. Must be an
    /// \[id][google.cloud.retail.v2alpha.Product.id\], with the same parent branch
    /// with this product. Otherwise, an error is thrown.
    ///
    /// For \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s, this field can only be
    /// empty or set to the same value as
    /// \[id][google.cloud.retail.v2alpha.Product.id\].
    ///
    /// For VARIANT \[Product][google.cloud.retail.v2alpha.Product\]s, this field
    /// cannot be empty. A maximum of 2,000 products are allowed to share the same
    /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center Property
    /// \[item_group_id\](<https://support.google.com/merchants/answer/6324507>).
    /// Schema.org Property
    /// \[Product.inProductGroupWithID\](<https://schema.org/inProductGroupWithID>).
    ///
    /// This field must be enabled before it can be used. [Learn
    /// more](/recommendations-ai/docs/catalog#item-group-id).
    #[prost(string, tag = "4")]
    pub primary_product_id: ::prost::alloc::string::String,
    /// The \[id][google.cloud.retail.v2alpha.Product.id\] of the collection members
    /// when \[type][google.cloud.retail.v2alpha.Product.type\] is
    /// \[Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\].
    ///
    /// Should not set it for other types. A maximum of 1000 values are allowed.
    /// Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, repeated, tag = "5")]
    pub collection_member_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The Global Trade Item Number (GTIN) of the product.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// This field must be a Unigram. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    ///
    /// Google Merchant Center property
    /// \[gtin\](<https://support.google.com/merchants/answer/6324461>).
    /// Schema.org property
    /// \[Product.isbn\](<https://schema.org/isbn>) or
    /// \[Product.gtin8\](<https://schema.org/gtin8>) or
    /// \[Product.gtin12\](<https://schema.org/gtin12>) or
    /// \[Product.gtin13\](<https://schema.org/gtin13>) or
    /// \[Product.gtin14\](<https://schema.org/gtin14>).
    ///
    /// If the value is not a valid GTIN, an INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "6")]
    pub gtin: ::prost::alloc::string::String,
    /// Product categories. This field is repeated for supporting one product
    /// belonging to several parallel categories. Strongly recommended using the
    /// full path for better search / recommendation quality.
    ///
    /// To represent full path of category, use '>' sign to separate different
    /// hierarchies. If '>' is part of the category name, please replace it with
    /// other character(s).
    ///
    /// For example, if a shoes product belongs to both
    /// ["Shoes & Accessories" -> "Shoes"] and
    /// ["Sports & Fitness" -> "Athletic Clothing" -> "Shoes"], it could be
    /// represented as:
    ///
    ///      "categories": [
    ///        "Shoes & Accessories > Shoes",
    ///        "Sports & Fitness > Athletic Clothing > Shoes"
    ///      ]
    ///
    /// Must be set for
    /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] otherwise an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// At most 250 values are allowed per
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Empty values are not
    /// allowed. Each value must be a UTF-8 encoded string with a length limit of
    /// 5,000 characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[google_product_category][mc_google_product_category\]. Schema.org property
    /// \[Product.category\](<https://schema.org/category>).
    ///
    /// \[mc_google_product_category\]:
    /// <https://support.google.com/merchants/answer/6324436>
    #[prost(string, repeated, tag = "7")]
    pub categories: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Required. Product title.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 1,000
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[title\](<https://support.google.com/merchants/answer/6324415>). Schema.org
    /// property \[Product.name\](<https://schema.org/name>).
    #[prost(string, tag = "8")]
    pub title: ::prost::alloc::string::String,
    /// The brands of the product.
    ///
    /// A maximum of 30 brands are allowed. Each brand must be a UTF-8 encoded
    /// string with a length limit of 1,000 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[brand\](<https://support.google.com/merchants/answer/6324351>). Schema.org
    /// property \[Product.brand\](<https://schema.org/brand>).
    #[prost(string, repeated, tag = "9")]
    pub brands: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Product description.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 5,000
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[description\](<https://support.google.com/merchants/answer/6324468>).
    /// schema.org property \[Product.description\](<https://schema.org/description>).
    #[prost(string, tag = "10")]
    pub description: ::prost::alloc::string::String,
    /// Language of the title/description and other string attributes. Use language
    /// tags defined by [BCP 47](<https://www.rfc-editor.org/rfc/bcp/bcp47.txt>).
    ///
    /// For product prediction, this field is ignored and the model automatically
    /// detects the text language. The
    /// \[Product][google.cloud.retail.v2alpha.Product\] can include text in
    /// different languages, but duplicating
    /// \[Product][google.cloud.retail.v2alpha.Product\]s to provide text in multiple
    /// languages can result in degraded model performance.
    ///
    /// For product search this field is in use. It defaults to "en-US" if unset.
    #[prost(string, tag = "11")]
    pub language_code: ::prost::alloc::string::String,
    /// Highly encouraged. Extra product attributes to be included. For example,
    /// for products, this could include the store name, vendor, style, color, etc.
    /// These are very strong signals for recommendation model, thus we highly
    /// recommend providing the attributes here.
    ///
    /// Features that can take on one of a limited number of possible values. The
    /// two types of features that can be set are:
    ///
    /// Textual features. Some examples would be the brand/maker of a product, or
    /// country of a customer. Numerical features. Some examples would be the
    /// height/weight of a product, or age of a customer.
    ///
    /// For example: `{ "vendor": {"text": ["vendor123", "vendor456"]},
    /// "lengths_cm": {"numbers":[2.3, 15.4]}, "heights_cm": {"numbers":[8.1, 6.4]}
    /// }`.
    ///
    /// This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT
    /// error is returned:
    ///
    /// * Max entries count: 200.
    /// * The key must be a UTF-8 encoded string with a length limit of 128
    /// characters.
    /// * For indexable attribute, the key must match the pattern:
    /// `\[a-zA-Z0-9][a-zA-Z0-9_\]*`. For example, key0LikeThis or KEY_1_LIKE_THIS.
    #[prost(map = "string, message", tag = "12")]
    pub attributes: ::std::collections::HashMap<::prost::alloc::string::String, CustomAttribute>,
    /// Custom tags associated with the product.
    ///
    /// At most 250 values are allowed per
    /// \[Product][google.cloud.retail.v2alpha.Product\]. This value must be a UTF-8
    /// encoded string with a length limit of 1,000 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// This tag can be used for filtering recommendation results by passing the
    /// tag as part of the
    /// \[PredictRequest.filter][google.cloud.retail.v2alpha.PredictRequest.filter\].
    ///
    /// Google Merchant Center property
    /// \[custom_label_0–4\](<https://support.google.com/merchants/answer/6324473>).
    #[prost(string, repeated, tag = "13")]
    pub tags: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Product price and cost information.
    ///
    /// Google Merchant Center property
    /// \[price\](<https://support.google.com/merchants/answer/6324371>).
    #[prost(message, optional, tag = "14")]
    pub price_info: ::core::option::Option<PriceInfo>,
    /// The rating of this product.
    #[prost(message, optional, tag = "15")]
    pub rating: ::core::option::Option<Rating>,
    /// The timestamp when this \[Product][google.cloud.retail.v2alpha.Product\]
    /// becomes available for
    /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\].
    #[prost(message, optional, tag = "18")]
    pub available_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The online availability of the
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Default to
    /// \[Availability.IN_STOCK][google.cloud.retail.v2alpha.Product.Availability.IN_STOCK\].
    ///
    /// Google Merchant Center Property
    /// \[availability\](<https://support.google.com/merchants/answer/6324448>).
    /// Schema.org Property \[Offer.availability\](<https://schema.org/availability>).
    #[prost(enumeration = "product::Availability", tag = "19")]
    pub availability: i32,
    /// The available quantity of the item.
    #[prost(message, optional, tag = "20")]
    pub available_quantity: ::core::option::Option<i32>,
    /// Fulfillment information, such as the store IDs for in-store pickup or
    /// region IDs for different shipping methods.
    ///
    /// All the elements must have distinct
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\].
    /// Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(message, repeated, tag = "21")]
    pub fulfillment_info: ::prost::alloc::vec::Vec<FulfillmentInfo>,
    /// Canonical URL directly linking to the product detail page.
    ///
    /// It is strongly recommended to provide a valid uri for the product,
    /// otherwise the service performance could be significantly degraded.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 5,000
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[link\](<https://support.google.com/merchants/answer/6324416>). Schema.org
    /// property \[Offer.url\](<https://schema.org/url>).
    #[prost(string, tag = "22")]
    pub uri: ::prost::alloc::string::String,
    /// Product images for the product. Highly recommended to put the main image
    /// first.
    ///
    /// A maximum of 300 images are allowed.
    ///
    /// Google Merchant Center property
    /// \[image_link\](<https://support.google.com/merchants/answer/6324350>).
    /// Schema.org property \[Product.image\](<https://schema.org/image>).
    #[prost(message, repeated, tag = "23")]
    pub images: ::prost::alloc::vec::Vec<Image>,
    /// The target group associated with a given audience (e.g. male, veterans,
    /// car owners, musicians, etc.) of the product.
    #[prost(message, optional, tag = "24")]
    pub audience: ::core::option::Option<Audience>,
    /// The color of the product.
    ///
    /// Google Merchant Center property
    /// \[color\](<https://support.google.com/merchants/answer/6324487>). Schema.org
    /// property \[Product.color\](<https://schema.org/color>).
    #[prost(message, optional, tag = "25")]
    pub color_info: ::core::option::Option<ColorInfo>,
    /// The size of the product. To represent different size systems or size types,
    /// consider using this format: \[[[size_system:]size_type:]size_value\].
    ///
    /// For example, in "US:MENS:M", "US" represents size system; "MENS" represents
    /// size type; "M" represents size value. In "GIRLS:27", size system is empty;
    /// "GIRLS" represents size type; "27" represents size value. In "32 inches",
    /// both size system and size type are empty, while size value is "32 inches".
    ///
    /// A maximum of 20 values are allowed per
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Each value must be a UTF-8
    /// encoded string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[size\](<https://support.google.com/merchants/answer/6324492>),
    /// \[size_type\](<https://support.google.com/merchants/answer/6324497>) and
    /// \[size_system\](<https://support.google.com/merchants/answer/6324502>).
    /// Schema.org property \[Product.size\](<https://schema.org/size>).
    #[prost(string, repeated, tag = "26")]
    pub sizes: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The material of the product. For example, "leather", "wooden".
    ///
    /// A maximum of 20 values are allowed. Each value must be a UTF-8 encoded
    /// string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[material\](<https://support.google.com/merchants/answer/6324410>). Schema.org
    /// property \[Product.material\](<https://schema.org/material>).
    #[prost(string, repeated, tag = "27")]
    pub materials: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The pattern or graphic print of the product. For example, "striped", "polka
    /// dot", "paisley".
    ///
    /// A maximum of 20 values are allowed per
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Each value must be a UTF-8
    /// encoded string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[pattern\](<https://support.google.com/merchants/answer/6324483>). Schema.org
    /// property \[Product.pattern\](<https://schema.org/pattern>).
    #[prost(string, repeated, tag = "28")]
    pub patterns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The condition of the product. Strongly encouraged to use the standard
    /// values: "new", "refurbished", "used".
    ///
    /// A maximum of 5 values are allowed per
    /// \[Product][google.cloud.retail.v2alpha.Product\]. Each value must be a UTF-8
    /// encoded string with a length limit of 128 characters. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// Google Merchant Center property
    /// \[condition\](<https://support.google.com/merchants/answer/6324469>).
    /// Schema.org property
    /// \[Offer.itemCondition\](<https://schema.org/itemCondition>).
    #[prost(string, repeated, tag = "29")]
    pub conditions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The promotions applied to the product. A maximum of 10 values are allowed
    /// per \[Product][google.cloud.retail.v2alpha.Product\].
    #[prost(message, repeated, tag = "34")]
    pub promotions: ::prost::alloc::vec::Vec<Promotion>,
    /// The timestamp when the product is published by the retailer for the first
    /// time, which indicates the freshness of the products. Note that this field
    /// is different from
    /// \[available_time][google.cloud.retail.v2alpha.Product.available_time\], given
    /// it purely describes product freshness regardless of when it is available on
    /// search and recommendation.
    #[prost(message, optional, tag = "33")]
    pub publish_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Indicates which fields in the
    /// \[Product][google.cloud.retail.v2alpha.Product\]s are returned in
    /// \[SearchResponse][google.cloud.retail.v2alpha.SearchResponse\].
    ///
    /// Supported fields for all \[type][google.cloud.retail.v2alpha.Product.type\]s:
    ///
    /// * \[audience][google.cloud.retail.v2alpha.Product.audience\]
    /// * \[availability][google.cloud.retail.v2alpha.Product.availability\]
    /// * \[brands][google.cloud.retail.v2alpha.Product.brands\]
    /// * \[color_info][google.cloud.retail.v2alpha.Product.color_info\]
    /// * \[conditions][google.cloud.retail.v2alpha.Product.conditions\]
    /// * \[gtin][google.cloud.retail.v2alpha.Product.gtin\]
    /// * \[materials][google.cloud.retail.v2alpha.Product.materials\]
    /// * \[name][google.cloud.retail.v2alpha.Product.name\]
    /// * \[patterns][google.cloud.retail.v2alpha.Product.patterns\]
    /// * \[price_info][google.cloud.retail.v2alpha.Product.price_info\]
    /// * \[rating][google.cloud.retail.v2alpha.Product.rating\]
    /// * \[sizes][google.cloud.retail.v2alpha.Product.sizes\]
    /// * \[title][google.cloud.retail.v2alpha.Product.title\]
    /// * \[uri][google.cloud.retail.v2alpha.Product.uri\]
    ///
    /// Supported fields only for
    /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\] and
    /// \[Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\]:
    ///
    /// * \[categories][google.cloud.retail.v2alpha.Product.categories\]
    /// * \[description][google.cloud.retail.v2alpha.Product.description\]
    /// * \[images][google.cloud.retail.v2alpha.Product.images\]
    ///
    /// Supported fields only for
    /// \[Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]:
    ///
    /// * Only the first image in
    /// \[images][google.cloud.retail.v2alpha.Product.images\]
    ///
    /// To mark \[attributes][google.cloud.retail.v2alpha.Product.attributes\] as
    /// retrievable, include paths of the form "attributes.key" where "key" is the
    /// key of a custom attribute, as specified in
    /// \[attributes][google.cloud.retail.v2alpha.Product.attributes\].
    ///
    /// For \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\] and
    /// \[Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\], the
    /// following fields are always returned in
    /// \[SearchResponse][google.cloud.retail.v2alpha.SearchResponse\] by default:
    ///
    /// * \[name][google.cloud.retail.v2alpha.Product.name\]
    ///
    /// For \[Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\], the
    /// following fields are always returned by default:
    ///
    /// * \[name][google.cloud.retail.v2alpha.Product.name\]
    /// * \[color_info][google.cloud.retail.v2alpha.Product.color_info\]
    ///
    /// Maximum number of paths is 30. Otherwise, an INVALID_ARGUMENT error is
    /// returned.
    ///
    /// Note: Returning more fields in
    /// \[SearchResponse][google.cloud.retail.v2alpha.SearchResponse\] may increase
    /// response payload size and serving latency.
    #[prost(message, optional, tag = "30")]
    pub retrievable_fields: ::core::option::Option<::prost_types::FieldMask>,
    /// Output only. Product variants grouped together on primary product which
    /// share similar product attributes. It's automatically grouped by
    /// \[primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\]
    /// for all the product variants. Only populated for
    /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s.
    ///
    /// Note: This field is OUTPUT_ONLY for
    /// \[ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct\].
    /// Do not set this field in API requests.
    #[prost(message, repeated, tag = "31")]
    pub variants: ::prost::alloc::vec::Vec<Product>,
    /// When this product stops being available for search; see the nested
    /// `Expiration` oneof for the two ways it can be expressed.
    #[prost(oneof = "product::Expiration", tags = "16, 17")]
    pub expiration: ::core::option::Option<product::Expiration>,
}
/// Nested message and enum types in `Product`.
pub mod product {
    /// The type of this product.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum Type {
        /// Default value. Default to
        /// \[Catalog.product_level_config.ingestion_product_type][google.cloud.retail.v2alpha.ProductLevelConfig.ingestion_product_type\]
        /// if unset.
        Unspecified = 0,
        /// The primary type.
        ///
        /// As the primary unit for predicting, indexing and search serving, a
        /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
        /// \[Product][google.cloud.retail.v2alpha.Product\] is grouped with multiple
        /// \[Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
        /// \[Product][google.cloud.retail.v2alpha.Product\]s.
        Primary = 1,
        /// The variant type.
        ///
        /// \[Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
        /// \[Product][google.cloud.retail.v2alpha.Product\]s usually share some common
        /// attributes on the same
        /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
        /// \[Product][google.cloud.retail.v2alpha.Product\]s, but they have variant
        /// attributes like different colors, sizes and prices, etc.
        Variant = 2,
        /// The collection type. Collection products are bundled
        /// \[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
        /// \[Product][google.cloud.retail.v2alpha.Product\]s or
        /// \[Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
        /// \[Product][google.cloud.retail.v2alpha.Product\]s that are sold together,
        /// such as a jewelry set with necklaces, earrings and rings, etc.
        Collection = 3,
    }
    /// Product availability. If this field is unspecified, the product is
    /// assumed to be in stock.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum Availability {
        /// Default product availability. Default to
        /// \[Availability.IN_STOCK][google.cloud.retail.v2alpha.Product.Availability.IN_STOCK\]
        /// if unset.
        Unspecified = 0,
        /// Product in stock.
        InStock = 1,
        /// Product out of stock.
        OutOfStock = 2,
        /// Product that is in pre-order state.
        Preorder = 3,
        /// Product that is back-ordered (i.e. temporarily out of stock).
        Backorder = 4,
    }
    /// Expiration of the product, expressed either as an absolute expiration
    /// timestamp (`expire_time`) or as a relative time-to-live (`ttl`).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Expiration {
        /// The timestamp when this product becomes unavailable for
        /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\].
        ///
        /// If it is set, the \[Product][google.cloud.retail.v2alpha.Product\] is not
        /// available for
        /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
        /// after \[expire_time][google.cloud.retail.v2alpha.Product.expire_time\].
        /// However, the product can still be retrieved by
        /// \[ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct\]
        /// and
        /// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\].
        ///
        /// \[expire_time][google.cloud.retail.v2alpha.Product.expire_time\] must be
        /// later than
        /// \[available_time][google.cloud.retail.v2alpha.Product.available_time\] and
        /// \[publish_time][google.cloud.retail.v2alpha.Product.publish_time\],
        /// otherwise an INVALID_ARGUMENT error is thrown.
        ///
        /// Google Merchant Center property
        /// \[expiration_date\](<https://support.google.com/merchants/answer/6324499>).
        #[prost(message, tag = "16")]
        ExpireTime(::prost_types::Timestamp),
        /// Input only. The TTL (time to live) of the product.
        ///
        /// If it is set, it must be a non-negative value, and
        /// \[expire_time][google.cloud.retail.v2alpha.Product.expire_time\] is set as
        /// current timestamp plus \[ttl][google.cloud.retail.v2alpha.Product.ttl\].
        /// The derived
        /// \[expire_time][google.cloud.retail.v2alpha.Product.expire_time\] is
        /// returned in the output and \[ttl][google.cloud.retail.v2alpha.Product.ttl\]
        /// is left blank when retrieving the
        /// \[Product][google.cloud.retail.v2alpha.Product\].
        ///
        /// If it is set, the product is not available for
        /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
        /// after current timestamp plus
        /// \[ttl][google.cloud.retail.v2alpha.Product.ttl\]. However, the product can
        /// still be retrieved by
        /// \[ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct\]
        /// and
        /// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\].
        #[prost(message, tag = "17")]
        Ttl(::prost_types::Duration),
    }
}
/// UserEvent captures all metadata information Retail API needs to know about
/// how end users interact with customers' website.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserEvent {
/// Required. User event type. Allowed values are:
///
/// * `add-to-cart`: Products being added to cart.
/// * `category-page-view`: Special pages such as sale or promotion pages
/// viewed.
/// * `completion`: Completion query result showed/clicked.
/// * `detail-page-view`: Products detail page viewed.
/// * `home-page-view`: Homepage viewed.
/// * `promotion-offered`: Promotion is offered to a user.
/// * `promotion-not-offered`: Promotion is not offered to a user.
/// * `purchase-complete`: User finishing a purchase.
/// * `search`: Product search.
/// * `shopping-cart-page-view`: User viewing a shopping cart.
#[prost(string, tag = "1")]
pub event_type: ::prost::alloc::string::String,
/// Required. A unique identifier for tracking visitors.
///
/// For example, this could be implemented with an HTTP cookie, which should be
/// able to uniquely identify a visitor on a single device. This unique
/// identifier should not change if the visitor log in/out of the website.
///
/// The field must be a UTF-8 encoded string with a length limit of 128
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
///
/// The field should not contain PII or user-data. We recommend using the
/// Google Analytics [Client
/// ID](<https://developers.google.com/analytics/devguides/collection/analyticsjs/field-reference#clientId>)
/// for this field.
#[prost(string, tag = "2")]
pub visitor_id: ::prost::alloc::string::String,
/// A unique identifier for tracking a visitor session with a length limit of
/// 128 bytes. A session is an aggregation of an end user behavior in a time
/// span.
///
/// A general guideline to populate the session_id:
/// 1. If user has no activity for 30 min, a new session_id should be assigned.
/// 2. The session_id should be unique across users, suggest use uuid or add
/// visitor_id as prefix.
#[prost(string, tag = "21")]
pub session_id: ::prost::alloc::string::String,
/// Only required for
/// \[UserEventService.ImportUserEvents][google.cloud.retail.v2alpha.UserEventService.ImportUserEvents\]
/// method. Timestamp of when the user event happened.
#[prost(message, optional, tag = "3")]
pub event_time: ::core::option::Option<::prost_types::Timestamp>,
/// A list of identifiers for the independent experiment groups this user event
/// belongs to. This is used to distinguish between user events associated with
/// different experiment setups (e.g. using Retail API, using different
/// recommendation models).
#[prost(string, repeated, tag = "4")]
pub experiment_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// Highly recommended for user events that are the result of
/// \[PredictionService.Predict][google.cloud.retail.v2alpha.PredictionService.Predict\].
/// This field enables accurate attribution of recommendation model
/// performance.
///
/// The value must be a valid
/// \[PredictResponse.attribution_token][google.cloud.retail.v2alpha.PredictResponse.attribution_token\]
/// for user events that are the result of
/// \[PredictionService.Predict][google.cloud.retail.v2alpha.PredictionService.Predict\].
/// The value must be a valid
/// \[SearchResponse.attribution_token][google.cloud.retail.v2alpha.SearchResponse.attribution_token\]
/// for user events that are the result of
/// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\].
///
/// This token enables us to accurately attribute page view or purchase back to
/// the event and the particular predict response containing this
/// clicked/purchased product. If user clicks on product K in the
/// recommendation results, pass
/// \[PredictResponse.attribution_token][google.cloud.retail.v2alpha.PredictResponse.attribution_token\]
/// as a URL parameter to product K's page. When recording events on product
/// K's page, log the
/// \[PredictResponse.attribution_token][google.cloud.retail.v2alpha.PredictResponse.attribution_token\]
/// to this field.
#[prost(string, tag = "5")]
pub attribution_token: ::prost::alloc::string::String,
/// The main product details related to the event.
///
/// This field is required for the following event types:
///
/// * `add-to-cart`
/// * `detail-page-view`
/// * `purchase-complete`
///
/// In a `search` event, this field represents the products returned to the end
/// user on the current page (the end user may have not finished browsing the
/// whole page yet). When a new page is returned to the end user, after
/// pagination/filtering/ordering even for the same query, a new `search` event
/// with different
/// \[product_details][google.cloud.retail.v2alpha.UserEvent.product_details\] is
/// desired. The end user may have not finished browsing the whole page yet.
#[prost(message, repeated, tag = "6")]
pub product_details: ::prost::alloc::vec::Vec<ProductDetail>,
/// The main completion details related to the event.
///
/// In a `completion` event, this field represents the completions returned to
/// the end user and the clicked completion by the end user. In a `search`
/// event, it represents the search event happens after clicking completion.
#[prost(message, optional, tag = "22")]
pub completion_detail: ::core::option::Option<CompletionDetail>,
/// Extra user event features to include in the recommendation model.
///
/// The key must be a UTF-8 encoded string with a length limit of 5,000
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
///
/// For product recommendation, an example of extra user information is
/// traffic_channel, i.e. how user arrives at the site. Users can arrive
/// at the site by coming to the site directly, or coming through Google
/// search, and etc.
#[prost(map = "string, message", tag = "7")]
pub attributes: ::std::collections::HashMap<::prost::alloc::string::String, CustomAttribute>,
/// The ID or name of the associated shopping cart. This ID is used
/// to associate multiple items added or present in the cart before purchase.
///
/// This can only be set for `add-to-cart`, `purchase-complete`, or
/// `shopping-cart-page-view` events.
#[prost(string, tag = "8")]
pub cart_id: ::prost::alloc::string::String,
/// A transaction represents the entire purchase transaction.
///
/// Required for `purchase-complete` events. Other event types should not set
/// this field. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(message, optional, tag = "9")]
pub purchase_transaction: ::core::option::Option<PurchaseTransaction>,
/// The user's search query.
///
/// See \[SearchRequest.query][google.cloud.retail.v2alpha.SearchRequest.query\]
/// for definition.
///
/// The value must be a UTF-8 encoded string with a length limit of 5,000
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
///
/// At least one of
/// \[search_query][google.cloud.retail.v2alpha.UserEvent.search_query\] or
/// \[page_categories][google.cloud.retail.v2alpha.UserEvent.page_categories\] is
/// required for `search` events. Other event types should not set this field.
/// Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, tag = "10")]
pub search_query: ::prost::alloc::string::String,
/// The filter syntax consists of an expression language for constructing a
/// predicate from one or more fields of the products being filtered.
///
/// See
/// \[SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter\]
/// for definition and syntax.
///
/// The value must be a UTF-8 encoded string with a length limit of 1,000
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, tag = "16")]
pub filter: ::prost::alloc::string::String,
/// The order in which products are returned.
///
/// See
/// \[SearchRequest.order_by][google.cloud.retail.v2alpha.SearchRequest.order_by\]
/// for definition and syntax.
///
/// The value must be a UTF-8 encoded string with a length limit of 1,000
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
///
/// This can only be set for `search` events. Other event types should not set
/// this field. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, tag = "17")]
pub order_by: ::prost::alloc::string::String,
/// An integer that specifies the current offset for pagination (the 0-indexed
/// starting location, amongst the products deemed by the API as relevant).
///
/// See
/// \[SearchRequest.offset][google.cloud.retail.v2alpha.SearchRequest.offset\]
/// for definition.
///
/// If this field is negative, an INVALID_ARGUMENT is returned.
///
/// This can only be set for `search` events. Other event types should not set
/// this field. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(int32, tag = "18")]
pub offset: i32,
/// The categories associated with a category page.
///
/// To represent full path of category, use '>' sign to separate different
/// hierarchies. If '>' is part of the category name, please replace it with
/// other character(s).
///
/// Category pages include special pages such as sales or promotions. For
/// instance, a special sale page may have the category hierarchy:
/// "pageCategories" : ["Sales > 2017 Black Friday Deals"].
///
/// Required for `category-page-view` events. At least one of
/// \[search_query][google.cloud.retail.v2alpha.UserEvent.search_query\] or
/// \[page_categories][google.cloud.retail.v2alpha.UserEvent.page_categories\] is
/// required for `search` events. Other event types should not set this field.
/// Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, repeated, tag = "11")]
pub page_categories: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// User information.
#[prost(message, optional, tag = "12")]
pub user_info: ::core::option::Option<UserInfo>,
/// Complete URL (window.location.href) of the user's current page.
///
/// When using the client side event reporting with JavaScript pixel and Google
/// Tag Manager, this value is filled in automatically. Maximum length 5,000
/// characters.
#[prost(string, tag = "13")]
pub uri: ::prost::alloc::string::String,
/// The referrer URL of the current page.
///
/// When using the client side event reporting with JavaScript pixel and Google
/// Tag Manager, this value is filled in automatically.
#[prost(string, tag = "14")]
pub referrer_uri: ::prost::alloc::string::String,
/// A unique ID of a web page view.
///
/// This should be kept the same for all user events triggered from the same
/// pageview. For example, an item detail page view could trigger multiple
/// events as the user is browsing the page. The `pageViewId` property should
/// be kept the same for all these events so that they can be grouped together
/// properly.
///
/// When using the client side event reporting with JavaScript pixel and Google
/// Tag Manager, this value is filled in automatically.
#[prost(string, tag = "15")]
pub page_view_id: ::prost::alloc::string::String,
}
/// Detailed product information associated with a user event.
// NOTE(review): prost-generated message; `tag` numbers encode the proto wire
// format — do not renumber or reorder by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ProductDetail {
    /// Required. \[Product][google.cloud.retail.v2alpha.Product\] information.
    ///
    /// Required field(s):
    ///
    /// * \[Product.id][google.cloud.retail.v2alpha.Product.id\]
    ///
    /// Optional override field(s):
    ///
    /// * \[Product.price_info][google.cloud.retail.v2alpha.Product.price_info\]
    ///
    /// If any supported optional fields are provided, we will treat them as a full
    /// override when looking up product information from the catalog. Thus, it is
    /// important to ensure that the overriding fields are accurate and
    /// complete.
    ///
    /// All other product fields are ignored and instead populated via catalog
    /// lookup after event ingestion.
    #[prost(message, optional, tag = "1")]
    pub product: ::core::option::Option<Product>,
    /// Quantity of the product associated with the user event.
    ///
    /// For example, this field will be 2 if two products are added to the shopping
    /// cart for `purchase-complete` event. Required for `add-to-cart` and
    /// `purchase-complete` event types.
    // `message, optional` + `Option<i32>`: this is the proto `Int32Value`
    // wrapper, which prost maps to an optional primitive.
    #[prost(message, optional, tag = "2")]
    pub quantity: ::core::option::Option<i32>,
}
/// Detailed completion information including completion attribution token and
/// clicked completion info.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompletionDetail {
    /// Completion attribution token in
    /// \[CompleteQueryResponse.attribution_token][google.cloud.retail.v2alpha.CompleteQueryResponse.attribution_token\].
    #[prost(string, tag = "1")]
    pub completion_attribution_token: ::prost::alloc::string::String,
    /// End user selected
    /// \[CompleteQueryResponse.CompletionResult.suggestion][google.cloud.retail.v2alpha.CompleteQueryResponse.CompletionResult.suggestion\].
    #[prost(string, tag = "2")]
    pub selected_suggestion: ::prost::alloc::string::String,
    /// End user selected
    /// \[CompleteQueryResponse.CompletionResult.suggestion][google.cloud.retail.v2alpha.CompleteQueryResponse.CompletionResult.suggestion\]
    /// position, starting from 0.
    #[prost(int32, tag = "3")]
    pub selected_position: i32,
}
/// A transaction represents the entire purchase transaction.
// NOTE(review): prost-generated message; monetary fields are proto `float`
// (f32) as defined by the API, not a decimal type.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurchaseTransaction {
    /// The transaction ID with a length limit of 128 characters.
    #[prost(string, tag = "1")]
    pub id: ::prost::alloc::string::String,
    /// Required. Total non-zero revenue or grand total associated with the
    /// transaction. This value include shipping, tax, or other adjustments to
    /// total revenue that you want to include as part of your revenue
    /// calculations.
    #[prost(float, tag = "2")]
    pub revenue: f32,
    /// All the taxes associated with the transaction.
    #[prost(float, tag = "3")]
    pub tax: f32,
    /// All the costs associated with the products. These can be manufacturing
    /// costs, shipping expenses not borne by the end user, or any other costs,
    /// such that:
    ///
    /// * Profit =
    /// \[revenue][google.cloud.retail.v2alpha.PurchaseTransaction.revenue\] -
    /// \[tax][google.cloud.retail.v2alpha.PurchaseTransaction.tax\] -
    /// \[cost][google.cloud.retail.v2alpha.PurchaseTransaction.cost\]
    #[prost(float, tag = "4")]
    pub cost: f32,
    /// Required. Currency code. Use three-character ISO-4217 code.
    #[prost(string, tag = "5")]
    pub currency_code: ::prost::alloc::string::String,
}
/// Google Cloud Storage location for input content.
// NOTE(review): prost-generated message; the original doc comment here had a
// dangling "format." fragment from doc extraction, removed above.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GcsSource {
    /// Required. Google Cloud Storage URIs to input files. URI can be up to
    /// 2000 characters long. URIs can match the full object path (for example,
    /// `gs://bucket/directory/object.json`) or a pattern matching one or more
    /// files, such as `gs://bucket/directory/*.json`. A request can
    /// contain at most 100 files, and each file can be up to 2 GB. See
    /// [Importing product
    /// information](<https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog>)
    /// for the expected file format and setup instructions.
    #[prost(string, repeated, tag = "1")]
    pub input_uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The schema to use when parsing the data from the source.
    ///
    /// Supported values for product imports:
    ///
    /// * `product` (default): One JSON
    /// \[Product][google.cloud.retail.v2alpha.Product\] per line. Each product must
    /// have a valid \[Product.id][google.cloud.retail.v2alpha.Product.id\].
    /// * `product_merchant_center`: See [Importing catalog data from Merchant
    /// Center](<https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc>).
    ///
    /// Supported values for user events imports:
    ///
    /// * `user_event` (default): One JSON
    /// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\] per line.
    /// * `user_event_ga360`: Using
    /// <https://support.google.com/analytics/answer/3437719.>
    #[prost(string, tag = "2")]
    pub data_schema: ::prost::alloc::string::String,
}
/// BigQuery source import data from.
// NOTE(review): prost-generated message; the oneof `partition` lives in the
// companion module below, mirroring prost's nested-type convention.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigQuerySource {
    /// The project ID (can be project # or ID) that the BigQuery source is in with
    /// a length limit of 128 characters. If not specified, inherits the project
    /// ID from the parent request.
    #[prost(string, tag = "5")]
    pub project_id: ::prost::alloc::string::String,
    /// Required. The BigQuery data set to copy the data from with a length limit
    /// of 1,024 characters.
    #[prost(string, tag = "1")]
    pub dataset_id: ::prost::alloc::string::String,
    /// Required. The BigQuery table to copy the data from with a length limit of
    /// 1,024 characters.
    #[prost(string, tag = "2")]
    pub table_id: ::prost::alloc::string::String,
    /// Intermediate Cloud Storage directory used for the import with a length
    /// limit of 2,000 characters. Can be specified if one wants to have the
    /// BigQuery export to a specific Cloud Storage directory.
    #[prost(string, tag = "3")]
    pub gcs_staging_dir: ::prost::alloc::string::String,
    /// The schema to use when parsing the data from the source.
    ///
    /// Supported values for product imports:
    ///
    /// * `product` (default): One JSON
    /// \[Product][google.cloud.retail.v2alpha.Product\] per line. Each product must
    /// have a valid \[Product.id][google.cloud.retail.v2alpha.Product.id\].
    /// * `product_merchant_center`: See [Importing catalog data from Merchant
    /// Center](<https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc>).
    ///
    /// Supported values for user events imports:
    ///
    /// * `user_event` (default): One JSON
    /// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\] per line.
    /// * `user_event_ga360`: Using
    /// <https://support.google.com/analytics/answer/3437719.>
    #[prost(string, tag = "4")]
    pub data_schema: ::prost::alloc::string::String,
    /// BigQuery table partition info. Leave this empty if the BigQuery table
    /// is not partitioned.
    #[prost(oneof = "big_query_source::Partition", tags = "6")]
    pub partition: ::core::option::Option<big_query_source::Partition>,
}
/// Nested message and enum types in `BigQuerySource`.
pub mod big_query_source {
    /// BigQuery table partition info. Leave this empty if the BigQuery table
    /// is not partitioned.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Partition {
        /// BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
        ///
        /// Only supported when
        /// \[ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2alpha.ImportProductsRequest.reconciliation_mode\]
        /// is set to `FULL`.
        // `r#type` is the sibling `google.type` proto package (raw identifier
        // because `type` is a Rust keyword).
        #[prost(message, tag = "6")]
        PartitionDate(super::super::super::super::r#type::Date),
    }
}
/// The inline source for the input config for ImportProducts method.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ProductInlineSource {
    /// Required. A list of products to update/create. Each product must have a
    /// valid \[Product.id][google.cloud.retail.v2alpha.Product.id\]. Recommended max
    /// of 100 items.
    #[prost(message, repeated, tag = "1")]
    pub products: ::prost::alloc::vec::Vec<Product>,
}
/// The inline source for the input config for ImportUserEvents method.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserEventInlineSource {
    /// Required. A list of user events to import. Recommended max of 10k items.
    #[prost(message, repeated, tag = "1")]
    pub user_events: ::prost::alloc::vec::Vec<UserEvent>,
}
/// Configuration of destination for Import related errors.
// NOTE(review): prost-generated message; the single-variant oneof below exists
// so the proto can grow more destinations without breaking this type.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportErrorsConfig {
    /// Required. Errors destination.
    #[prost(oneof = "import_errors_config::Destination", tags = "1")]
    pub destination: ::core::option::Option<import_errors_config::Destination>,
}
/// Nested message and enum types in `ImportErrorsConfig`.
pub mod import_errors_config {
    /// Required. Errors destination.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Destination {
        /// Google Cloud Storage path for import errors. This must be an empty,
        /// existing Cloud Storage bucket. Import errors will be written to a file in
        /// this bucket, one per line, as a JSON-encoded
        /// `google.rpc.Status` message.
        #[prost(string, tag = "1")]
        GcsPrefix(::prost::alloc::string::String),
    }
}
/// Request message for Import methods.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportProductsRequest {
    /// Required.
    /// `projects/1234/locations/global/catalogs/default_catalog/branches/default_branch`
    ///
    /// If no updateMask is specified, requires products.create permission.
    /// If updateMask is specified, requires products.update permission.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Unique identifier provided by client, within the ancestor
    /// dataset scope. Ensures idempotency and used for request deduplication.
    /// Server-generated if unspecified. Up to 128 characters long and must match
    /// the pattern: `\[a-zA-Z0-9_\]+`. This is returned as \[Operation.name][\] in
    /// \[ImportMetadata][google.cloud.retail.v2alpha.ImportMetadata\].
    ///
    /// Only supported when
    /// \[ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2alpha.ImportProductsRequest.reconciliation_mode\]
    /// is set to `FULL`.
    #[prost(string, tag = "6")]
    pub request_id: ::prost::alloc::string::String,
    /// Required. The desired input location of the data.
    #[prost(message, optional, tag = "2")]
    pub input_config: ::core::option::Option<ProductInputConfig>,
    /// The desired location of errors incurred during the Import.
    #[prost(message, optional, tag = "3")]
    pub errors_config: ::core::option::Option<ImportErrorsConfig>,
    /// Indicates which fields in the provided imported 'products' to update. If
    /// not set, will by default update all fields.
    #[prost(message, optional, tag = "4")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// The mode of reconciliation between existing products and the products to be
    /// imported. Defaults to
    /// \[ReconciliationMode.INCREMENTAL][google.cloud.retail.v2alpha.ImportProductsRequest.ReconciliationMode.INCREMENTAL\].
    // Stored as the raw enum discriminant, per prost's enumeration convention.
    #[prost(enumeration = "import_products_request::ReconciliationMode", tag = "5")]
    pub reconciliation_mode: i32,
    /// Pub/Sub topic for receiving notification. If this field is set,
    /// when the import is finished, a notification will be sent to
    /// specified Pub/Sub topic. The message data will be JSON string of a
    /// \[Operation][google.longrunning.Operation\].
    /// Format of the Pub/Sub topic is `projects/{project}/topics/{topic}`.
    ///
    /// Only supported when
    /// \[ImportProductsRequest.reconciliation_mode][google.cloud.retail.v2alpha.ImportProductsRequest.reconciliation_mode\]
    /// is set to `FULL`.
    #[prost(string, tag = "7")]
    pub notification_pubsub_topic: ::prost::alloc::string::String,
}
/// Nested message and enum types in `ImportProductsRequest`.
pub mod import_products_request {
    /// Indicates how imported products are reconciled with the existing products
    /// created or imported before.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum ReconciliationMode {
        /// Defaults to INCREMENTAL.
        Unspecified = 0,
        /// Inserts new products or updates existing products.
        Incremental = 1,
        /// Calculates diff and replaces the entire product dataset. Existing
        /// products may be deleted if they are not present in the source location.
        ///
        /// Can only be set while using
        /// \[BigQuerySource][google.cloud.retail.v2alpha.BigQuerySource\].
        ///
        /// Add the IAM permission "BigQuery Data Viewer" for
        /// [email protected] before
        /// using this feature otherwise an error is thrown.
        ///
        /// This feature is only available for users who have Retail Search enabled.
        /// Please submit a form \[here\](<https://cloud.google.com/contact>) to contact
        /// cloud sales if you are interested in using Retail Search.
        Full = 2,
    }
}
/// Request message for the ImportUserEvents request.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportUserEventsRequest {
    /// Required. `projects/1234/locations/global/catalogs/default_catalog`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The desired input location of the data.
    #[prost(message, optional, tag = "2")]
    pub input_config: ::core::option::Option<UserEventInputConfig>,
    /// The desired location of errors incurred during the Import. Cannot be set
    /// for inline user event imports.
    #[prost(message, optional, tag = "3")]
    pub errors_config: ::core::option::Option<ImportErrorsConfig>,
}
/// Request message for ImportCompletionData methods.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportCompletionDataRequest {
    /// Required. The catalog which the suggestions dataset belongs to.
    ///
    /// Format: `projects/1234/locations/global/catalogs/default_catalog`.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The desired input location of the data.
    #[prost(message, optional, tag = "2")]
    pub input_config: ::core::option::Option<CompletionDataInputConfig>,
    /// Pub/Sub topic for receiving notification. If this field is set,
    /// when the import is finished, a notification will be sent to
    /// specified Pub/Sub topic. The message data will be JSON string of a
    /// \[Operation][google.longrunning.Operation\].
    /// Format of the Pub/Sub topic is `projects/{project}/topics/{topic}`.
    #[prost(string, tag = "3")]
    pub notification_pubsub_topic: ::prost::alloc::string::String,
}
/// The input config source for products.
// NOTE(review): prost-generated message; the oneof variants live in the
// companion module below, per prost's nested-type convention.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ProductInputConfig {
    /// Required. The source of the input.
    #[prost(oneof = "product_input_config::Source", tags = "1, 2, 3")]
    pub source: ::core::option::Option<product_input_config::Source>,
}
/// Nested message and enum types in `ProductInputConfig`.
pub mod product_input_config {
    /// Required. The source of the input.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Source {
        /// The Inline source for the input content for products.
        #[prost(message, tag = "1")]
        ProductInlineSource(super::ProductInlineSource),
        /// Google Cloud Storage location for the input content.
        #[prost(message, tag = "2")]
        GcsSource(super::GcsSource),
        /// BigQuery input source.
        #[prost(message, tag = "3")]
        BigQuerySource(super::BigQuerySource),
    }
}
/// The input config source for user events.
// NOTE(review): prost-generated message; the oneof variants live in the
// companion module below, per prost's nested-type convention.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserEventInputConfig {
    /// The source of the input.
    #[prost(oneof = "user_event_input_config::Source", tags = "1, 2, 3")]
    pub source: ::core::option::Option<user_event_input_config::Source>,
}
/// Nested message and enum types in `UserEventInputConfig`.
pub mod user_event_input_config {
    /// The source of the input.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Source {
        /// Required. The Inline source for the input content for UserEvents.
        #[prost(message, tag = "1")]
        UserEventInlineSource(super::UserEventInlineSource),
        /// Required. Google Cloud Storage location for the input content.
        #[prost(message, tag = "2")]
        GcsSource(super::GcsSource),
        /// Required. BigQuery input source.
        #[prost(message, tag = "3")]
        BigQuerySource(super::BigQuerySource),
    }
}
/// The input config source for completion data.
// NOTE(review): prost-generated message; the single-variant oneof below exists
// so the proto can add more completion-data sources without breaking this type.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompletionDataInputConfig {
    /// The source of the input.
    ///
    /// Supported
    /// \[BigQuerySource.data_schema][google.cloud.retail.v2alpha.BigQuerySource.data_schema\]
    /// values for suggestions imports:
    ///
    /// * `suggestions` (default): One JSON completion suggestion per line.
    /// * `denylist`: One JSON deny suggestion per line.
    /// * `allowlist`: One JSON allow suggestion per line.
    #[prost(oneof = "completion_data_input_config::Source", tags = "1")]
    pub source: ::core::option::Option<completion_data_input_config::Source>,
}
/// Nested message and enum types in `CompletionDataInputConfig`.
pub mod completion_data_input_config {
    /// The source of the input.
    ///
    /// Supported
    /// \[BigQuerySource.data_schema][google.cloud.retail.v2alpha.BigQuerySource.data_schema\]
    /// values for suggestions imports:
    ///
    /// * `suggestions` (default): One JSON completion suggestion per line.
    /// * `denylist`: One JSON deny suggestion per line.
    /// * `allowlist`: One JSON allow suggestion per line.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Source {
        /// Required. BigQuery input source.
        ///
        /// Add the IAM permission "BigQuery Data Viewer" for
        /// [email protected] before
        /// using this feature otherwise an error is thrown.
        #[prost(message, tag = "1")]
        BigQuerySource(super::BigQuerySource),
    }
}
/// Metadata related to the progress of the Import operation. This will be
/// returned by the google.longrunning.Operation.metadata field.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportMetadata {
    /// Operation create time.
    #[prost(message, optional, tag = "1")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Operation last update time. If the operation is done, this is also the
    /// finish time.
    #[prost(message, optional, tag = "2")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Count of entries that were processed successfully.
    #[prost(int64, tag = "3")]
    pub success_count: i64,
    /// Count of entries that encountered errors while processing.
    #[prost(int64, tag = "4")]
    pub failure_count: i64,
    /// Id of the request / operation. This is parroting back the requestId
    /// that was passed in the request.
    #[prost(string, tag = "5")]
    pub request_id: ::prost::alloc::string::String,
    /// Pub/Sub topic for receiving notification. If this field is set,
    /// when the import is finished, a notification will be sent to
    /// specified Pub/Sub topic. The message data will be JSON string of a
    /// \[Operation][google.longrunning.Operation\].
    /// Format of the Pub/Sub topic is `projects/{project}/topics/{topic}`.
    #[prost(string, tag = "6")]
    pub notification_pubsub_topic: ::prost::alloc::string::String,
}
/// Response of the
/// \[ImportProductsRequest][google.cloud.retail.v2alpha.ImportProductsRequest\].
/// If the long running operation is done, then this message is returned by the
/// google.longrunning.Operations.response field if the operation was successful.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportProductsResponse {
    /// A sample of errors encountered while processing the request.
    #[prost(message, repeated, tag = "1")]
    pub error_samples: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
    /// Echoes the destination for the complete errors in the request if set.
    #[prost(message, optional, tag = "2")]
    pub errors_config: ::core::option::Option<ImportErrorsConfig>,
}
/// Response of the ImportUserEventsRequest. If the long running
/// operation was successful, then this message is returned by the
/// google.longrunning.Operations.response field if the operation was successful.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportUserEventsResponse {
    /// A sample of errors encountered while processing the request.
    #[prost(message, repeated, tag = "1")]
    pub error_samples: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
    /// Echoes the destination for the complete errors if this field was set in
    /// the request.
    #[prost(message, optional, tag = "2")]
    pub errors_config: ::core::option::Option<ImportErrorsConfig>,
    /// Aggregated statistics of user event import status.
    #[prost(message, optional, tag = "3")]
    pub import_summary: ::core::option::Option<UserEventImportSummary>,
}
/// A summary of import result. The UserEventImportSummary summarizes
/// the import status for user events.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserEventImportSummary {
    /// Count of user events imported with complete existing catalog information.
    #[prost(int64, tag = "1")]
    pub joined_events_count: i64,
    /// Count of user events imported, but with catalog information not found
    /// in the imported catalog.
    #[prost(int64, tag = "2")]
    pub unjoined_events_count: i64,
}
/// Response of the
/// \[ImportCompletionDataRequest][google.cloud.retail.v2alpha.ImportCompletionDataRequest\].
/// If the long running operation is done, this message is returned by the
/// google.longrunning.Operations.response field if the operation is successful.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportCompletionDataResponse {
    /// A sample of errors encountered while processing the request.
    #[prost(message, repeated, tag = "1")]
    pub error_samples: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
}
/// Configures what level the product should be uploaded with regards to
/// how user events will be sent and how predictions will be made.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ProductLevelConfig {
    /// The type of \[Product][google.cloud.retail.v2alpha.Product\]s allowed to be
    /// ingested into the catalog. Acceptable values are:
    ///
    /// * `primary` (default): You can ingest
    /// \[Product][google.cloud.retail.v2alpha.Product\]s of all types. When
    /// ingesting a \[Product][google.cloud.retail.v2alpha.Product\], its type will
    /// default to
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// if unset.
    /// * `variant`: You can only ingest
    /// \[Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s.
    /// This means
    /// \[Product.primary_product_id][google.cloud.retail.v2alpha.Product.primary_product_id\]
    /// cannot be empty.
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// If this field is `variant` and
    /// \[merchant_center_product_id_field][google.cloud.retail.v2alpha.ProductLevelConfig.merchant_center_product_id_field\]
    /// is `itemGroupId`, an INVALID_ARGUMENT error is returned.
    ///
    /// See [Using product
    /// levels](<https://cloud.google.com/retail/recommendations-ai/docs/catalog#product-levels>)
    /// for more details.
    #[prost(string, tag = "1")]
    pub ingestion_product_type: ::prost::alloc::string::String,
    /// Which field of [Merchant Center
    /// Product](/bigquery-transfer/docs/merchant-center-products-schema) should be
    /// imported as \[Product.id][google.cloud.retail.v2alpha.Product.id\].
    /// Acceptable values are:
    ///
    /// * `offerId` (default): Import `offerId` as the product ID.
    /// * `itemGroupId`: Import `itemGroupId` as the product ID. Notice that Retail
    /// API will choose one item from the ones with the same `itemGroupId`, and
    /// use it to represent the item group.
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// If this field is `itemGroupId` and
    /// \[ingestion_product_type][google.cloud.retail.v2alpha.ProductLevelConfig.ingestion_product_type\]
    /// is `variant`, an INVALID_ARGUMENT error is returned.
    ///
    /// See [Using product
    /// levels](<https://cloud.google.com/retail/recommendations-ai/docs/catalog#product-levels>)
    /// for more details.
    #[prost(string, tag = "2")]
    pub merchant_center_product_id_field: ::prost::alloc::string::String,
}
/// Represents a link between a Merchant Center account and a branch.
/// Once a link is established, products from the linked merchant center account
/// will be streamed to the linked branch.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MerchantCenterLink {
    /// Required. The linked [Merchant center account
    /// id](<https://developers.google.com/shopping-content/guides/accountstatuses>).
    /// The account must be a standalone account or a sub-account of a MCA.
    #[prost(int64, tag = "1")]
    pub merchant_center_account_id: i64,
    /// The branch id (e.g. 0/1/2) within this catalog that products from
    /// merchant_center_account_id are streamed to. When updating this field, an
    /// empty value will use the currently configured default branch. However,
    /// changing the default branch later on won't change the linked branch here.
    ///
    /// A single branch id can only have one linked merchant center account id.
    #[prost(string, tag = "2")]
    pub branch_id: ::prost::alloc::string::String,
    /// String representing the destination to import for, all if left empty.
    /// List of possible values can be found here:
    /// <https://support.google.com/merchants/answer/7501026>
    /// List of allowed string values:
    /// "Shopping_ads", "Buy_on_google_listings", "Display_ads", "Local_inventory
    /// _ads", "Free_listings", "Free_local_listings"
    /// NOTE: The string values are case sensitive.
    #[prost(string, repeated, tag = "3")]
    pub destinations: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Configures Merchant Center linking.
/// Links contained in the config will be used to sync data from a Merchant
/// Center account to a Cloud Retail branch.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MerchantCenterLinkingConfig {
    /// Links between Merchant Center accounts and branches.
    #[prost(message, repeated, tag = "1")]
    pub links: ::prost::alloc::vec::Vec<MerchantCenterLink>,
}
/// The catalog configuration.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Catalog {
    /// Required. Immutable. The fully qualified resource name of the catalog.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. Immutable. The catalog display name.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. The product level configuration.
    #[prost(message, optional, tag = "4")]
    pub product_level_config: ::core::option::Option<ProductLevelConfig>,
    /// The Merchant Center linking configuration.
    /// Once a link is added, the data stream from Merchant Center to Cloud Retail
    /// will be enabled automatically. The requester must have access to the
    /// merchant center account in order to make changes to this field.
    #[prost(message, optional, tag = "6")]
    pub merchant_center_linking_config: ::core::option::Option<MerchantCenterLinkingConfig>,
}
/// Request for
/// \[CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs\]
/// method.
// NOTE(review): prost-generated message; keep field tags in sync with the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListCatalogsRequest {
    /// Required. The account resource name with an associated location.
    ///
    /// If the caller does not have permission to list
    /// \[Catalog][google.cloud.retail.v2alpha.Catalog\]s under this location,
    /// regardless of whether or not this location exists, a PERMISSION_DENIED
    /// error is returned.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Maximum number of \[Catalog][google.cloud.retail.v2alpha.Catalog\]s to
    /// return. If unspecified, defaults to 50. The maximum allowed value is 1000.
    /// Values above 1000 will be coerced to 1000.
    ///
    /// If this field is negative, an INVALID_ARGUMENT is returned.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token
    /// \[ListCatalogsResponse.next_page_token][google.cloud.retail.v2alpha.ListCatalogsResponse.next_page_token\],
    /// received from a previous
    /// \[CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs\]
    /// call. Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs\]
    /// must match the call that provided the page token. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
}
/// Response for
/// \[CatalogService.ListCatalogs][google.cloud.retail.v2alpha.CatalogService.ListCatalogs\]
/// method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListCatalogsResponse {
/// All the customer's \[Catalog][google.cloud.retail.v2alpha.Catalog\]s.
#[prost(message, repeated, tag = "1")]
pub catalogs: ::prost::alloc::vec::Vec<Catalog>,
/// A token that can be sent as
/// \[ListCatalogsRequest.page_token][google.cloud.retail.v2alpha.ListCatalogsRequest.page_token\]
/// to retrieve the next page. If this field is omitted, there are no
/// subsequent pages.
#[prost(string, tag = "2")]
pub next_page_token: ::prost::alloc::string::String,
}
/// Request for
/// \[CatalogService.UpdateCatalog][google.cloud.retail.v2alpha.CatalogService.UpdateCatalog\]
/// method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateCatalogRequest {
/// Required. The \[Catalog][google.cloud.retail.v2alpha.Catalog\] to update.
///
/// If the caller does not have permission to update the
/// \[Catalog][google.cloud.retail.v2alpha.Catalog\], regardless of whether or
/// not it exists, a PERMISSION_DENIED error is returned.
///
/// If the \[Catalog][google.cloud.retail.v2alpha.Catalog\] to update does not
/// exist, a NOT_FOUND error is returned.
#[prost(message, optional, tag = "1")]
pub catalog: ::core::option::Option<Catalog>,
/// Indicates which fields in the provided
/// \[Catalog][google.cloud.retail.v2alpha.Catalog\] to update.
///
/// If an unsupported or unknown field is provided, an INVALID_ARGUMENT error
/// is returned.
#[prost(message, optional, tag = "2")]
pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message to set a specified branch as new default_branch.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SetDefaultBranchRequest {
/// Full resource name of the catalog, such as
/// `projects/*/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub catalog: ::prost::alloc::string::String,
/// The final component of the resource name of a branch.
///
/// This field must be one of "0", "1" or "2". Otherwise, an INVALID_ARGUMENT
/// error is returned.
#[prost(string, tag = "2")]
pub branch_id: ::prost::alloc::string::String,
/// Some note on this request, this can be retrieved by
/// \[CatalogService.GetDefaultBranch][google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch\]
/// before next valid default branch set occurs.
///
/// This field must be a UTF-8 encoded string with a length limit of 1,000
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, tag = "3")]
pub note: ::prost::alloc::string::String,
}
/// Request message to show which branch is currently the default branch.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetDefaultBranchRequest {
/// The parent catalog resource name, such as
/// `projects/*/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub catalog: ::prost::alloc::string::String,
}
/// Response message of
/// \[CatalogService.GetDefaultBranch][google.cloud.retail.v2alpha.CatalogService.GetDefaultBranch\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetDefaultBranchResponse {
/// Full resource name of the branch id currently set as default branch.
#[prost(string, tag = "1")]
pub branch: ::prost::alloc::string::String,
/// The time when this branch is set to default.
#[prost(message, optional, tag = "2")]
pub set_time: ::core::option::Option<::prost_types::Timestamp>,
/// This corresponds to
/// \[SetDefaultBranchRequest.note][google.cloud.retail.v2alpha.SetDefaultBranchRequest.note\]
/// field, when this branch was set as default.
#[prost(string, tag = "3")]
pub note: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
pub mod catalog_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " Service for managing catalog configuration."]
#[derive(Debug, Clone)]
pub struct CatalogServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> CatalogServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> CatalogServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
CatalogServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Lists all the [Catalog][google.cloud.retail.v2alpha.Catalog]s associated"]
#[doc = " with the project."]
pub async fn list_catalogs(
&mut self,
request: impl tonic::IntoRequest<super::ListCatalogsRequest>,
) -> Result<tonic::Response<super::ListCatalogsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CatalogService/ListCatalogs",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates the [Catalog][google.cloud.retail.v2alpha.Catalog]s."]
pub async fn update_catalog(
&mut self,
request: impl tonic::IntoRequest<super::UpdateCatalogRequest>,
) -> Result<tonic::Response<super::Catalog>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CatalogService/UpdateCatalog",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Set a specified branch id as default branch. API methods such as"]
#[doc = " [SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search],"]
#[doc = " [ProductService.GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct],"]
#[doc = " [ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]"]
#[doc = " will treat requests using \"default_branch\" to the actual branch id set as"]
#[doc = " default."]
#[doc = ""]
#[doc = " For example, if `projects/*/locations/*/catalogs/*/branches/1` is set as"]
#[doc = " default, setting"]
#[doc = " [SearchRequest.branch][google.cloud.retail.v2alpha.SearchRequest.branch] to"]
#[doc = " `projects/*/locations/*/catalogs/*/branches/default_branch` is equivalent"]
#[doc = " to setting"]
#[doc = " [SearchRequest.branch][google.cloud.retail.v2alpha.SearchRequest.branch] to"]
#[doc = " `projects/*/locations/*/catalogs/*/branches/1`."]
#[doc = ""]
#[doc = " Using multiple branches can be useful when developers would like"]
#[doc = " to have a staging branch to test and verify for future usage. When it"]
#[doc = " becomes ready, developers switch on the staging branch using this API while"]
#[doc = " keeping using `projects/*/locations/*/catalogs/*/branches/default_branch`"]
#[doc = " as [SearchRequest.branch][google.cloud.retail.v2alpha.SearchRequest.branch]"]
#[doc = " to route the traffic to this staging branch."]
#[doc = ""]
#[doc = " CAUTION: If you have live predict/search traffic, switching the default"]
#[doc = " branch could potentially cause outages if the ID space of the new branch is"]
#[doc = " very different from the old one."]
#[doc = ""]
#[doc = " More specifically:"]
#[doc = ""]
#[doc = " * PredictionService will only return product IDs from branch {newBranch}."]
#[doc = " * SearchService will only return product IDs from branch {newBranch}"]
#[doc = " (if branch is not explicitly set)."]
#[doc = " * UserEventService will only join events with products from branch"]
#[doc = " {newBranch}."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn set_default_branch(
&mut self,
request: impl tonic::IntoRequest<super::SetDefaultBranchRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CatalogService/SetDefaultBranch",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Get which branch is currently default branch set by"]
#[doc = " [CatalogService.SetDefaultBranch][google.cloud.retail.v2alpha.CatalogService.SetDefaultBranch]"]
#[doc = " method under a specified parent catalog."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn get_default_branch(
&mut self,
request: impl tonic::IntoRequest<super::GetDefaultBranchRequest>,
) -> Result<tonic::Response<super::GetDefaultBranchResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CatalogService/GetDefaultBranch",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
/// Auto-complete parameters.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompleteQueryRequest {
/// Required. Catalog for which the completion is performed.
///
/// Full resource name of catalog, such as
/// `projects/*/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub catalog: ::prost::alloc::string::String,
/// Required. The query used to generate suggestions.
///
/// The maximum number of allowed characters is 255.
#[prost(string, tag = "2")]
pub query: ::prost::alloc::string::String,
/// A unique identifier for tracking visitors. For example, this could be
/// implemented with an HTTP cookie, which should be able to uniquely identify
/// a visitor on a single device. This unique identifier should not change if
/// the visitor logs in or out of the website.
///
/// The field must be a UTF-8 encoded string with a length limit of 128
/// characters. Otherwise, an INVALID_ARGUMENT error is returned.
#[prost(string, tag = "7")]
pub visitor_id: ::prost::alloc::string::String,
/// The list of languages of the query. This is
/// the BCP-47 language code, such as "en-US" or "sr-Latn".
/// For more information, see
/// [Tags for Identifying Languages](<https://tools.ietf.org/html/bcp47>).
///
/// The maximum number of allowed characters is 255.
/// Only "en-US" is currently supported.
#[prost(string, repeated, tag = "3")]
pub language_codes: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// The device type context for completion suggestions.
/// It is useful to apply different suggestions on different device types, e.g.
/// `DESKTOP`, `MOBILE`. If it is empty, the suggestions are across all device
/// types.
///
/// Supported formats:
///
/// * `UNKNOWN_DEVICE_TYPE`
///
/// * `DESKTOP`
///
/// * `MOBILE`
///
/// * A customized string starts with `OTHER_`, e.g. `OTHER_IPHONE`.
#[prost(string, tag = "4")]
pub device_type: ::prost::alloc::string::String,
/// Determines which dataset to use for fetching completion. "user-data" will
/// use the imported dataset through
/// \[CompletionService.ImportCompletionData][google.cloud.retail.v2alpha.CompletionService.ImportCompletionData\].
/// "cloud-retail" will use the dataset generated by cloud retail based on user
/// events. If leave empty, it will use the "user-data".
///
/// Current supported values:
///
/// * user-data
///
/// * cloud-retail
/// This option requires additional allowlisting. Before using cloud-retail,
/// contact Cloud Retail support team first.
#[prost(string, tag = "6")]
pub dataset: ::prost::alloc::string::String,
/// Completion max suggestions. If left unset or set to 0, then will fallback
/// to the configured value
/// \[CompletionConfig.max_suggestions][google.cloud.retail.v2alpha.CompletionConfig.max_suggestions\].
///
/// The maximum allowed max suggestions is 20. If it is set higher, it will be
/// capped by 20.
#[prost(int32, tag = "5")]
pub max_suggestions: i32,
}
/// Response of the auto-complete query.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompleteQueryResponse {
/// Results of the matching suggestions. The result list is ordered and the
/// first result is top suggestion.
#[prost(message, repeated, tag = "1")]
pub completion_results: ::prost::alloc::vec::Vec<complete_query_response::CompletionResult>,
/// A unique complete token. This should be included in the
/// \[SearchRequest][google.cloud.retail.v2alpha.SearchRequest\] resulting from
/// this completion, which enables accurate attribution of complete model
/// performance.
#[prost(string, tag = "2")]
pub attribution_token: ::prost::alloc::string::String,
/// Matched recent searches of this user. The maximum number of recent searches
/// is 10. This field is a restricted feature. Contact Retail Search support
/// team if you are interested in enabling it.
///
/// This feature is only available when
/// \[CompleteQueryRequest.visitor_id][google.cloud.retail.v2alpha.CompleteQueryRequest.visitor_id\]
/// field is set and \[UserEvent][google.cloud.retail.v2alpha.UserEvent\] is
/// imported. The recent searches satisfy the follow rules:
/// * They are ordered from latest to oldest.
/// * They are matched with
/// \[CompleteQueryRequest.query][google.cloud.retail.v2alpha.CompleteQueryRequest.query\]
/// case insensitively.
/// * They are transformed to lower cases.
/// * They are UTF-8 safe.
///
/// Recent searches are deduplicated. More recent searches will be reserved
/// when duplication happens.
#[prost(message, repeated, tag = "3")]
pub recent_search_results:
::prost::alloc::vec::Vec<complete_query_response::RecentSearchResult>,
}
/// Nested message and enum types in `CompleteQueryResponse`.
pub mod complete_query_response {
/// Resource that represents completion results.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompletionResult {
/// The suggestion for the query.
#[prost(string, tag = "1")]
pub suggestion: ::prost::alloc::string::String,
/// Additional custom attributes ingested through BigQuery.
#[prost(map = "string, message", tag = "2")]
pub attributes:
::std::collections::HashMap<::prost::alloc::string::String, super::CustomAttribute>,
}
/// Recent search of this user.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RecentSearchResult {
/// The recent search query.
#[prost(string, tag = "1")]
pub recent_search: ::prost::alloc::string::String,
}
}
#[doc = r" Generated client implementations."]
pub mod completion_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " Auto-completion service for retail."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
#[derive(Debug, Clone)]
pub struct CompletionServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> CompletionServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> CompletionServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
CompletionServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Completes the specified prefix with keyword suggestions."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn complete_query(
&mut self,
request: impl tonic::IntoRequest<super::CompleteQueryRequest>,
) -> Result<tonic::Response<super::CompleteQueryResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CompletionService/CompleteQuery",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Bulk import of processed completion dataset."]
#[doc = ""]
#[doc = " Request processing may be synchronous. Partial updating is not supported."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn import_completion_data(
&mut self,
request: impl tonic::IntoRequest<super::ImportCompletionDataRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.CompletionService/ImportCompletionData",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
/// Configuration of destination for Export related errors.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportErrorsConfig {
/// Required. Errors destination.
#[prost(oneof = "export_errors_config::Destination", tags = "1")]
pub destination: ::core::option::Option<export_errors_config::Destination>,
}
/// Nested message and enum types in `ExportErrorsConfig`.
pub mod export_errors_config {
/// Required. Errors destination.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Destination {
/// Google Cloud Storage path for import errors. This must be an empty,
/// existing Cloud Storage bucket. Export errors will be written to a file in
/// this bucket, one per line, as a JSON-encoded
/// `google.rpc.Status` message.
#[prost(string, tag = "1")]
GcsPrefix(::prost::alloc::string::String),
}
}
/// Metadata related to the progress of the Export operation. This will be
/// returned by the google.longrunning.Operation.metadata field.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportMetadata {
/// Operation create time.
#[prost(message, optional, tag = "1")]
pub create_time: ::core::option::Option<::prost_types::Timestamp>,
/// Operation last update time. If the operation is done, this is also the
/// finish time.
#[prost(message, optional, tag = "2")]
pub update_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Response of the ExportProductsRequest. If the long running
/// operation is done, then this message is returned by the
/// google.longrunning.Operations.response field if the operation was successful.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportProductsResponse {
/// A sample of errors encountered while processing the request.
#[prost(message, repeated, tag = "1")]
pub error_samples: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
/// Echoes the destination for the complete errors in the request if set.
#[prost(message, optional, tag = "2")]
pub errors_config: ::core::option::Option<ExportErrorsConfig>,
}
/// Response of the ExportUserEventsRequest. If the long running
/// operation was successful, then this message is returned by the
/// google.longrunning.Operations.response field if the operation was successful.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportUserEventsResponse {
/// A sample of errors encountered while processing the request.
#[prost(message, repeated, tag = "1")]
pub error_samples: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
/// Echoes the destination for the complete errors if this field was set in
/// the request.
#[prost(message, optional, tag = "2")]
pub errors_config: ::core::option::Option<ExportErrorsConfig>,
}
/// Request message for Predict method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictRequest {
/// Required. Full resource name of the format:
/// {name=projects/*/locations/global/catalogs/default_catalog/placements/*}
/// The ID of the Recommendations AI placement. Before you can request
/// predictions from your model, you must create at least one placement for it.
/// For more information, see [Managing
/// placements](<https://cloud.google.com/retail/recommendations-ai/docs/manage-placements>).
///
/// The full list of available placements can be seen at
/// <https://console.cloud.google.com/recommendation/catalogs/default_catalog/placements>
#[prost(string, tag = "1")]
pub placement: ::prost::alloc::string::String,
/// Required. Context about the user, what they are looking at and what action
/// they took to trigger the predict request. Note that this user event detail
/// won't be ingested to userEvent logs. Thus, a separate userEvent write
/// request is required for event logging.
#[prost(message, optional, tag = "2")]
pub user_event: ::core::option::Option<UserEvent>,
/// Maximum number of results to return per page. Set this property
/// to the number of prediction results needed. If zero, the service will
/// choose a reasonable default. The maximum allowed value is 100. Values
/// above 100 will be coerced to 100.
#[prost(int32, tag = "3")]
pub page_size: i32,
/// The previous PredictResponse.next_page_token.
#[prost(string, tag = "4")]
pub page_token: ::prost::alloc::string::String,
/// Filter for restricting prediction results with a length limit of 5,000
/// characters. Accepts values for tags and the `filterOutOfStockItems` flag.
///
/// * Tag expressions. Restricts predictions to products that match all of the
/// specified tags. Boolean operators `OR` and `NOT` are supported if the
/// expression is enclosed in parentheses, and must be separated from the
/// tag values by a space. `-"tagA"` is also supported and is equivalent to
/// `NOT "tagA"`. Tag values must be double quoted UTF-8 encoded strings
/// with a size limit of 1,000 characters.
///
/// Note: "Recently viewed" models don't support tag filtering at the
/// moment.
///
/// * filterOutOfStockItems. Restricts predictions to products that do not
/// have a
/// stockState value of OUT_OF_STOCK.
///
/// Examples:
///
/// * tag=("Red" OR "Blue") tag="New-Arrival" tag=(NOT "promotional")
/// * filterOutOfStockItems tag=(-"promotional")
/// * filterOutOfStockItems
///
/// If your filter blocks all prediction results, nothing will be returned. If
/// you want generic (unfiltered) popular products to be returned instead, set
/// `strictFiltering` to false in `PredictRequest.params`.
#[prost(string, tag = "5")]
pub filter: ::prost::alloc::string::String,
/// Use validate only mode for this prediction query. If set to true, a
/// dummy model will be used that returns arbitrary products.
/// Note that the validate only mode should only be used for testing the API,
/// or if the model is not ready.
#[prost(bool, tag = "6")]
pub validate_only: bool,
/// Additional domain specific parameters for the predictions.
///
/// Allowed values:
///
/// * `returnProduct`: Boolean. If set to true, the associated product
/// object will be returned in the `results.metadata` field in the
/// prediction response.
/// * `returnScore`: Boolean. If set to true, the prediction 'score'
/// corresponding to each returned product will be set in the
/// `results.metadata` field in the prediction response. The given
/// 'score' indicates the probability of an product being clicked/purchased
/// given the user's context and history.
/// * `strictFiltering`: Boolean. True by default. If set to false, the service
/// will return generic (unfiltered) popular products instead of empty if
/// your filter blocks all prediction results.
/// * `priceRerankLevel`: String. Default empty. If set to be non-empty, then
/// it needs to be one of {'no-price-reranking', 'low-price-reranking',
/// 'medium-price-reranking', 'high-price-reranking'}. This gives
/// request-level control and adjusts prediction results based on product
/// price.
/// * `diversityLevel`: String. Default empty. If set to be non-empty, then
/// it needs to be one of {'no-diversity', 'low-diversity',
/// 'medium-diversity', 'high-diversity', 'auto-diversity'}. This gives
/// request-level control and adjusts prediction results based on product
/// category.
#[prost(map = "string, message", tag = "7")]
pub params: ::std::collections::HashMap<::prost::alloc::string::String, ::prost_types::Value>,
/// The labels applied to a resource must meet the following requirements:
///
/// * Each resource can have multiple labels, up to a maximum of 64.
/// * Each label must be a key-value pair.
/// * Keys have a minimum length of 1 character and a maximum length of 63
/// characters, and cannot be empty. Values can be empty, and have a maximum
/// length of 63 characters.
/// * Keys and values can contain only lowercase letters, numeric characters,
/// underscores, and dashes. All characters must use UTF-8 encoding, and
/// international characters are allowed.
/// * The key portion of a label must be unique. However, you can use the same
/// key with multiple resources.
/// * Keys must start with a lowercase letter or international character.
///
/// See [Google Cloud
/// Document](<https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements>)
/// for more details.
#[prost(map = "string, string", tag = "8")]
pub labels:
::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Response message for predict method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictResponse {
/// A list of recommended products. The order represents the ranking (from the
/// most relevant product to the least).
#[prost(message, repeated, tag = "1")]
pub results: ::prost::alloc::vec::Vec<predict_response::PredictionResult>,
/// A unique attribution token. This should be included in the
/// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\] logs resulting from this
/// recommendation, which enables accurate attribution of recommendation model
/// performance.
#[prost(string, tag = "2")]
pub attribution_token: ::prost::alloc::string::String,
/// IDs of products in the request that were missing from the inventory.
#[prost(string, repeated, tag = "3")]
pub missing_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
/// True if the validateOnly property was set in the request.
#[prost(bool, tag = "4")]
pub validate_only: bool,
}
/// Nested message and enum types in `PredictResponse`.
pub mod predict_response {
/// PredictionResult represents the recommendation prediction results.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictionResult {
/// ID of the recommended product
#[prost(string, tag = "1")]
pub id: ::prost::alloc::string::String,
/// Additional product metadata / annotations.
///
/// Possible values:
///
/// * `product`: JSON representation of the product. Will be set if
/// `returnProduct` is set to true in `PredictRequest.params`.
/// * `score`: Prediction score in double value. Will be set if
/// `returnScore` is set to true in `PredictRequest.params`.
#[prost(map = "string, message", tag = "2")]
pub metadata:
::std::collections::HashMap<::prost::alloc::string::String, ::prost_types::Value>,
}
}
// NOTE: tonic-build generated gRPC client module; hand edits will be lost when
// the bindings are regenerated from the .proto definitions.
#[doc = r" Generated client implementations."]
pub mod prediction_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " Service for making recommendation prediction."]
    #[derive(Debug, Clone)]
    pub struct PredictionServiceClient<T> {
        // Underlying tonic gRPC service wrapper; every RPC is dispatched through it.
        inner: tonic::client::Grpc<T>,
    }
    impl<T> PredictionServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps any compatible transport (e.g. a tonic `Channel`) in a client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose outgoing requests pass through `interceptor`
        // first (commonly used to attach auth metadata to every call).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> PredictionServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            PredictionServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Makes a recommendation prediction."]
        pub async fn predict(
            &mut self,
            request: impl tonic::IntoRequest<super::PredictRequest>,
        ) -> Result<tonic::Response<super::PredictResponse>, tonic::Status> {
            // Wait for the underlying service to be ready before sending;
            // readiness failures surface as a generic Unknown status.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            // Fully-qualified gRPC method path: /<package>.<Service>/<Method>.
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.retail.v2alpha.PredictionService/Predict",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Metadata related to the progress of the Purge operation.
/// This will be returned by the google.longrunning.Operation.metadata field.
// Intentionally empty: the proto defines no progress fields for this operation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeMetadata {}
/// Request message for PurgeUserEvents method.
// NOTE: prost-generated proto message; the `tag` numbers fix the protobuf wire
// format and must never be changed by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeUserEventsRequest {
    /// Required. The resource name of the catalog under which the events are
    /// created. The format is
    /// `projects/${projectId}/locations/global/catalogs/${catalogId}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The filter string to specify the events to be deleted with a
    /// length limit of 5,000 characters. Empty string filter is not allowed. The
    /// eligible fields for filtering are:
    ///
    /// * `eventType`: Double quoted
    /// \[UserEvent.event_type][google.cloud.retail.v2alpha.UserEvent.event_type\]
    /// string.
    /// * `eventTime`: in ISO 8601 "zulu" format.
    /// * `visitorId`: Double quoted string. Specifying this will delete all
    ///   events associated with a visitor.
    /// * `userId`: Double quoted string. Specifying this will delete all events
    ///   associated with a user.
    ///
    /// Examples:
    ///
    /// * Deleting all events in a time range:
    ///   `eventTime > "2012-04-23T18:25:43.511Z"
    ///   eventTime < "2012-04-23T18:30:43.511Z"`
    /// * Deleting specific eventType in time range:
    ///   `eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"`
    /// * Deleting all events for a specific visitor:
    ///   `visitorId = "visitor1024"`
    ///
    /// The filtering fields are assumed to have an implicit AND.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// Actually perform the purge.
    /// If `force` is set to false, the method will return the expected purge count
    /// without deleting any user events.
    #[prost(bool, tag = "3")]
    pub force: bool,
}
/// Response of the PurgeUserEventsRequest. If the long running operation is
/// successfully done, then this message is returned by the
/// google.longrunning.Operations.response field.
// prost-generated proto message; the `tag` number fixes the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeUserEventsResponse {
    /// The total count of events purged as a result of the operation.
    #[prost(int64, tag = "1")]
    pub purged_events_count: i64,
}
/// Request message for \[CreateProduct][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateProductRequest {
    /// Required. The parent catalog resource name, such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch`.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The \[Product][google.cloud.retail.v2alpha.Product\] to create.
    // `optional` here is prost's representation of a proto3 message field;
    // the API contract still requires it to be set.
    #[prost(message, optional, tag = "2")]
    pub product: ::core::option::Option<Product>,
    /// Required. The ID to use for the
    /// \[Product][google.cloud.retail.v2alpha.Product\], which will become the final
    /// component of the \[Product.name][google.cloud.retail.v2alpha.Product.name\].
    ///
    /// If the caller does not have permission to create the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    ///
    /// This field must be unique among all
    /// \[Product][google.cloud.retail.v2alpha.Product\]s with the same
    /// \[parent][google.cloud.retail.v2alpha.CreateProductRequest.parent\].
    /// Otherwise, an ALREADY_EXISTS error is returned.
    ///
    /// This field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "3")]
    pub product_id: ::prost::alloc::string::String,
}
/// Request message for \[GetProduct][\] method.
// prost-generated proto message; the `tag` number fixes the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetProductRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to access the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    ///
    /// If the requested \[Product][google.cloud.retail.v2alpha.Product\] does not
    /// exist, a NOT_FOUND error is returned.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[UpdateProduct][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateProductRequest {
    /// Required. The product to update/create.
    ///
    /// If the caller does not have permission to update the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    ///
    /// If the \[Product][google.cloud.retail.v2alpha.Product\] to update does not
    /// exist and
    /// \[allow_missing][google.cloud.retail.v2alpha.UpdateProductRequest.allow_missing\]
    /// is not set, a NOT_FOUND error is returned.
    #[prost(message, optional, tag = "1")]
    pub product: ::core::option::Option<Product>,
    /// Indicates which fields in the provided
    /// \[Product][google.cloud.retail.v2alpha.Product\] to update. The immutable and
    /// output only fields are NOT supported. If not set, all supported fields (the
    /// fields that are neither immutable nor output only) are updated.
    ///
    /// If an unsupported or unknown field is provided, an INVALID_ARGUMENT error
    /// is returned.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] is
    /// not found, a new \[Product][google.cloud.retail.v2alpha.Product\] will be
    /// created. In this situation, `update_mask` is ignored.
    #[prost(bool, tag = "3")]
    pub allow_missing: bool,
}
/// Request message for \[DeleteProduct][\] method.
// prost-generated proto message; the `tag` number fixes the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteProductRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to delete the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    ///
    /// If the \[Product][google.cloud.retail.v2alpha.Product\] to delete does not
    /// exist, a NOT_FOUND error is returned.
    ///
    /// The \[Product][google.cloud.retail.v2alpha.Product\] to delete can neither be
    /// a
    /// \[Product.Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] member nor a
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] with more than one
    /// \[variants][google.cloud.retail.v2alpha.Product.Type.VARIANT\]. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// All inventory information for the named
    /// \[Product][google.cloud.retail.v2alpha.Product\] will be deleted.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for
/// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\]
/// method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListProductsRequest {
    /// Required. The parent branch resource name, such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/0`. Use
    /// `default_branch` as the branch ID, to list products under the default
    /// branch.
    ///
    /// If the caller does not have permission to list
    /// \[Product][google.cloud.retail.v2alpha.Product\]s under this branch,
    /// regardless of whether or not this branch exists, a PERMISSION_DENIED error
    /// is returned.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Maximum number of \[Product][google.cloud.retail.v2alpha.Product\]s to
    /// return. If unspecified, defaults to 100. The maximum allowed value is 1000.
    /// Values above 1000 will be coerced to 1000.
    ///
    /// If this field is negative, an INVALID_ARGUMENT error is returned.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token
    /// \[ListProductsResponse.next_page_token][google.cloud.retail.v2alpha.ListProductsResponse.next_page_token\],
    /// received from a previous
    /// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\]
    /// call. Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\]
    /// must match the call that provided the page token. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// A filter to apply on the list results. Supported features:
    ///
    /// * List all the products under the parent branch if
    /// \[filter][google.cloud.retail.v2alpha.ListProductsRequest.filter\] is unset.
    /// * List
    /// \[Product.Type.VARIANT][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s sharing the same
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]. For example:
    ///   `primary_product_id = "some_product_id"`
    /// * List \[Product][google.cloud.retail.v2alpha.Product\]s bundled in a
    /// \[Product.Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\]
    /// \[Product][google.cloud.retail.v2alpha.Product\].
    ///   For example:
    ///   `collection_product_id = "some_product_id"`
    /// * List \[Product][google.cloud.retail.v2alpha.Product\]s with a particular
    ///   type. For example:
    ///   `type = "PRIMARY"`
    ///   `type = "VARIANT"`
    ///   `type = "COLLECTION"`
    ///
    /// If the field is unrecognizable, an INVALID_ARGUMENT error is returned.
    ///
    /// If the specified
    /// \[Product.Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] or
    /// \[Product.Type.COLLECTION][google.cloud.retail.v2alpha.Product.Type.COLLECTION\]
    /// \[Product][google.cloud.retail.v2alpha.Product\] does not exist, a NOT_FOUND
    /// error is returned.
    #[prost(string, tag = "4")]
    pub filter: ::prost::alloc::string::String,
    /// The fields of \[Product][google.cloud.retail.v2alpha.Product\] to return in
    /// the responses. If not set or empty, the following fields are returned:
    ///
    /// * \[Product.name][google.cloud.retail.v2alpha.Product.name\]
    /// * \[Product.id][google.cloud.retail.v2alpha.Product.id\]
    /// * \[Product.title][google.cloud.retail.v2alpha.Product.title\]
    /// * \[Product.uri][google.cloud.retail.v2alpha.Product.uri\]
    /// * \[Product.images][google.cloud.retail.v2alpha.Product.images\]
    /// * \[Product.price_info][google.cloud.retail.v2alpha.Product.price_info\]
    /// * \[Product.brands][google.cloud.retail.v2alpha.Product.brands\]
    ///
    /// If "*" is provided, all fields are returned.
    /// \[Product.name][google.cloud.retail.v2alpha.Product.name\] is always returned
    /// no matter what mask is set.
    ///
    /// If an unsupported or unknown field is provided, an INVALID_ARGUMENT error
    /// is returned.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// If true and
    /// \[page_token][google.cloud.retail.v2alpha.ListProductsRequest.page_token\] is
    /// empty,
    /// \[ListProductsResponse.total_size][google.cloud.retail.v2alpha.ListProductsResponse.total_size\]
    /// is set to the total count of matched items irrespective of pagination.
    ///
    /// Notice that setting this field to true affects the performance.
    #[prost(bool, tag = "6")]
    pub require_total_size: bool,
}
/// Response message for
/// \[ProductService.ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts\]
/// method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListProductsResponse {
    /// The \[Product][google.cloud.retail.v2alpha.Product\]s.
    #[prost(message, repeated, tag = "1")]
    pub products: ::prost::alloc::vec::Vec<Product>,
    /// A token that can be sent as
    /// \[ListProductsRequest.page_token][google.cloud.retail.v2alpha.ListProductsRequest.page_token\]
    /// to retrieve the next page. If this field is omitted, there are no
    /// subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
    /// The total count of matched \[Product][google.cloud.retail.v2alpha.Product\]s
    /// irrespective of pagination. The total number of
    /// \[Product][google.cloud.retail.v2alpha.Product\]s returned by pagination may
    /// be less than the
    /// \[total_size][google.cloud.retail.v2alpha.ListProductsResponse.total_size\]
    /// that matches.
    ///
    /// This field is ignored if
    /// \[ListProductsRequest.require_total_size][google.cloud.retail.v2alpha.ListProductsRequest.require_total_size\]
    /// is not set or
    /// \[ListProductsRequest.page_token][google.cloud.retail.v2alpha.ListProductsRequest.page_token\]
    /// is not empty.
    #[prost(int32, tag = "3")]
    pub total_size: i32,
}
/// Request message for \[SetInventory][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SetInventoryRequest {
    /// Required. The inventory information to update. The allowable fields to
    /// update are:
    /// * \[Product.price_info][google.cloud.retail.v2alpha.Product.price_info\]
    /// * \[Product.availability][google.cloud.retail.v2alpha.Product.availability\]
    /// * \[Product.available_quantity][google.cloud.retail.v2alpha.Product.available_quantity\]
    /// * \[Product.fulfillment_info][google.cloud.retail.v2alpha.Product.fulfillment_info\]
    /// The updated inventory fields must be specified in
    /// \[SetInventoryRequest.set_mask][google.cloud.retail.v2alpha.SetInventoryRequest.set_mask\].
    ///
    /// If \[SetInventoryRequest.inventory.name][\] is empty or invalid, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// If the caller does not have permission to update the
    /// \[Product][google.cloud.retail.v2alpha.Product\] named in
    /// \[Product.name][google.cloud.retail.v2alpha.Product.name\], regardless of
    /// whether or not it exists, a PERMISSION_DENIED error is returned.
    ///
    /// If the \[Product][google.cloud.retail.v2alpha.Product\] to update does not
    /// have existing inventory information, the provided inventory information
    /// will be inserted.
    ///
    /// If the \[Product][google.cloud.retail.v2alpha.Product\] to update has
    /// existing inventory information, the provided inventory information will be
    /// merged while respecting the last update time for each inventory field,
    /// using the provided or default value for
    /// \[SetInventoryRequest.set_time][google.cloud.retail.v2alpha.SetInventoryRequest.set_time\].
    ///
    /// The last update time is recorded for the following inventory fields:
    /// * \[Product.price_info][google.cloud.retail.v2alpha.Product.price_info\]
    /// * \[Product.availability][google.cloud.retail.v2alpha.Product.availability\]
    /// * \[Product.available_quantity][google.cloud.retail.v2alpha.Product.available_quantity\]
    /// * \[Product.fulfillment_info][google.cloud.retail.v2alpha.Product.fulfillment_info\]
    ///
    /// If a full overwrite of inventory information while ignoring timestamps is
    /// needed, \[UpdateProduct][\] should be invoked instead.
    // A full Product message is reused here as the inventory carrier; only the
    // fields named in `set_mask` are consulted.
    #[prost(message, optional, tag = "1")]
    pub inventory: ::core::option::Option<Product>,
    /// Indicates which inventory fields in the provided
    /// \[Product][google.cloud.retail.v2alpha.Product\] to update. If not set or set
    /// with empty paths, all inventory fields will be updated.
    ///
    /// If an unsupported or unknown field is provided, an INVALID_ARGUMENT error
    /// is returned and the entire update will be ignored.
    #[prost(message, optional, tag = "2")]
    pub set_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// The time when the request is issued, used to prevent
    /// out-of-order updates on inventory fields with the last update time
    /// recorded. If not provided, the internal system time will be used.
    #[prost(message, optional, tag = "3")]
    pub set_time: ::core::option::Option<::prost_types::Timestamp>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] with
    /// name \[Product.name][google.cloud.retail.v2alpha.Product.name\] is not found,
    /// the inventory update will still be processed and retained for at most 1 day
    /// until the \[Product][google.cloud.retail.v2alpha.Product\] is created. If set
    /// to false, a NOT_FOUND error is returned if the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is not found.
    #[prost(bool, tag = "4")]
    pub allow_missing: bool,
}
/// Metadata related to the progress of the SetInventory operation.
/// Currently empty because there is no meaningful metadata populated from the
/// \[SetInventory][\] method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SetInventoryMetadata {}
/// Response of the SetInventoryRequest. Currently empty because
/// there is no meaningful response populated from the \[SetInventory][\]
/// method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SetInventoryResponse {}
/// Request message for \[AddFulfillmentPlaces][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddFulfillmentPlacesRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to access the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    #[prost(string, tag = "1")]
    pub product: ::prost::alloc::string::String,
    /// Required. The fulfillment type, including commonly used types (such as
    /// pickup in store and same day delivery), and custom types.
    ///
    /// Supported values:
    ///
    /// * "pickup-in-store"
    /// * "ship-to-store"
    /// * "same-day-delivery"
    /// * "next-day-delivery"
    /// * "custom-type-1"
    /// * "custom-type-2"
    /// * "custom-type-3"
    /// * "custom-type-4"
    /// * "custom-type-5"
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// This field directly corresponds to \[Product.fulfillment_info.type][\].
    // `r#type` because `type` is a Rust keyword; the proto field name is `type`.
    #[prost(string, tag = "2")]
    pub r#type: ::prost::alloc::string::String,
    /// Required. The IDs for this
    /// \[type][google.cloud.retail.v2alpha.AddFulfillmentPlacesRequest.type\], such
    /// as the store IDs for "pickup-in-store" or the region IDs for
    /// "same-day-delivery" to be added for this
    /// \[type][google.cloud.retail.v2alpha.AddFulfillmentPlacesRequest.type\].
    /// Duplicate IDs will be automatically ignored.
    ///
    /// At least 1 value is required, and a maximum of 2000 values are allowed.
    /// Each value must be a string with a length limit of 10 characters, matching
    /// the pattern `\[a-zA-Z0-9_-\]+`, such as "store1" or "REGION-2". Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// If the total number of place IDs exceeds 2000 for this
    /// \[type][google.cloud.retail.v2alpha.AddFulfillmentPlacesRequest.type\] after
    /// adding, then the update will be rejected.
    #[prost(string, repeated, tag = "3")]
    pub place_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The time when the fulfillment updates are issued, used to prevent
    /// out-of-order updates on fulfillment information. If not provided, the
    /// internal system time will be used.
    #[prost(message, optional, tag = "4")]
    pub add_time: ::core::option::Option<::prost_types::Timestamp>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] is
    /// not found, the fulfillment information will still be processed and retained
    /// for at most 1 day and processed once the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is created. If set to false,
    /// a NOT_FOUND error is returned if the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is not found.
    #[prost(bool, tag = "5")]
    pub allow_missing: bool,
}
/// Metadata related to the progress of the AddFulfillmentPlaces operation.
/// Currently empty because there is no meaningful metadata populated from the
/// \[AddFulfillmentPlaces][\] method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddFulfillmentPlacesMetadata {}
/// Response of the AddFulfillmentPlacesRequest. Currently empty because
/// there is no meaningful response populated from the \[AddFulfillmentPlaces][\]
/// method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddFulfillmentPlacesResponse {}
/// Request message for \[AddLocalInventories][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
// Note: tag 3 is intentionally absent (tags jump from 2 to 4) — presumably
// reserved/removed in the .proto; do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddLocalInventoriesRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to access the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    #[prost(string, tag = "1")]
    pub product: ::prost::alloc::string::String,
    /// Required. A list of inventory information at difference places. Each place
    /// is identified by its place ID. At most 1000 inventories are allowed per
    /// request.
    #[prost(message, repeated, tag = "2")]
    pub local_inventories: ::prost::alloc::vec::Vec<LocalInventory>,
    /// Indicates which inventory fields in the provided list of
    /// \[LocalInventory][google.cloud.retail.v2alpha.LocalInventory\] to update. The
    /// field is updated to the provided value.
    ///
    /// If a field is set while the place does not have a previous local inventory,
    /// the local inventory at that store is created.
    ///
    /// If a field is set while the value of that field is not provided, the
    /// original field value, if it exists, is deleted.
    ///
    /// If the mask is not set or set with empty paths, all inventory fields will
    /// be updated.
    ///
    /// If an unsupported or unknown field is provided, an INVALID_ARGUMENT error
    /// is returned and the entire update will be ignored.
    #[prost(message, optional, tag = "4")]
    pub add_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// The time when the inventory updates are issued. Used to prevent
    /// out-of-order updates on local inventory fields. If not provided, the
    /// internal system time will be used.
    #[prost(message, optional, tag = "5")]
    pub add_time: ::core::option::Option<::prost_types::Timestamp>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] is
    /// not found, the local inventory will still be processed and retained for at
    /// most 1 day and processed once the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is created. If set to false,
    /// a NOT_FOUND error is returned if the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is not found.
    #[prost(bool, tag = "6")]
    pub allow_missing: bool,
}
/// Metadata related to the progress of the AddLocalInventories operation.
/// Currently empty because there is no meaningful metadata populated from the
/// \[AddLocalInventories][\] method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddLocalInventoriesMetadata {}
/// Response of the \[AddLocalInventories][\] API. Currently empty because
/// there is no meaningful response populated from the \[AddLocalInventories][\]
/// method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddLocalInventoriesResponse {}
/// Request message for \[RemoveLocalInventories][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
// Note: tags are non-sequential in declaration order (5 before 3), matching
// the .proto field numbering; do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveLocalInventoriesRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to access the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    #[prost(string, tag = "1")]
    pub product: ::prost::alloc::string::String,
    /// Required. A list of place IDs to have their inventory deleted.
    /// At most 1000 place IDs are allowed per request.
    #[prost(string, repeated, tag = "2")]
    pub place_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The time when the inventory deletions are issued. Used to prevent
    /// out-of-order updates and deletions on local inventory fields. If not
    /// provided, the internal system time will be used.
    #[prost(message, optional, tag = "5")]
    pub remove_time: ::core::option::Option<::prost_types::Timestamp>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] is
    /// not found, the local inventory removal request will still be processed and
    /// retained for at most 1 day and processed once the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is created. If set to false,
    /// a NOT_FOUND error is returned if the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is not found.
    #[prost(bool, tag = "3")]
    pub allow_missing: bool,
}
/// Metadata related to the progress of the RemoveLocalInventories operation.
/// Currently empty because there is no meaningful metadata populated from the
/// \[RemoveLocalInventories][\] method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveLocalInventoriesMetadata {}
/// Response of the \[RemoveLocalInventories][\] API. Currently empty because
/// there is no meaningful response populated from the \[RemoveLocalInventories][\]
/// method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveLocalInventoriesResponse {}
/// Request message for \[RemoveFulfillmentPlaces][\] method.
// prost-generated proto message; `tag` numbers fix the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveFulfillmentPlacesRequest {
    /// Required. Full resource name of
    /// \[Product][google.cloud.retail.v2alpha.Product\], such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`.
    ///
    /// If the caller does not have permission to access the
    /// \[Product][google.cloud.retail.v2alpha.Product\], regardless of whether or
    /// not it exists, a PERMISSION_DENIED error is returned.
    #[prost(string, tag = "1")]
    pub product: ::prost::alloc::string::String,
    /// Required. The fulfillment type, including commonly used types (such as
    /// pickup in store and same day delivery), and custom types.
    ///
    /// Supported values:
    ///
    /// * "pickup-in-store"
    /// * "ship-to-store"
    /// * "same-day-delivery"
    /// * "next-day-delivery"
    /// * "custom-type-1"
    /// * "custom-type-2"
    /// * "custom-type-3"
    /// * "custom-type-4"
    /// * "custom-type-5"
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    ///
    /// This field directly corresponds to \[Product.fulfillment_info.type][\].
    // `r#type` because `type` is a Rust keyword; the proto field name is `type`.
    #[prost(string, tag = "2")]
    pub r#type: ::prost::alloc::string::String,
    /// Required. The IDs for this
    /// \[type][google.cloud.retail.v2alpha.RemoveFulfillmentPlacesRequest.type\],
    /// such as the store IDs for "pickup-in-store" or the region IDs for
    /// "same-day-delivery", to be removed for this
    /// \[type][google.cloud.retail.v2alpha.RemoveFulfillmentPlacesRequest.type\].
    ///
    /// At least 1 value is required, and a maximum of 2000 values are allowed.
    /// Each value must be a string with a length limit of 10 characters, matching
    /// the pattern `\[a-zA-Z0-9_-\]+`, such as "store1" or "REGION-2". Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, repeated, tag = "3")]
    pub place_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The time when the fulfillment updates are issued, used to prevent
    /// out-of-order updates on fulfillment information. If not provided, the
    /// internal system time will be used.
    #[prost(message, optional, tag = "4")]
    pub remove_time: ::core::option::Option<::prost_types::Timestamp>,
    /// If set to true, and the \[Product][google.cloud.retail.v2alpha.Product\] is
    /// not found, the fulfillment information will still be processed and retained
    /// for at most 1 day and processed once the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is created. If set to false,
    /// a NOT_FOUND error is returned if the
    /// \[Product][google.cloud.retail.v2alpha.Product\] is not found.
    #[prost(bool, tag = "5")]
    pub allow_missing: bool,
}
/// Metadata related to the progress of the RemoveFulfillmentPlaces operation.
/// Currently empty because there is no meaningful metadata populated from the
/// \[RemoveFulfillmentPlaces][\] method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveFulfillmentPlacesMetadata {}
/// Response of the RemoveFulfillmentPlacesRequest. Currently empty because there
/// is no meaningful response populated from the \[RemoveFulfillmentPlaces][\]
/// method.
// Empty placeholder message reserved for future proto fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RemoveFulfillmentPlacesResponse {}
#[doc = r" Generated client implementations."]
pub mod product_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " Service for ingesting [Product][google.cloud.retail.v2alpha.Product]"]
#[doc = " information of the customer's website."]
#[derive(Debug, Clone)]
pub struct ProductServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> ProductServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> ProductServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
ProductServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Creates a [Product][google.cloud.retail.v2alpha.Product]."]
pub async fn create_product(
&mut self,
request: impl tonic::IntoRequest<super::CreateProductRequest>,
) -> Result<tonic::Response<super::Product>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/CreateProduct",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a [Product][google.cloud.retail.v2alpha.Product]."]
pub async fn get_product(
&mut self,
request: impl tonic::IntoRequest<super::GetProductRequest>,
) -> Result<tonic::Response<super::Product>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/GetProduct",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a list of [Product][google.cloud.retail.v2alpha.Product]s."]
pub async fn list_products(
&mut self,
request: impl tonic::IntoRequest<super::ListProductsRequest>,
) -> Result<tonic::Response<super::ListProductsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/ListProducts",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates a [Product][google.cloud.retail.v2alpha.Product]."]
pub async fn update_product(
&mut self,
request: impl tonic::IntoRequest<super::UpdateProductRequest>,
) -> Result<tonic::Response<super::Product>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/UpdateProduct",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a [Product][google.cloud.retail.v2alpha.Product]."]
pub async fn delete_product(
&mut self,
request: impl tonic::IntoRequest<super::DeleteProductRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/DeleteProduct",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Bulk import of multiple [Product][google.cloud.retail.v2alpha.Product]s."]
#[doc = ""]
#[doc = " Request processing may be synchronous. No partial updating is supported."]
#[doc = " Non-existing items are created."]
#[doc = ""]
#[doc = " Note that it is possible for a subset of the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product]s to be successfully updated."]
pub async fn import_products(
&mut self,
request: impl tonic::IntoRequest<super::ImportProductsRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/ImportProducts",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates inventory information for a"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] while respecting the last"]
#[doc = " update timestamps of each inventory field."]
#[doc = ""]
#[doc = " This process is asynchronous and does not require the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] to exist before updating"]
#[doc = " fulfillment information. If the request is valid, the update will be"]
#[doc = " enqueued and processed downstream. As a consequence, when a response is"]
#[doc = " returned, updates are not immediately manifested in the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] queried by"]
#[doc = " [GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] or"]
#[doc = " [ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]."]
#[doc = ""]
#[doc = " When inventory is updated with"]
#[doc = " [CreateProduct][google.cloud.retail.v2alpha.ProductService.CreateProduct]"]
#[doc = " and"]
#[doc = " [UpdateProduct][google.cloud.retail.v2alpha.ProductService.UpdateProduct],"]
#[doc = " the specified inventory field value(s) will overwrite any existing value(s)"]
#[doc = " while ignoring the last update time for this field. Furthermore, the last"]
#[doc = " update time for the specified inventory fields will be overwritten to the"]
#[doc = " time of the"]
#[doc = " [CreateProduct][google.cloud.retail.v2alpha.ProductService.CreateProduct]"]
#[doc = " or"]
#[doc = " [UpdateProduct][google.cloud.retail.v2alpha.ProductService.UpdateProduct]"]
#[doc = " request."]
#[doc = ""]
#[doc = " If no inventory fields are set in"]
#[doc = " [CreateProductRequest.product][google.cloud.retail.v2alpha.CreateProductRequest.product],"]
#[doc = " then any pre-existing inventory information for this product will be used."]
#[doc = ""]
#[doc = " If no inventory fields are set in [UpdateProductRequest.set_mask][],"]
#[doc = " then any existing inventory information will be preserved."]
#[doc = ""]
#[doc = " Pre-existing inventory information can only be updated with"]
#[doc = " [SetInventory][google.cloud.retail.v2alpha.ProductService.SetInventory],"]
#[doc = " [AddFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.AddFulfillmentPlaces],"]
#[doc = " and"]
#[doc = " [RemoveFulfillmentPlaces][google.cloud.retail.v2alpha.ProductService.RemoveFulfillmentPlaces]."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn set_inventory(
&mut self,
request: impl tonic::IntoRequest<super::SetInventoryRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/SetInventory",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Incrementally adds place IDs to"]
#[doc = " [Product.fulfillment_info.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids]."]
#[doc = ""]
#[doc = " This process is asynchronous and does not require the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] to exist before updating"]
#[doc = " fulfillment information. If the request is valid, the update will be"]
#[doc = " enqueued and processed downstream. As a consequence, when a response is"]
#[doc = " returned, the added place IDs are not immediately manifested in the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] queried by"]
#[doc = " [GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] or"]
#[doc = " [ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn add_fulfillment_places(
&mut self,
request: impl tonic::IntoRequest<super::AddFulfillmentPlacesRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/AddFulfillmentPlaces",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Incrementally removes place IDs from a"]
#[doc = " [Product.fulfillment_info.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids]."]
#[doc = ""]
#[doc = " This process is asynchronous and does not require the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] to exist before updating"]
#[doc = " fulfillment information. If the request is valid, the update will be"]
#[doc = " enqueued and processed downstream. As a consequence, when a response is"]
#[doc = " returned, the removed place IDs are not immediately manifested in the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] queried by"]
#[doc = " [GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] or"]
#[doc = " [ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn remove_fulfillment_places(
&mut self,
request: impl tonic::IntoRequest<super::RemoveFulfillmentPlacesRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/RemoveFulfillmentPlaces",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates local inventory information for a"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] at a list of places, while"]
#[doc = " respecting the last update timestamps of each inventory field."]
#[doc = ""]
#[doc = " This process is asynchronous and does not require the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] to exist before updating"]
#[doc = " inventory information. If the request is valid, the update will be enqueued"]
#[doc = " and processed downstream. As a consequence, when a response is returned,"]
#[doc = " updates are not immediately manifested in the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] queried by"]
#[doc = " [GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] or"]
#[doc = " [ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]."]
#[doc = ""]
#[doc = " Store inventory information can only be modified using this method."]
#[doc = " [CreateProduct][google.cloud.retail.v2alpha.ProductService.CreateProduct]"]
#[doc = " and"]
#[doc = " [UpdateProduct][google.cloud.retail.v2alpha.ProductService.UpdateProduct]"]
#[doc = " has no effect on local inventories."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " Cloud sales if you are interested in using Retail Search."]
pub async fn add_local_inventories(
&mut self,
request: impl tonic::IntoRequest<super::AddLocalInventoriesRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/AddLocalInventories",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Remove local inventory information for a"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] at a list of places at a"]
#[doc = " removal timestamp."]
#[doc = ""]
#[doc = " This process is asynchronous. If the request is valid, the removal will be"]
#[doc = " enqueued and processed downstream. As a consequence, when a response is"]
#[doc = " returned, removals are not immediately manifested in the"]
#[doc = " [Product][google.cloud.retail.v2alpha.Product] queried by"]
#[doc = " [GetProduct][google.cloud.retail.v2alpha.ProductService.GetProduct] or"]
#[doc = " [ListProducts][google.cloud.retail.v2alpha.ProductService.ListProducts]."]
#[doc = ""]
#[doc = " Store inventory information can only be removed using this method."]
#[doc = " [CreateProduct][google.cloud.retail.v2alpha.ProductService.CreateProduct]"]
#[doc = " and"]
#[doc = " [UpdateProduct][google.cloud.retail.v2alpha.ProductService.UpdateProduct]"]
#[doc = " has no effect on local inventories."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " Cloud sales if you are interested in using Retail Search."]
pub async fn remove_local_inventories(
&mut self,
request: impl tonic::IntoRequest<super::RemoveLocalInventoriesRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.ProductService/RemoveLocalInventories",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
// NOTE(review): prost-generated message — the #[prost(..., tag = "N")] numbers
// are the protobuf field numbers and must stay in sync with the .proto source;
// do not renumber or retype fields here.
/// Request message for
/// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
/// method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchRequest {
    /// Required. The resource name of the search engine placement, such as
    /// `projects/*/locations/global/catalogs/default_catalog/placements/default_search`.
    /// This field is used to identify the serving configuration name and the set
    /// of models that will be used to make the search.
    #[prost(string, tag = "1")]
    pub placement: ::prost::alloc::string::String,
    /// The branch resource name, such as
    /// `projects/*/locations/global/catalogs/default_catalog/branches/0`.
    ///
    /// Use "default_branch" as the branch ID or leave this field empty, to search
    /// products under the default branch.
    #[prost(string, tag = "2")]
    pub branch: ::prost::alloc::string::String,
    /// Raw search query.
    #[prost(string, tag = "3")]
    pub query: ::prost::alloc::string::String,
    /// Required. A unique identifier for tracking visitors. For example, this
    /// could be implemented with an HTTP cookie, which should be able to uniquely
    /// identify a visitor on a single device. This unique identifier should not
    /// change if the visitor logs in or out of the website.
    ///
    /// The field must be a UTF-8 encoded string with a length limit of 128
    /// characters. Otherwise, an INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "4")]
    pub visitor_id: ::prost::alloc::string::String,
    /// User information.
    #[prost(message, optional, tag = "5")]
    pub user_info: ::core::option::Option<UserInfo>,
    /// Maximum number of \[Product][google.cloud.retail.v2alpha.Product\]s to
    /// return. If unspecified, defaults to a reasonable value. The maximum allowed
    /// value is 120. Values above 120 will be coerced to 120.
    ///
    /// If this field is negative, an INVALID_ARGUMENT is returned.
    #[prost(int32, tag = "7")]
    pub page_size: i32,
    /// A page token
    /// \[SearchResponse.next_page_token][google.cloud.retail.v2alpha.SearchResponse.next_page_token\],
    /// received from a previous
    /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
    /// call. Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
    /// must match the call that provided the page token. Otherwise, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, tag = "8")]
    pub page_token: ::prost::alloc::string::String,
    /// A 0-indexed integer that specifies the current offset (that is, starting
    /// result location, amongst the
    /// \[Product][google.cloud.retail.v2alpha.Product\]s deemed by the API as
    /// relevant) in search results. This field is only considered if
    /// \[page_token][google.cloud.retail.v2alpha.SearchRequest.page_token\] is
    /// unset.
    ///
    /// If this field is negative, an INVALID_ARGUMENT is returned.
    #[prost(int32, tag = "9")]
    pub offset: i32,
    /// The filter syntax consists of an expression language for constructing a
    /// predicate from one or more fields of the products being filtered. Filter
    /// expression is case-sensitive. See more details at this [user
    /// guide](<https://cloud.google.com/retail/docs/filter-and-order#filter>).
    ///
    /// If this field is unrecognizable, an INVALID_ARGUMENT is returned.
    #[prost(string, tag = "10")]
    pub filter: ::prost::alloc::string::String,
    /// The filter applied to every search request when quality improvement such as
    /// query expansion is needed. For example, if a query does not have enough
    /// results, an expanded query with
    /// \[SearchRequest.canonical_filter][google.cloud.retail.v2alpha.SearchRequest.canonical_filter\]
    /// will be returned as a supplement of the original query. This field is
    /// strongly recommended to achieve high search quality.
    ///
    /// See
    /// \[SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter\]
    /// for more details about filter syntax.
    #[prost(string, tag = "28")]
    pub canonical_filter: ::prost::alloc::string::String,
    /// The order in which products are returned. Products can be ordered by
    /// a field in an \[Product][google.cloud.retail.v2alpha.Product\] object. Leave
    /// it unset if ordered by relevance. OrderBy expression is case-sensitive. See
    /// more details at this [user
    /// guide](<https://cloud.google.com/retail/docs/filter-and-order#order>).
    ///
    /// If this field is unrecognizable, an INVALID_ARGUMENT is returned.
    #[prost(string, tag = "11")]
    pub order_by: ::prost::alloc::string::String,
    /// Facet specifications for faceted search. If empty, no facets are returned.
    ///
    /// A maximum of 100 values are allowed. Otherwise, an INVALID_ARGUMENT error
    /// is returned.
    #[prost(message, repeated, tag = "12")]
    pub facet_specs: ::prost::alloc::vec::Vec<search_request::FacetSpec>,
    /// The specification for dynamically generated facets. Notice that only
    /// textual facets can be dynamically generated.
    ///
    /// This feature requires additional allowlisting. Contact Retail Search
    /// support team if you are interested in using dynamic facet feature.
    #[prost(message, optional, tag = "21")]
    pub dynamic_facet_spec: ::core::option::Option<search_request::DynamicFacetSpec>,
    /// Boost specification to boost certain products. See more details at this
    /// [user guide](<https://cloud.google.com/retail/docs/boosting>).
    ///
    /// Notice that if both
    /// \[ServingConfig.boost_control_ids][google.cloud.retail.v2alpha.ServingConfig.boost_control_ids\]
    /// and \[SearchRequest.boost_spec\] are set, the boost conditions from both
    /// places are evaluated. If a search request matches multiple boost
    /// conditions, the final boost score is equal to the sum of the boost scores
    /// from all matched boost conditions.
    #[prost(message, optional, tag = "13")]
    pub boost_spec: ::core::option::Option<search_request::BoostSpec>,
    /// The query expansion specification that specifies the conditions under which
    /// query expansion will occur. See more details at this [user
    /// guide](<https://cloud.google.com/retail/docs/result-size#query_expansion>).
    #[prost(message, optional, tag = "14")]
    pub query_expansion_spec: ::core::option::Option<search_request::QueryExpansionSpec>,
    /// The relevance threshold of the search results.
    ///
    /// Defaults to
    /// \[RelevanceThreshold.HIGH][google.cloud.retail.v2alpha.SearchRequest.RelevanceThreshold.HIGH\],
    /// which means only the most relevant results are shown, and the least number
    /// of results are returned. See more details at this [user
    /// guide](<https://cloud.google.com/retail/docs/result-size#relevance_thresholding>).
    // Stored as the raw i32 discriminant of `search_request::RelevanceThreshold`
    // (prost enumeration fields are plain i32s).
    #[prost(enumeration = "search_request::RelevanceThreshold", tag = "15")]
    pub relevance_threshold: i32,
    /// The keys to fetch and rollup the matching
    /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s attributes. The attributes
    /// from all the matching
    /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s are merged and
    /// de-duplicated. Notice that rollup
    /// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
    /// \[Product][google.cloud.retail.v2alpha.Product\]s attributes will lead to
    /// extra query latency. Maximum number of keys is 10.
    ///
    /// For \[FulfillmentInfo][google.cloud.retail.v2alpha.FulfillmentInfo\], a
    /// fulfillment type and a fulfillment ID must be provided in the format of
    /// "fulfillmentType.fulfillmentId". E.g., in "pickupInStore.store123",
    /// "pickupInStore" is fulfillment type and "store123" is the store ID.
    ///
    /// Supported keys are:
    ///
    /// * colorFamilies
    /// * price
    /// * originalPrice
    /// * discount
    /// * variantId
    /// * inventory(place_id,price)
    /// * inventory(place_id,attributes.key), where key is any key in the
    /// \[Product.inventories.attributes][\] map.
    /// * attributes.key, where key is any key in the
    /// \[Product.attributes][google.cloud.retail.v2alpha.Product.attributes\] map.
    /// * pickupInStore.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "pickup-in-store".
    /// * shipToStore.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "ship-to-store".
    /// * sameDayDelivery.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "same-day-delivery".
    /// * nextDayDelivery.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "next-day-delivery".
    /// * customFulfillment1.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "custom-type-1".
    /// * customFulfillment2.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "custom-type-2".
    /// * customFulfillment3.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "custom-type-3".
    /// * customFulfillment4.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "custom-type-4".
    /// * customFulfillment5.id, where id is any
    /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
    /// for
    /// \[FulfillmentInfo.type][google.cloud.retail.v2alpha.FulfillmentInfo.type\]
    /// "custom-type-5".
    ///
    /// If this field is set to an invalid value other than these, an
    /// INVALID_ARGUMENT error is returned.
    #[prost(string, repeated, tag = "17")]
    pub variant_rollup_keys: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The categories associated with a category page. Required for category
    /// navigation queries to achieve good search quality. The format should be
    /// the same as
    /// \[UserEvent.page_categories][google.cloud.retail.v2alpha.UserEvent.page_categories\];
    ///
    /// To represent full path of category, use '>' sign to separate different
    /// hierarchies. If '>' is part of the category name, please replace it with
    /// other character(s).
    ///
    /// Category pages include special pages such as sales or promotions. For
    /// instance, a special sale page may have the category hierarchy:
    /// "pageCategories" : ["Sales > 2017 Black Friday Deals"].
    #[prost(string, repeated, tag = "23")]
    pub page_categories: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The search mode of the search request. If not specified, a single search
    /// request triggers both product search and faceted search.
    // Raw i32 discriminant of `search_request::SearchMode`.
    #[prost(enumeration = "search_request::SearchMode", tag = "31")]
    pub search_mode: i32,
}
/// Nested message and enum types in `SearchRequest`.
pub mod search_request {
    /// A facet specification to perform faceted search.
    // NOTE(review): prost-generated message; keep field tags in sync with the .proto.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct FacetSpec {
        /// Required. The facet key specification.
        #[prost(message, optional, tag = "1")]
        pub facet_key: ::core::option::Option<facet_spec::FacetKey>,
        /// Maximum of facet values that should be returned for this facet. If
        /// unspecified, defaults to 20. The maximum allowed value is 300. Values
        /// above 300 will be coerced to 300.
        ///
        /// If this field is negative, an INVALID_ARGUMENT is returned.
        #[prost(int32, tag = "2")]
        pub limit: i32,
        /// List of keys to exclude when faceting.
        ///
        /// By default,
        /// \[FacetKey.key][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.key\]
        /// is not excluded from the filter unless it is listed in this field.
        ///
        /// For example, suppose there are 100 products with color facet "Red" and
        /// 200 products with color facet "Blue". A query containing the filter
        /// "colorFamilies:ANY("Red")" and have "colorFamilies" as
        /// \[FacetKey.key][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.key\]
        /// will by default return the "Red" with count 100.
        ///
        /// If this field contains "colorFamilies", then the query returns both the
        /// "Red" with count 100 and "Blue" with count 200, because the
        /// "colorFamilies" key is now excluded from the filter.
        ///
        /// A maximum of 100 values are allowed. Otherwise, an INVALID_ARGUMENT error
        /// is returned.
        #[prost(string, repeated, tag = "3")]
        pub excluded_filter_keys: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
        /// Enables dynamic position for this facet. If set to true, the position of
        /// this facet among all facets in the response is determined by Google
        /// Retail Search. It will be ordered together with dynamic facets if dynamic
        /// facets is enabled. If set to false, the position of this facet in the
        /// response will be the same as in the request, and it will be ranked before
        /// the facets with dynamic position enable and all dynamic facets.
        ///
        /// For example, you may always want to have rating facet returned in
        /// the response, but it's not necessarily to always display the rating facet
        /// at the top. In that case, you can set enable_dynamic_position to true so
        /// that the position of rating facet in response will be determined by
        /// Google Retail Search.
        ///
        /// Another example, assuming you have the following facets in the request:
        ///
        /// * "rating", enable_dynamic_position = true
        ///
        /// * "price", enable_dynamic_position = false
        ///
        /// * "brands", enable_dynamic_position = false
        ///
        /// And also you have a dynamic facets enable, which will generate a facet
        /// 'gender'. Then the final order of the facets in the response can be
        /// ("price", "brands", "rating", "gender") or ("price", "brands", "gender",
        /// "rating") depends on how Google Retail Search orders "gender" and
        /// "rating" facets. However, notice that "price" and "brands" will always be
        /// ranked at 1st and 2nd position since their enable_dynamic_position are
        /// false.
        #[prost(bool, tag = "4")]
        pub enable_dynamic_position: bool,
    }
    /// Nested message and enum types in `FacetSpec`.
    pub mod facet_spec {
        /// Specifies how a facet is computed.
        // NOTE(review): prost-generated message; keep field tags in sync with the .proto.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct FacetKey {
            /// Required. Supported textual and numerical facet keys in
            /// \[Product][google.cloud.retail.v2alpha.Product\] object, over which the
            /// facet values are computed. Facet key is case-sensitive.
            ///
            /// Allowed facet keys when
            /// \[FacetKey.query][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.query\]
            /// is not specified:
            ///
            /// * textual_field =
            /// * "brands"
            /// * "categories"
            /// * "genders"
            /// * "ageGroups"
            /// * "availability"
            /// * "colorFamilies"
            /// * "colors"
            /// * "sizes"
            /// * "materials"
            /// * "patterns"
            /// * "conditions"
            /// * "attributes.key"
            /// * "pickupInStore"
            /// * "shipToStore"
            /// * "sameDayDelivery"
            /// * "nextDayDelivery"
            /// * "customFulfillment1"
            /// * "customFulfillment2"
            /// * "customFulfillment3"
            /// * "customFulfillment4"
            /// * "customFulfillment5"
            /// * "inventory(place_id,attributes.key)"
            ///
            /// * numerical_field =
            /// * "price"
            /// * "discount"
            /// * "rating"
            /// * "ratingCount"
            /// * "attributes.key"
            /// * "inventory(place_id,price)"
            /// * "inventory(place_id,attributes.key)"
            #[prost(string, tag = "1")]
            pub key: ::prost::alloc::string::String,
            /// Set only if values should be bucketized into intervals. Must be set
            /// for facets with numerical values. Must not be set for facet with text
            /// values. Maximum number of intervals is 30.
            #[prost(message, repeated, tag = "2")]
            pub intervals: ::prost::alloc::vec::Vec<super::super::Interval>,
            /// Only get facet for the given restricted values. For example, when using
            /// "pickupInStore" as key and set restricted values to
            /// ["store123", "store456"], only facets for "store123" and "store456" are
            /// returned. Only supported on textual fields and fulfillments.
            /// Maximum is 20.
            ///
            /// Must be set for the fulfillment facet keys:
            ///
            /// * pickupInStore
            ///
            /// * shipToStore
            ///
            /// * sameDayDelivery
            ///
            /// * nextDayDelivery
            ///
            /// * customFulfillment1
            ///
            /// * customFulfillment2
            ///
            /// * customFulfillment3
            ///
            /// * customFulfillment4
            ///
            /// * customFulfillment5
            #[prost(string, repeated, tag = "3")]
            pub restricted_values: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
            /// Only get facet values that start with the given string prefix. For
            /// example, suppose "categories" has three values "Women > Shoe",
            /// "Women > Dress" and "Men > Shoe". If set "prefixes" to "Women", the
            /// "categories" facet will give only "Women > Shoe" and "Women > Dress".
            /// Only supported on textual fields. Maximum is 10.
            #[prost(string, repeated, tag = "8")]
            pub prefixes: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
            /// Only get facet values that contains the given strings. For example,
            /// suppose "categories" has three values "Women > Shoe",
            /// "Women > Dress" and "Men > Shoe". If set "contains" to "Shoe", the
            /// "categories" facet will give only "Women > Shoe" and "Men > Shoe".
            /// Only supported on textual fields. Maximum is 10.
            #[prost(string, repeated, tag = "9")]
            pub contains: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
            /// The order in which \[Facet.values][\] are returned.
            ///
            /// Allowed values are:
            ///
            /// * "count desc", which means order by \[Facet.FacetValue.count][\]
            /// descending.
            ///
            /// * "value desc", which means order by \[Facet.FacetValue.value][\]
            /// descending.
            /// Only applies to textual facets.
            ///
            /// If not set, textual values are sorted in [natural
            /// order](<https://en.wikipedia.org/wiki/Natural_sort_order>); numerical
            /// intervals are sorted in the order given by
            /// \[FacetSpec.FacetKey.intervals][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.intervals\];
            /// \[FulfillmentInfo.place_ids][google.cloud.retail.v2alpha.FulfillmentInfo.place_ids\]
            /// are sorted in the order given by
            /// \[FacetSpec.FacetKey.restricted_values][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.restricted_values\].
            #[prost(string, tag = "4")]
            pub order_by: ::prost::alloc::string::String,
            /// The query that is used to compute facet for the given facet key.
            /// When provided, it will override the default behavior of facet
            /// computation. The query syntax is the same as a filter expression. See
            /// \[SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter\]
            /// for detail syntax and limitations. Notice that there is no limitation
            /// on
            /// \[FacetKey.key][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.key\]
            /// when query is specified.
            ///
            /// In the response, \[FacetValue.value][\] will be always "1" and
            /// \[FacetValue.count][\] will be the number of results that matches the
            /// query.
            ///
            /// For example, you can set a customized facet for "shipToStore",
            /// where
            /// \[FacetKey.key][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.key\]
            /// is "customizedShipToStore", and
            /// \[FacetKey.query][google.cloud.retail.v2alpha.SearchRequest.FacetSpec.FacetKey.query\]
            /// is "availability: ANY(\"IN_STOCK\") AND shipToStore: ANY(\"123\")".
            /// Then the facet will count the products that are both in stock and ship
            /// to store "123".
            #[prost(string, tag = "5")]
            pub query: ::prost::alloc::string::String,
        }
    }
/// The specifications of dynamically generated facets.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DynamicFacetSpec {
/// Mode of the DynamicFacet feature.
/// Defaults to
/// \[Mode.DISABLED][google.cloud.retail.v2alpha.SearchRequest.DynamicFacetSpec.Mode.DISABLED\]
/// if it's unset.
#[prost(enumeration = "dynamic_facet_spec::Mode", tag = "1")]
pub mode: i32,
}
/// Nested message and enum types in `DynamicFacetSpec`.
pub mod dynamic_facet_spec {
/// Enum to control DynamicFacet mode
#[derive(
Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
)]
#[repr(i32)]
pub enum Mode {
/// Default value.
Unspecified = 0,
/// Disable Dynamic Facet.
Disabled = 1,
/// Automatic mode built by Google Retail Search.
Enabled = 2,
}
}
/// Boost specification to boost certain items.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BoostSpec {
/// Condition boost specifications. If a product matches multiple conditions
    /// in the specifications, boost scores from these specifications are all
/// applied and combined in a non-linear way. Maximum number of
/// specifications is 10.
#[prost(message, repeated, tag = "1")]
pub condition_boost_specs: ::prost::alloc::vec::Vec<boost_spec::ConditionBoostSpec>,
}
/// Nested message and enum types in `BoostSpec`.
pub mod boost_spec {
/// Boost applies to products which match a condition.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ConditionBoostSpec {
/// An expression which specifies a boost condition. The syntax and
/// supported fields are the same as a filter expression. See
/// \[SearchRequest.filter][google.cloud.retail.v2alpha.SearchRequest.filter\]
/// for detail syntax and limitations.
///
/// Examples:
///
/// * To boost products with product ID "product_1" or "product_2", and
/// color
/// "Red" or "Blue":
/// * (id: ANY("product_1", "product_2")) AND (colorFamilies:
/// ANY("Red","Blue"))
#[prost(string, tag = "1")]
pub condition: ::prost::alloc::string::String,
/// Strength of the condition boost, which should be in [-1, 1]. Negative
/// boost means demotion. Default is 0.0.
///
/// Setting to 1.0 gives the item a big promotion. However, it does not
/// necessarily mean that the boosted item will be the top result at all
/// times, nor that other items will be excluded. Results could still be
/// shown even when none of them matches the condition. And results that
/// are significantly more relevant to the search query can still trump
/// your heavily favored but irrelevant items.
///
/// Setting to -1.0 gives the item a big demotion. However, results that
/// are deeply relevant might still be shown. The item will have an
/// upstream battle to get a fairly high ranking, but it is not blocked out
/// completely.
///
/// Setting to 0.0 means no boost applied. The boosting condition is
/// ignored.
#[prost(float, tag = "2")]
pub boost: f32,
}
}
/// Specification to determine under which conditions query expansion should
/// occur.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryExpansionSpec {
/// The condition under which query expansion should occur. Default to
/// \[Condition.DISABLED][google.cloud.retail.v2alpha.SearchRequest.QueryExpansionSpec.Condition.DISABLED\].
#[prost(enumeration = "query_expansion_spec::Condition", tag = "1")]
pub condition: i32,
/// Whether to pin unexpanded results. If this field is set to true,
/// unexpanded products are always at the top of the search results, followed
/// by the expanded results.
#[prost(bool, tag = "2")]
pub pin_unexpanded_results: bool,
}
/// Nested message and enum types in `QueryExpansionSpec`.
pub mod query_expansion_spec {
/// Enum describing under which condition query expansion should occur.
#[derive(
Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
)]
#[repr(i32)]
pub enum Condition {
/// Unspecified query expansion condition. This defaults to
/// \[Condition.DISABLED][google.cloud.retail.v2alpha.SearchRequest.QueryExpansionSpec.Condition.DISABLED\].
Unspecified = 0,
/// Disabled query expansion. Only the exact search query is used, even if
/// \[SearchResponse.total_size][google.cloud.retail.v2alpha.SearchResponse.total_size\]
/// is zero.
Disabled = 1,
/// Automatic query expansion built by Google Retail Search.
Auto = 3,
}
}
/// The relevance threshold of the search results. The higher relevance
/// threshold is, the higher relevant results are shown and the less number of
/// results are returned.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum RelevanceThreshold {
/// Default value. Defaults to
/// \[RelevanceThreshold.HIGH][google.cloud.retail.v2alpha.SearchRequest.RelevanceThreshold.HIGH\].
Unspecified = 0,
/// High relevance threshold.
High = 1,
/// Medium relevance threshold.
Medium = 2,
/// Low relevance threshold.
Low = 3,
/// Lowest relevance threshold.
Lowest = 4,
}
/// The search mode of each search request.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum SearchMode {
/// Default value. In this case both product search and faceted search will
/// be performed. Both \[SearchResponse.SearchResult\] and
/// \[SearchResponse.Facet\] will be returned.
Unspecified = 0,
/// Only product search will be performed. The faceted search will be
/// disabled.
///
/// Only \[SearchResponse.SearchResult\] will be returned.
/// \[SearchResponse.Facet\] will not be returned, even if
/// \[SearchRequest.facet_specs][google.cloud.retail.v2alpha.SearchRequest.facet_specs\]
/// or
/// \[SearchRequest.dynamic_facet_spec][google.cloud.retail.v2alpha.SearchRequest.dynamic_facet_spec\]
/// is set.
ProductSearchOnly = 1,
/// Only faceted search will be performed. The product search will be
/// disabled.
///
/// When in this mode, one or both of \[SearchRequest.facet_spec][\] and
/// \[SearchRequest.dynamic_facet_spec][google.cloud.retail.v2alpha.SearchRequest.dynamic_facet_spec\]
/// should be set. Otherwise, an INVALID_ARGUMENT error is returned. Only
/// \[SearchResponse.Facet\] will be returned. \[SearchResponse.SearchResult\]
/// will not be returned.
FacetedSearchOnly = 2,
}
}
/// Response message for
/// \[SearchService.Search][google.cloud.retail.v2alpha.SearchService.Search\]
/// method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchResponse {
/// A list of matched items. The order represents the ranking.
#[prost(message, repeated, tag = "1")]
pub results: ::prost::alloc::vec::Vec<search_response::SearchResult>,
/// Results of facets requested by user.
#[prost(message, repeated, tag = "2")]
pub facets: ::prost::alloc::vec::Vec<search_response::Facet>,
/// The estimated total count of matched items irrespective of pagination. The
/// count of \[results][google.cloud.retail.v2alpha.SearchResponse.results\]
/// returned by pagination may be less than the
/// \[total_size][google.cloud.retail.v2alpha.SearchResponse.total_size\] that
/// matches.
#[prost(int32, tag = "3")]
pub total_size: i32,
/// If spell correction applies, the corrected query. Otherwise, empty.
#[prost(string, tag = "4")]
pub corrected_query: ::prost::alloc::string::String,
/// A unique search token. This should be included in the
/// \[UserEvent][google.cloud.retail.v2alpha.UserEvent\] logs resulting from this
/// search, which enables accurate attribution of search model performance.
#[prost(string, tag = "5")]
pub attribution_token: ::prost::alloc::string::String,
/// A token that can be sent as
/// \[SearchRequest.page_token][google.cloud.retail.v2alpha.SearchRequest.page_token\]
/// to retrieve the next page. If this field is omitted, there are no
/// subsequent pages.
#[prost(string, tag = "6")]
pub next_page_token: ::prost::alloc::string::String,
/// Query expansion information for the returned results.
#[prost(message, optional, tag = "7")]
pub query_expansion_info: ::core::option::Option<search_response::QueryExpansionInfo>,
/// The URI of a customer-defined redirect page. If redirect action is
/// triggered, no search will be performed, and only
/// \[redirect_uri][google.cloud.retail.v2alpha.SearchResponse.redirect_uri\] and
/// \[attribution_token][google.cloud.retail.v2alpha.SearchResponse.attribution_token\]
/// will be set in the response.
#[prost(string, tag = "10")]
pub redirect_uri: ::prost::alloc::string::String,
}
/// Nested message and enum types in `SearchResponse`.
pub mod search_response {
/// Represents the search results.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchResult {
/// \[Product.id][google.cloud.retail.v2alpha.Product.id\] of the searched
/// \[Product][google.cloud.retail.v2alpha.Product\].
#[prost(string, tag = "1")]
pub id: ::prost::alloc::string::String,
/// The product data snippet in the search response. Only
/// \[Product.name][google.cloud.retail.v2alpha.Product.name\] is guaranteed to
/// be populated.
///
/// \[Product.variants][google.cloud.retail.v2alpha.Product.variants\] contains
/// the product variants that match the search query. If there are multiple
/// product variants matching the query, top 5 most relevant product variants
/// are returned and ordered by relevancy.
///
    /// If relevancy can be determined, use
/// \[matching_variant_fields][google.cloud.retail.v2alpha.SearchResponse.SearchResult.matching_variant_fields\]
/// to look up matched product variants fields. If relevancy cannot be
/// determined, e.g. when searching "shoe" all products in a shoe product can
/// be a match, 5 product variants are returned but order is meaningless.
#[prost(message, optional, tag = "2")]
pub product: ::core::option::Option<super::Product>,
/// The count of matched
/// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
/// \[Product][google.cloud.retail.v2alpha.Product\]s.
#[prost(int32, tag = "3")]
pub matching_variant_count: i32,
/// If a \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
/// \[Product][google.cloud.retail.v2alpha.Product\] matches the search query,
/// this map indicates which \[Product][google.cloud.retail.v2alpha.Product\]
/// fields are matched. The key is the
/// \[Product.name][google.cloud.retail.v2alpha.Product.name\], the value is a
/// field mask of the matched \[Product][google.cloud.retail.v2alpha.Product\]
/// fields. If matched attributes cannot be determined, this map will be
/// empty.
///
/// For example, a key "sku1" with field mask
/// "products.color_info" indicates there is a match between
/// "sku1" \[ColorInfo][google.cloud.retail.v2alpha.ColorInfo\] and the query.
#[prost(map = "string, message", tag = "4")]
pub matching_variant_fields:
::std::collections::HashMap<::prost::alloc::string::String, ::prost_types::FieldMask>,
/// The rollup matching
/// \[variant][google.cloud.retail.v2alpha.Product.Type.VARIANT\]
/// \[Product][google.cloud.retail.v2alpha.Product\] attributes. The key is one
/// of the
/// \[SearchRequest.variant_rollup_keys][google.cloud.retail.v2alpha.SearchRequest.variant_rollup_keys\].
/// The values are the merged and de-duplicated
/// \[Product][google.cloud.retail.v2alpha.Product\] attributes. Notice that
    /// the rollup values respect the filter. For example, when filtering by
/// "colorFamilies:ANY(\"red\")" and rollup "colorFamilies", only "red" is
/// returned.
///
/// For textual and numerical attributes, the rollup values is a list of
/// string or double values with type
/// \[google.protobuf.ListValue][google.protobuf.ListValue\]. For example, if
/// there are two variants with colors "red" and "blue", the rollup values
/// are
///
/// { key: "colorFamilies"
/// value {
/// list_value {
/// values { string_value: "red" }
/// values { string_value: "blue" }
/// }
/// }
/// }
///
/// For \[FulfillmentInfo][google.cloud.retail.v2alpha.FulfillmentInfo\], the
/// rollup values is a double value with type
/// \[google.protobuf.Value][google.protobuf.Value\]. For example,
    /// `{key: "pickupInStore.store1" value { number_value: 10 }}` means there
    /// are 10 variants of this product available in the store "store1".
#[prost(map = "string, message", tag = "5")]
pub variant_rollup_values:
::std::collections::HashMap<::prost::alloc::string::String, ::prost_types::Value>,
}
/// A facet result.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Facet {
/// The key for this facet. E.g., "colorFamilies" or "price" or
/// "attributes.attr1".
#[prost(string, tag = "1")]
pub key: ::prost::alloc::string::String,
/// The facet values for this field.
#[prost(message, repeated, tag = "2")]
pub values: ::prost::alloc::vec::Vec<facet::FacetValue>,
/// Whether the facet is dynamically generated.
#[prost(bool, tag = "3")]
pub dynamic_facet: bool,
}
/// Nested message and enum types in `Facet`.
pub mod facet {
/// A facet value which contains value names and their count.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FacetValue {
/// Number of items that have this facet value.
#[prost(int64, tag = "3")]
pub count: i64,
/// A facet value which contains values.
#[prost(oneof = "facet_value::FacetValue", tags = "1, 2")]
pub facet_value: ::core::option::Option<facet_value::FacetValue>,
}
/// Nested message and enum types in `FacetValue`.
pub mod facet_value {
/// A facet value which contains values.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum FacetValue {
/// Text value of a facet, such as "Black" for facet "colorFamilies".
#[prost(string, tag = "1")]
Value(::prost::alloc::string::String),
/// Interval value for a facet, such as [10, 20) for facet "price".
#[prost(message, tag = "2")]
Interval(super::super::super::Interval),
}
}
}
/// Information describing query expansion including whether expansion has
/// occurred.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryExpansionInfo {
/// Bool describing whether query expansion has occurred.
#[prost(bool, tag = "1")]
pub expanded_query: bool,
/// Number of pinned results. This field will only be set when expansion
/// happens and
/// \[SearchRequest.QueryExpansionSpec.pin_unexpanded_results][google.cloud.retail.v2alpha.SearchRequest.QueryExpansionSpec.pin_unexpanded_results\]
/// is set to true.
#[prost(int64, tag = "2")]
pub pinned_result_count: i64,
}
}
#[doc = r" Generated client implementations."]
pub mod search_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " Service for search."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
#[derive(Debug, Clone)]
pub struct SearchServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> SearchServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> SearchServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
SearchServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Performs a search."]
#[doc = ""]
#[doc = " This feature is only available for users who have Retail Search enabled."]
#[doc = " Please submit a form [here](https://cloud.google.com/contact) to contact"]
#[doc = " cloud sales if you are interested in using Retail Search."]
pub async fn search(
&mut self,
request: impl tonic::IntoRequest<super::SearchRequest>,
) -> Result<tonic::Response<super::SearchResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.SearchService/Search",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
/// Request message for WriteUserEvent method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WriteUserEventRequest {
/// Required. The parent catalog resource name, such as
/// `projects/1234/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// Required. User event to write.
#[prost(message, optional, tag = "2")]
pub user_event: ::core::option::Option<UserEvent>,
}
/// Request message for CollectUserEvent method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CollectUserEventRequest {
/// Required. The parent catalog name, such as
/// `projects/1234/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// Required. URL encoded UserEvent proto with a length limit of 2,000,000
/// characters.
#[prost(string, tag = "2")]
pub user_event: ::prost::alloc::string::String,
/// The URL including cgi-parameters but excluding the hash fragment with a
/// length limit of 5,000 characters. This is often more useful than the
/// referer URL, because many browsers only send the domain for 3rd party
/// requests.
#[prost(string, tag = "3")]
pub uri: ::prost::alloc::string::String,
/// The event timestamp in milliseconds. This prevents browser caching of
/// otherwise identical get requests. The name is abbreviated to reduce the
/// payload bytes.
#[prost(int64, tag = "4")]
pub ets: i64,
}
/// Request message for RejoinUserEvents method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RejoinUserEventsRequest {
/// Required. The parent catalog resource name, such as
/// `projects/1234/locations/global/catalogs/default_catalog`.
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// The type of the user event rejoin to define the scope and range of the user
/// events to be rejoined with the latest product catalog. Defaults to
/// USER_EVENT_REJOIN_SCOPE_UNSPECIFIED if this field is not set, or set to an
/// invalid integer value.
#[prost(enumeration = "rejoin_user_events_request::UserEventRejoinScope", tag = "2")]
pub user_event_rejoin_scope: i32,
}
/// Nested message and enum types in `RejoinUserEventsRequest`.
pub mod rejoin_user_events_request {
/// The scope of user events to be rejoined with the latest product catalog.
/// If the rejoining aims at reducing number of unjoined events, set
/// UserEventRejoinScope to UNJOINED_EVENTS.
/// If the rejoining aims at correcting product catalog information in joined
/// events, set UserEventRejoinScope to JOINED_EVENTS.
/// If all events needs to be rejoined, set UserEventRejoinScope to
/// USER_EVENT_REJOIN_SCOPE_UNSPECIFIED.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum UserEventRejoinScope {
/// Rejoin all events with the latest product catalog, including both joined
/// events and unjoined events.
Unspecified = 0,
/// Only rejoin joined events with the latest product catalog.
JoinedEvents = 1,
/// Only rejoin unjoined events with the latest product catalog.
UnjoinedEvents = 2,
}
}
/// Response message for RejoinUserEvents method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RejoinUserEventsResponse {
/// Number of user events that were joined with latest product catalog.
#[prost(int64, tag = "1")]
pub rejoined_user_events_count: i64,
}
/// Metadata for RejoinUserEvents method.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RejoinUserEventsMetadata {}
#[doc = r" Generated client implementations."]
pub mod user_event_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " Service for ingesting end user actions on the customer website."]
#[derive(Debug, Clone)]
pub struct UserEventServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> UserEventServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> UserEventServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
UserEventServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Writes a single user event."]
pub async fn write_user_event(
&mut self,
request: impl tonic::IntoRequest<super::WriteUserEventRequest>,
) -> Result<tonic::Response<super::UserEvent>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.UserEventService/WriteUserEvent",
);
self.inner.unary(request.into_request(), path, codec).await
}
        #[doc = " Writes a single user event from the browser. This uses a GET request"]
        #[doc = " due to browser restriction of POST-ing to a 3rd party domain."]
#[doc = ""]
#[doc = " This method is used only by the Retail API JavaScript pixel and Google Tag"]
#[doc = " Manager. Users should not call this method directly."]
pub async fn collect_user_event(
&mut self,
request: impl tonic::IntoRequest<super::CollectUserEventRequest>,
) -> Result<tonic::Response<super::super::super::super::api::HttpBody>, tonic::Status>
{
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.UserEventService/CollectUserEvent",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes permanently all user events specified by the filter provided."]
#[doc = " Depending on the number of events specified by the filter, this operation"]
#[doc = " could take hours or days to complete. To test a filter, use the list"]
#[doc = " command first."]
pub async fn purge_user_events(
&mut self,
request: impl tonic::IntoRequest<super::PurgeUserEventsRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.UserEventService/PurgeUserEvents",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Bulk import of User events. Request processing might be"]
#[doc = " synchronous. Events that already exist are skipped."]
#[doc = " Use this method for backfilling historical user events."]
#[doc = ""]
#[doc = " Operation.response is of type ImportResponse. Note that it is"]
#[doc = " possible for a subset of the items to be successfully inserted."]
#[doc = " Operation.metadata is of type ImportMetadata."]
pub async fn import_user_events(
&mut self,
request: impl tonic::IntoRequest<super::ImportUserEventsRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.UserEventService/ImportUserEvents",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Triggers a user event rejoin operation with latest product catalog. Events"]
#[doc = " will not be annotated with detailed product information if product is"]
#[doc = " missing from the catalog at the time the user event is ingested, and these"]
#[doc = " events are stored as unjoined events with a limited usage on training and"]
#[doc = " serving. This API can be used to trigger a 'join' operation on specified"]
#[doc = " events with latest version of product catalog. It can also be used to"]
#[doc = " correct events joined with wrong product catalog."]
pub async fn rejoin_user_events(
&mut self,
request: impl tonic::IntoRequest<super::RejoinUserEventsRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.retail.v2alpha.UserEventService/RejoinUserEvents",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
| 50.588704 | 156 | 0.643171 |
d64bfe0033b94497917993b7e9c5750918cb05e9 | 2,962 | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(repr_packed)]
use std::fmt;
use std::mem;
/// Fully packed struct: no padding at all, so `baz` sits at byte offset 1,
/// the struct is 9 bytes, and its alignment is 1. `baz` is therefore
/// misaligned for a `u64` and must never be borrowed in place.
#[repr(packed)]
#[derive(Copy, Clone)]
struct Foo1 {
    bar: u8,
    baz: u64,
}

impl PartialEq for Foo1 {
    fn eq(&self, other: &Foo1) -> bool {
        // Copy the packed fields into aligned locals before comparing.
        // Writing `self.baz == other.baz` directly auto-borrows the packed
        // field, creating an unaligned reference — rejected by modern rustc
        // (E0793) and UB to use. The Debug impl below already uses this
        // copy-out pattern; the comparison must too.
        let (bar, baz) = (self.bar, self.baz);
        let (other_bar, other_baz) = (other.bar, other.baz);
        bar == other_bar && baz == other_baz
    }
}

impl fmt::Debug for Foo1 {
    /// Formats like a plain struct. Fields are copied into locals first
    /// because `debug_struct` takes references, which must be aligned.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let bar = self.bar;
        let baz = self.baz;
        f.debug_struct("Foo1")
            .field("bar", &bar)
            .field("baz", &baz)
            .finish()
    }
}
/// Packed to a maximum alignment of 2: `baz` sits at offset 2, the struct is
/// 10 bytes with alignment 2. `baz` (natural alignment 8) is still
/// misaligned, so it must be copied out, never borrowed in place.
#[repr(packed(2))]
#[derive(Copy, Clone)]
struct Foo2 {
    bar: u8,
    baz: u64,
}

impl PartialEq for Foo2 {
    fn eq(&self, other: &Foo2) -> bool {
        // Copy packed fields to aligned locals; comparing `self.baz ==
        // other.baz` directly would auto-borrow the under-aligned field
        // (E0793 on modern rustc, UB to dereference). Same pattern as the
        // Debug impl below.
        let (bar, baz) = (self.bar, self.baz);
        let (other_bar, other_baz) = (other.bar, other.baz);
        bar == other_bar && baz == other_baz
    }
}

impl fmt::Debug for Foo2 {
    /// Formats like a plain struct; fields are copied into locals first
    /// because `debug_struct` needs (aligned) references.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let bar = self.bar;
        let baz = self.baz;
        f.debug_struct("Foo2")
            .field("bar", &bar)
            .field("baz", &baz)
            .finish()
    }
}
/// C layout packed to a maximum alignment of 4: `baz` sits at offset 4, the
/// struct is 12 bytes with alignment 4. `baz` (natural alignment 8) is still
/// under-aligned, so it must be copied out, never borrowed in place.
#[repr(C, packed(4))]
#[derive(Copy, Clone)]
struct Foo4C {
    bar: u8,
    baz: u64,
}

impl PartialEq for Foo4C {
    fn eq(&self, other: &Foo4C) -> bool {
        // Copy packed fields to aligned locals; `self.baz == other.baz`
        // would auto-borrow the under-aligned field (E0793 on modern rustc,
        // UB to dereference). Same pattern as the Debug impl below.
        let (bar, baz) = (self.bar, self.baz);
        let (other_bar, other_baz) = (other.bar, other.baz);
        bar == other_bar && baz == other_baz
    }
}

impl fmt::Debug for Foo4C {
    /// Formats like a plain struct; fields are copied into locals first
    /// because `debug_struct` needs (aligned) references.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let bar = self.bar;
        let baz = self.baz;
        f.debug_struct("Foo4C")
            .field("bar", &bar)
            .field("baz", &baz)
            .finish()
    }
}
/// Layout checks for the three packed structs: for each one, build an array,
/// assert the expected total size/alignment, then read every element back
/// two ways — by index and by value iteration — and compare against the
/// expected contents.
pub fn main() {
    // `#[repr(packed)]`: 9 bytes per element, alignment 1 -> 90 bytes total.
    let foo1s = [Foo1 { bar: 1, baz: 2 }; 10];
    assert_eq!(mem::align_of::<[Foo1; 10]>(), 1);
    assert_eq!(mem::size_of::<[Foo1; 10]>(), 90);
    for idx in 0..foo1s.len() {
        assert_eq!(foo1s[idx], Foo1 { bar: 1, baz: 2 });
    }
    for foo in foo1s.iter().copied() {
        assert_eq!(foo, Foo1 { bar: 1, baz: 2 });
    }

    // `#[repr(packed(2))]`: 10 bytes per element, alignment 2 -> 100 bytes.
    let foo2s = [Foo2 { bar: 1, baz: 2 }; 10];
    assert_eq!(mem::align_of::<[Foo2; 10]>(), 2);
    assert_eq!(mem::size_of::<[Foo2; 10]>(), 100);
    for idx in 0..foo2s.len() {
        assert_eq!(foo2s[idx], Foo2 { bar: 1, baz: 2 });
    }
    for foo in foo2s.iter().copied() {
        assert_eq!(foo, Foo2 { bar: 1, baz: 2 });
    }

    // `#[repr(C, packed(4))]`: 12 bytes per element, alignment 4 -> 120 bytes.
    let foo4s = [Foo4C { bar: 1, baz: 2 }; 10];
    assert_eq!(mem::align_of::<[Foo4C; 10]>(), 4);
    assert_eq!(mem::size_of::<[Foo4C; 10]>(), 120);
    for idx in 0..foo4s.len() {
        assert_eq!(foo4s[idx], Foo4C { bar: 1, baz: 2 });
    }
    for foo in foo4s.iter().copied() {
        assert_eq!(foo, Foo4C { bar: 1, baz: 2 });
    }
}
| 22.439394 | 69 | 0.536462 |
56ded7300234b0a93e98badb274c4d6193b6bb2b | 7,564 | use crate::app::App;
use crate::colors::ColorScheme;
use crate::helpers::ID;
use crate::render::{DrawOptions, Renderable, OUTLINE_THICKNESS};
use ezgui::{Color, Drawable, GeomBatch, GfxCtx, Line, Prerender, Text};
use geom::{Angle, ArrowCap, Distance, PolyLine, Polygon, Pt2D};
use map_model::{Map, TurnType};
use sim::{CarID, CarStatus, DrawCarInput, VehicleType};
/// Width of a rendered car body in meters; also scales the wheels, brake
/// light, and turn arrows drawn in `DrawCar::new`.
const CAR_WIDTH: Distance = Distance::const_meters(1.75);

/// Cached rendering state for a single car, built once from a
/// `DrawCarInput` snapshot of the simulation.
pub struct DrawCar {
    /// Simulation ID of the car this drawable represents.
    pub id: CarID,
    // Center-line of the car along its lane/turn. Presumably kept for the
    // Renderable outline/hit-testing impl (outside this chunk) — confirm.
    body: PolyLine,
    // Filled body shape: a CAR_WIDTH-thick line unioned with a tapered
    // front wedge (see `DrawCar::new`).
    body_polygon: Polygon,
    // Draw order, taken from the map object the car is currently on.
    zorder: isize,
    // Pre-uploaded geometry batch: body, wheels, turn arrow, brake light,
    // parked-car icon, and any label.
    draw_default: Drawable,
}
impl DrawCar {
    /// Builds all the static geometry for one car — wheels, body polygon,
    /// turn arrow + brake light (while waiting for a turn), parked-car icon,
    /// and optional text label — and uploads it once via `prerender`.
    pub fn new(input: DrawCarInput, map: &Map, prerender: &Prerender, cs: &ColorScheme) -> DrawCar {
        let mut draw_default = GeomBatch::new();

        // Wheels: two short stripes (front and rear) on each side of the body.
        // NOTE(review): this reuses `cs.bike_frame` as the wheel color —
        // intentional, or should there be a dedicated wheel color?
        for side in vec![
            input.body.shift_right(CAR_WIDTH / 2.0).unwrap(),
            input.body.shift_left(CAR_WIDTH / 2.0).unwrap(),
        ] {
            let len = side.length();
            if len <= Distance::meters(2.0) {
                // The original body may be fine, but sometimes shifting drastically shortens the
                // length due to miter threshold chopping. Just give up on wheels in that case
                // instead of crashing.
                continue;
            }
            // Front wheel stripe, 0.5m-1.0m along this side.
            draw_default.push(
                cs.bike_frame,
                side.exact_slice(Distance::meters(0.5), Distance::meters(1.0))
                    .make_polygons(OUTLINE_THICKNESS / 2.0),
            );
            // Rear wheel stripe, mirrored near the far end of this side.
            draw_default.push(
                cs.bike_frame,
                side.exact_slice(len - Distance::meters(2.0), len - Distance::meters(1.5))
                    .make_polygons(OUTLINE_THICKNESS / 2.0),
            );
        }

        // Body: a full-width thick line for all but the last meter, then a
        // quadrilateral wedge tapering from full width down to half width at
        // the tip, so the front of the car is visually pointed.
        let body_polygon = {
            let len = input.body.length();
            let front_corner = len - Distance::meters(1.0);
            let thick_line = input
                .body
                .exact_slice(Distance::ZERO, front_corner)
                .make_polygons(CAR_WIDTH);
            let (corner_pt, corner_angle) = input.body.dist_along(front_corner);
            let (tip_pt, tip_angle) = input.body.dist_along(len);
            let front = Polygon::new(&vec![
                corner_pt.project_away(CAR_WIDTH / 2.0, corner_angle.rotate_degs(90.0)),
                corner_pt.project_away(CAR_WIDTH / 2.0, corner_angle.rotate_degs(-90.0)),
                tip_pt.project_away(CAR_WIDTH / 4.0, tip_angle.rotate_degs(-90.0)),
                tip_pt.project_away(CAR_WIDTH / 4.0, tip_angle.rotate_degs(90.0)),
            ]);
            front.union(thick_line)
        };
        draw_default.push(zoomed_color_car(&input, cs), body_polygon.clone());

        // Overlay the parked-car icon in the middle of the body when parked.
        if input.status == CarStatus::Parked {
            draw_default.append(
                GeomBatch::mapspace_svg(prerender, "../data/system/assets/map/parked_car.svg")
                    .scale(0.01)
                    .centered_on(input.body.middle()),
            );
        }

        {
            let arrow_len = 0.8 * CAR_WIDTH;
            let arrow_thickness = Distance::meters(0.5);
            if let Some(t) = input.waiting_for_turn {
                match map.get_t(t).turn_type {
                    // Left/right turns get a sideways arrow drawn 2.5m behind
                    // the front of the car; the two arms only differ in which
                    // side the arrow points to.
                    TurnType::Left => {
                        let (pos, angle) = input
                            .body
                            .dist_along(input.body.length() - Distance::meters(2.5));
                        draw_default.push(
                            cs.turn_arrow,
                            PolyLine::new(vec![
                                pos.project_away(arrow_len / 2.0, angle.rotate_degs(90.0)),
                                pos.project_away(arrow_len / 2.0, angle.rotate_degs(-90.0)),
                            ])
                            .make_arrow(arrow_thickness, ArrowCap::Triangle)
                            .unwrap(),
                        );
                    }
                    TurnType::Right => {
                        let (pos, angle) = input
                            .body
                            .dist_along(input.body.length() - Distance::meters(2.5));
                        draw_default.push(
                            cs.turn_arrow,
                            PolyLine::new(vec![
                                pos.project_away(arrow_len / 2.0, angle.rotate_degs(-90.0)),
                                pos.project_away(arrow_len / 2.0, angle.rotate_degs(90.0)),
                            ])
                            .make_arrow(arrow_thickness, ArrowCap::Triangle)
                            .unwrap(),
                        );
                    }
                    // Straight-ish turns get no arrow.
                    TurnType::Straight | TurnType::LaneChangeLeft | TurnType::LaneChangeRight => {}
                    // Pedestrian-only turn types can't apply to a car.
                    TurnType::Crosswalk | TurnType::SharedSidewalkCorner => unreachable!(),
                }
                // Brake light across the rear of the car.
                // NOTE(review): the original comment said "always draw the
                // brake light", but this only runs while waiting_for_turn is
                // Some — confirm whether it should move outside the `if let`.
                let (pos, angle) = input.body.dist_along(Distance::meters(0.5));
                // TODO rounded
                let window_length_gap = Distance::meters(0.2);
                let window_thickness = Distance::meters(0.3);
                draw_default.push(
                    cs.brake_light,
                    thick_line_from_angle(
                        window_thickness,
                        CAR_WIDTH - window_length_gap * 2.0,
                        pos.project_away(
                            CAR_WIDTH / 2.0 - window_length_gap,
                            angle.rotate_degs(-90.0),
                        ),
                        angle.rotate_degs(90.0),
                    ),
                );
            }
        }

        // Optional label (e.g. a bus route), drawn 9m along the body.
        if let Some(line) = input.label {
            // TODO Would rotation make any sense? Or at least adjust position/size while turning.
            // Buses are a constant length, so hardcoding this is fine.
            draw_default.append(
                Text::from(Line(line).fg(cs.bus_label))
                    .render_to_batch(prerender)
                    .scale(0.07)
                    .centered_on(input.body.dist_along(Distance::meters(9.0)).0),
            );
        }

        DrawCar {
            id: input.id,
            body: input.body,
            body_polygon,
            zorder: input.on.get_zorder(map),
            draw_default: prerender.upload(draw_default),
        }
    }
}
impl Renderable for DrawCar {
    fn get_id(&self) -> ID {
        ID::Car(self.id)
    }

    /// Just replays the geometry uploaded in `DrawCar::new`; nothing is rebuilt per frame.
    fn draw(&self, g: &mut GfxCtx, _: &App, _: &DrawOptions) {
        g.redraw(&self.draw_default);
    }

    fn get_outline(&self, _: &Map) -> Polygon {
        // Fall back to the filled body polygon when a thick boundary can't be
        // computed for this polyline.
        self.body
            .to_thick_boundary(CAR_WIDTH, OUTLINE_THICKNESS)
            .unwrap_or_else(|| self.body_polygon.clone())
    }

    fn contains_pt(&self, pt: Pt2D, _: &Map) -> bool {
        self.body_polygon.contains_pt(pt)
    }

    fn get_zorder(&self) -> isize {
        self.zorder
    }
}
/// Builds a rectangle of the given `thickness`, starting at `pt` and extending
/// `line_length` in the direction of `angle`.
fn thick_line_from_angle(
    thickness: Distance,
    line_length: Distance,
    pt: Pt2D,
    angle: Angle,
) -> Polygon {
    let endpoint = pt.project_away(line_length, angle);
    let segment = PolyLine::new(vec![pt, endpoint]);
    // Thickening a two-point polyline can't hit the degenerate cases that make
    // make_polygons fail.
    segment.make_polygons(thickness)
}
/// Picks the zoomed-in body color: buses use a fixed color, everything else a
/// per-agent rotating color, faded when parked.
fn zoomed_color_car(input: &DrawCarInput, cs: &ColorScheme) -> Color {
    match (input.id.1, input.status) {
        (VehicleType::Bus, _) => cs.bus_body,
        (_, CarStatus::Moving) => cs.rotating_color_agents(input.id.0),
        (_, CarStatus::Parked) => cs.rotating_color_agents(input.id.0).fade(1.5),
    }
}
| 37.261084 | 100 | 0.509519 |
e254f083859cfa6995f114c8f3288eee03727ce7 | 6,366 | use actix_web::{web, HttpResponse, Result};
use handlebars::Handlebars;
use serde::Deserialize;
use crate::appconfig::Settings;
use crate::error::AppError;
use crate::mud_runner_saves::MudrunnerSave;
use crate::snowrunner::SnowRunnerProfile;
use crate::SETTINGS;
#[get("/check")]
pub fn check() -> HttpResponse {
    // Liveness probe: always 200 with an empty body.
    let mut builder = HttpResponse::Ok();
    builder.finish()
}
#[post("/exit")]
pub fn exit() -> HttpResponse {
    // Terminates the whole server process. The HttpResponse return type is
    // never actually produced, since process::exit does not return.
    std::process::exit(0);
}
#[get("/")]
pub async fn index(hb: web::Data<Handlebars<'_>>) -> HttpResponse {
    // Read the configured UI color; a poisoned settings lock is a 500.
    let color = match SETTINGS.lock() {
        Ok(s) => s.get_color(),
        Err(_) => return HttpResponse::InternalServerError().finish(),
    };
    // Render the base template with the color as its only context.
    match hb.render("base", &color) {
        Ok(body) => HttpResponse::Ok().body(body),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}
#[get("/overview")]
pub async fn overview(hb: web::Data<Handlebars<'_>>) -> HttpResponse {
    // The overview template needs no context data.
    let rendered = hb.render("index", &());
    if let Ok(body) = rendered {
        HttpResponse::Ok().body(body)
    } else {
        HttpResponse::InternalServerError().finish()
    }
}
#[get("/mud-runner")]
pub async fn mud_runner(hb: web::Data<Handlebars<'_>>) -> Result<HttpResponse> {
    // List the MudRunner saves currently on disk and render the page for them.
    let avail_saves = MudrunnerSave::get_available_mudrunner_saves()?;
    let response = match hb.render("mudrunner", &avail_saves) {
        Ok(body) => HttpResponse::Ok().body(body),
        Err(_) => HttpResponse::InternalServerError().finish(),
    };
    Ok(response)
}
/// Query parameters for archiving a MudRunner savegame.
#[derive(Deserialize)]
pub struct MudrunnerSaveRequest {
    original_name: String, // name of the savegame file in the game's directory
    user_name: String,     // name the user wants the archived copy stored under
}
/// Query parameters for restoring an archived MudRunner savegame.
#[derive(Deserialize)]
pub struct MudrunnerRestoreRequest {
    user_name: String, // user-chosen name of the archived savegame to restore
}
/// Query parameters for renaming a SnowRunner profile.
#[derive(Deserialize)]
pub struct SnowrunnerUpdateAliasRequest {
    new_alias: String, // new display name for the profile
    uuid: String,      // identifier of the profile to rename
}
#[post("/mud-runner/save")]
pub async fn store_mudrunner_save(
params: web::Query<MudrunnerSaveRequest>,
) -> Result<HttpResponse, AppError> {
MudrunnerSave::archive_savegame(¶ms.user_name, ¶ms.original_name)?;
Ok(HttpResponse::Ok().finish())
}
#[put("/mud-runner/profile")]
pub async fn restore_mud_runner_save(
    params: web::Query<MudrunnerRestoreRequest>,
) -> Result<HttpResponse, AppError> {
    // Restore the named archived savegame back into the game's save directory.
    // `if let Ok(_) = …` on a Result used only to test success is clippy's
    // redundant_pattern_matching; `is_ok()` expresses the intent directly.
    // NOTE(review): the concrete restore error is discarded and flattened to a
    // plain 500; consider propagating it via AppError instead.
    if MudrunnerSave::restore_savegame(&params.user_name).is_ok() {
        Ok(HttpResponse::Ok().finish())
    } else {
        Ok(HttpResponse::InternalServerError().finish())
    }
}
#[put("/snow-runner/update-alias")]
pub async fn update_snow_runner_profile_alias(
    params: web::Query<SnowrunnerUpdateAliasRequest>,
) -> Result<HttpResponse, AppError> {
    // Rename the profile identified by `uuid`. Lookup failures propagate as
    // AppError; rename failures are flattened to a 500.
    let mut profile = SnowRunnerProfile::get_snowrunner_profile(&params.uuid)?;
    // `if let Ok(_) = …` only to test success is clippy's
    // redundant_pattern_matching; `is_ok()` says the same thing directly.
    if profile.update_profile_name(&params.new_alias).is_ok() {
        Ok(HttpResponse::Ok().finish())
    } else {
        Ok(HttpResponse::InternalServerError().finish())
    }
}
// *** SNOW RUNNER ***
#[get("/snow-runner")]
pub async fn snow_runner(hb: web::Data<Handlebars<'_>>) -> Result<HttpResponse> {
    // Render the SnowRunner profile overview page.
    let profiles = SnowRunnerProfile::get_available_snowrunner_profiles()?;
    let response = match hb.render("snowrunner", &profiles) {
        Ok(body) => HttpResponse::Ok().body(body),
        // Unlike the other pages, this one reports the template error text.
        Err(e) => HttpResponse::InternalServerError().body(e.to_string()),
    };
    Ok(response)
}
/// Query parameters identifying a single SnowRunner profile.
#[derive(Deserialize)]
pub struct SnowRunnerProfileRequest {
    id: String, // profile uuid
}
#[get("/snow-runner/profile")]
pub async fn get_snowrunner_profile(
    hb: web::Data<Handlebars<'_>>,
    params: web::Query<SnowRunnerProfileRequest>,
) -> Result<HttpResponse, AppError> {
    // Look the profile up, then render its list of archived saves.
    let saves = SnowRunnerProfile::get_snowrunner_profile(&params.id)?
        .get_archived_snowrunner_saves();
    let rendered = hb.render("snowrunner-saves", &saves);
    match rendered {
        Ok(body) => Ok(HttpResponse::Ok().body(body)),
        Err(_) => Ok(HttpResponse::InternalServerError().finish()),
    }
}
#[get("/mud-runner/profile")]
pub async fn get_mudrunner_profile(hb: web::Data<Handlebars<'_>>) -> Result<HttpResponse, AppError> {
    // Fetch the archived saves first; bail out with a 500 if that fails.
    let saves = match MudrunnerSave::get_archived_mudrunner_saves() {
        Ok(saves) => saves,
        Err(e) => {
            dbg!(&e);
            return Ok(HttpResponse::InternalServerError().finish());
        }
    };
    // Then render them; template failures also become a 500.
    match hb.render("mudrunner-saves", &saves) {
        Ok(body) => Ok(HttpResponse::Ok().body(body)),
        Err(e) => {
            dbg!(&e);
            Ok(HttpResponse::InternalServerError().finish())
        }
    }
}
/// Query parameters for archiving a SnowRunner profile's current savegame.
#[derive(Deserialize)]
pub struct SnowRunnerProfileSaveRequest {
    id: String,   // profile uuid
    name: String, // user-chosen name for the archived save
}
#[post("/snow-runner/profile")]
pub async fn store_snowrunner_profile(
    params: web::Query<SnowRunnerProfileSaveRequest>,
) -> Result<HttpResponse, AppError> {
    // Archive the profile's current savegame under the user-supplied name.
    let request = params.into_inner();
    let mut profile = SnowRunnerProfile::get_snowrunner_profile(&request.id)?;
    profile.archive_savegame(&request.name)?;
    Ok(HttpResponse::Ok().finish())
}
/// Query parameters addressing one archived savegame within one profile.
#[derive(Deserialize)]
pub struct SnowRunnerManageSavedRequest {
    id: String,       // profile uuid
    savegame: String, // savegame uuid
}
#[delete("/snow-runner/profile")]
pub async fn delete_snow_runner_save(
    params: web::Query<SnowRunnerManageSavedRequest>,
) -> Result<HttpResponse, AppError> {
    // Remove one archived savegame from the given profile's archive.
    SnowRunnerProfile::get_snowrunner_profile(&params.id)?
        .delete_archived_savegame(&params.savegame)?;
    Ok(HttpResponse::Ok().finish())
}
#[put("/snow-runner/profile")]
pub async fn restore_snow_runner_save(
    params: web::Query<SnowRunnerManageSavedRequest>,
) -> Result<HttpResponse, AppError> {
    // Copy the archived savegame back over the profile's live save.
    SnowRunnerProfile::get_snowrunner_profile(&params.id)?
        .restore_backup(&params.savegame)?;
    Ok(HttpResponse::Ok().finish())
}
#[get("/settings")]
pub fn settings() -> HttpResponse {
    // Serialize the current global settings as JSON; a poisoned lock is a 500.
    if let Ok(guard) = SETTINGS.lock() {
        HttpResponse::Ok().json(guard.clone())
    } else {
        HttpResponse::InternalServerError().finish()
    }
}
#[post("/settings")]
pub async fn save_settings(settings_json: web::Json<Settings>) -> HttpResponse {
    // Persist the posted settings to disk, then reload the global SETTINGS so
    // the running process picks them up immediately.
    //
    // The original `match match { … }` nested a match directly on another
    // match expression, obscuring the three distinct failure points (store,
    // lock, reload); this version handles each in sequence.
    if settings_json.store().is_err() {
        return HttpResponse::InternalServerError().finish();
    }
    let reload_result = match SETTINGS.lock() {
        Ok(mut s) => s.reload(),
        Err(_) => return HttpResponse::InternalServerError().finish(),
    };
    match reload_result {
        Ok(_) => HttpResponse::Ok().finish(),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}
| 30.028302 | 101 | 0.647502 |
2882bdaa21892753a6e4b391c700837803516a05 | 8,049 | #[allow(unused_macros)]
// Generates a complete one-shot authentication module (key/tag types, key
// generation, authenticate/verify) around a pair of libsodium FFI functions.
//
// Parameters:
//   $auth_name   - FFI function computing an authenticator tag
//   $verify_name - FFI function verifying a tag (returns 0 on success)
//   $keybytes    - key length in bytes
//   $tagbytes    - tag length in bytes
macro_rules! auth_module (($auth_name:ident,
                           $verify_name:ident,
                           $keybytes:expr,
                           $tagbytes:expr) => (
use libc::c_ulonglong;
use crate::randombytes::randombytes_into;
/// Number of bytes in a `Key`.
pub const KEYBYTES: usize = $keybytes;
/// Number of bytes in a `Tag`.
pub const TAGBYTES: usize = $tagbytes;
new_type! {
    /// Authentication `Key`
    ///
    /// When a `Key` goes out of scope its contents
    /// will be zeroed out
    secret Key(KEYBYTES);
}
new_type! {
    /// Authentication `Tag`
    ///
    /// The tag implements the traits `PartialEq` and `Eq` using constant-time
    /// comparison functions. See `sodiumoxide::utils::memcmp`
    public Tag(TAGBYTES);
}
/// `gen_key()` randomly generates a key for authentication
///
/// THREAD SAFETY: `gen_key()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn gen_key() -> Key {
    let mut k = [0; KEYBYTES];
    randombytes_into(&mut k);
    Key(k)
}
/// `authenticate()` authenticates a message `m` using a secret key `k`.
/// The function returns an authenticator tag.
pub fn authenticate(m: &[u8],
                    &Key(ref k): &Key) -> Tag {
    unsafe {
        // The FFI function fills exactly TAGBYTES bytes of `tag`; both sizes
        // come from the same macro invocation, so they agree by construction.
        let mut tag = [0; TAGBYTES];
        $auth_name(&mut tag,
                   m.as_ptr(),
                   m.len() as c_ulonglong,
                   k);
        Tag(tag)
    }
}
/// `verify()` returns `true` if `tag` is a correct authenticator of message `m`
/// under a secret key `k`. Otherwise it returns false.
pub fn verify(&Tag(ref tag): &Tag, m: &[u8],
              &Key(ref k): &Key) -> bool {
    unsafe {
        // A zero return value from the FFI verify function means the tag matches.
        $verify_name(tag,
                     m.as_ptr(),
                     m.len() as c_ulonglong,
                     k) == 0
    }
}
#[cfg(test)]
mod test_m {
    use crate::randombytes::randombytes;
    use super::*;
    // Round-trip: a freshly computed tag must verify for message lengths 0..256.
    #[test]
    fn test_auth_verify() {
        for i in 0..256usize {
            let k = gen_key();
            let m = randombytes(i);
            let tag = authenticate(&m, &k);
            assert!(verify(&tag, &m, &k));
        }
    }
    // Flipping any single byte of the message or of the tag must break verification.
    #[test]
    fn test_auth_verify_tamper() {
        for i in 0..32usize {
            let k = gen_key();
            let mut m = randombytes(i);
            let Tag(mut tagbuf) = authenticate(&mut m, &k);
            for j in 0..m.len() {
                m[j] ^= 0x20;
                assert!(!verify(&Tag(tagbuf), &mut m, &k));
                m[j] ^= 0x20;
            }
            for j in 0..tagbuf.len() {
                tagbuf[j] ^= 0x20;
                assert!(!verify(&Tag(tagbuf), &mut m, &k));
                tagbuf[j] ^= 0x20;
            }
        }
    }
    #[cfg(feature = "serde")]
    #[test]
    fn test_serialisation() {
        use crate::test_utils::round_trip;
        for i in 0..256usize {
            let k = gen_key();
            let m = randombytes(i);
            let tag = authenticate(&m, &k);
            round_trip(k);
            round_trip(tag);
        }
    }
}
#[cfg(feature = "benchmarks")]
#[cfg(test)]
mod bench_m {
    extern crate test;
    use crate::randombytes::randombytes;
    use super::*;
    const BENCH_SIZES: [usize; 14] = [0, 1, 2, 4, 8, 16, 32, 64,
                                      128, 256, 512, 1024, 2048, 4096];
    #[bench]
    fn bench_auth(b: &mut test::Bencher) {
        let k = gen_key();
        let ms: Vec<Vec<u8>> = BENCH_SIZES.iter().map(|s| {
            randombytes(*s)
        }).collect();
        b.iter(|| {
            for m in ms.iter() {
                authenticate(&m, &k);
            }
        });
    }
    #[bench]
    fn bench_verify(b: &mut test::Bencher) {
        let k = gen_key();
        let ms: Vec<Vec<u8>> = BENCH_SIZES.iter().map(|s| {
            randombytes(*s)
        }).collect();
        let tags: Vec<Tag> = ms.iter().map(|m| {
            authenticate(&m, &k)
        }).collect();
        b.iter(|| {
            for (m, t) in ms.iter().zip(tags.iter()) {
                verify(t, &m, &k);
            }
        });
    }
}
));
/// Macro for defining streaming authenticator tag computation types and functions.
///
/// Parameters:
/// $state_name - The authenticator state type.
///               SAFETY NOTE: This needs to be a type that does not define a `Drop`
///               implementation, otherwise undefined behaviour will occur.
/// $init_name - A function `f(s: *mut $state_name, k: *u8, klen: size_t)` that initializes
///              a state with a key.
/// $update_name - A function `f(s: *mut $state_name, m: *u8, mlen: size_t)` that updates
///                a state with a message chunk.
/// $final_name - A function `f(s: *mut $state_name, t: *u8)` that computes an authenticator tag of length $tagbytes from a $state_name.
/// $tagbytes - The number of bytes in an authenticator tag.
#[allow(unused_macros)]
macro_rules! auth_state (($state_name:ident,
                          $init_name:ident,
                          $update_name:ident,
                          $final_name:ident,
                          $tagbytes:expr) => (
use std::mem;
use ffi;
/// Authentication `State`
///
/// State for multi-part (streaming) authenticator tag (HMAC) computation.
///
/// When a `State` goes out of scope its contents will be zeroed out.
///
/// NOTE: the streaming interface takes variable length keys, as opposed to the
/// simple interface which takes a fixed length key. The streaming interface also does not
/// define its own `Key` type, instead using slices for its `init()` method.
/// The caller of the functions is responsible for zeroing out the key after it's been used
/// (in contrast to the simple interface which defines a `Drop` implementation for `Key`).
///
/// NOTE: these functions are specific to `libsodium` and do not exist in `NaCl`.
#[must_use]
pub struct State($state_name);
impl Drop for State {
    fn drop(&mut self) {
        // Zero the entire FFI state on drop so key material doesn't linger in memory.
        let &mut State(ref mut s) = self;
        unsafe {
            let sp: *mut $state_name = s;
            ffi::sodium_memzero(sp as *mut u8, mem::size_of_val(s));
        }
    }
}
impl State {
    /// `init()` initializes an authentication structure using a secret key 'k'.
    pub fn init(k: &[u8]) -> State {
        unsafe {
            // NOTE(review): `mem::uninitialized` is deprecated and is UB for
            // types with validity invariants; this relies on $init_name fully
            // initializing the state. Should migrate to `MaybeUninit` — confirm
            // against the concrete libsodium state type.
            let mut s = mem::uninitialized();
            $init_name(&mut s, k.as_ptr(), k.len());
            State(s)
        }
    }
    /// `update()` can be called more than once in order to compute the authenticator
    /// from sequential chunks of the message.
    pub fn update(&mut self, in_: &[u8]) {
        let &mut State(ref mut state) = self;
        unsafe {
            $update_name(state, in_.as_ptr(), in_.len() as c_ulonglong);
        }
    }
    /// `finalize()` finalizes the authenticator computation and returns a `Tag`.
    pub fn finalize(&mut self) -> Tag {
        unsafe {
            let &mut State(ref mut state) = self;
            // The FFI finalizer writes exactly $tagbytes bytes into `tag`.
            let mut tag = [0; $tagbytes as usize];
            $final_name(state, &mut tag);
            Tag(tag)
        }
    }
}
#[cfg(test)]
mod test_s {
    use crate::randombytes::randombytes;
    use super::*;
    // The streaming interface must agree with the one-shot `authenticate()`.
    #[test]
    fn test_auth_eq_auth_state() {
        for i in 0..256usize {
            let k = gen_key();
            let m = randombytes(i);
            let tag = authenticate(&m, &k);
            let mut state = State::init(&k[..]);
            state.update(&m);
            let tag2 = state.finalize();
            assert_eq!(tag, tag2);
        }
    }
    // Same check, but feeding the message one byte at a time.
    #[test]
    fn test_auth_eq_auth_state_chunked() {
        for i in 0..256usize {
            let k = gen_key();
            let m = randombytes(i);
            let tag = authenticate(&m, &k);
            let mut state = State::init(&k[..]);
            for c in m.chunks(1) {
                state.update(c);
            }
            let tag2 = state.finalize();
            assert_eq!(tag, tag2);
        }
    }
}
));
| 29.591912 | 155 | 0.525158 |
563d86273cb6dcf468b61d646aa8e0b69263cd0f | 6,744 | extern crate framework;
#[macro_use]
extern crate log;
extern crate pretty_env_logger;
extern crate sfml;
extern crate toml;
/// SFML-based audio visualizer: draws mirrored left/right spectrum columns and
/// runs the window/event loop until the window is closed or Escape is released.
///
/// `config` supplies optional DISPLAY_COLUMNS / WINDOW_HEIGHT / WINDOW_WIDTH
/// keys; `audio_info` is the shared per-column amplitude data updated elsewhere;
/// `run_mode` controls live display vs. frame-by-frame rendering to PNG files.
pub fn visualizer(
    config: ::std::sync::Arc<toml::Value>,
    audio_info: ::std::sync::Arc<::std::sync::RwLock<framework::AudioInfo>>,
    mut run_mode: framework::RunMode,
) {
    use sfml::graphics::RenderTarget;
    // --- Configuration (every key is optional; defaults below) ---
    let display_columns = config
        .get("DISPLAY_COLUMNS")
        .map(|v| {
            v.as_integer().expect("DISPLAY_COLUMNS must be an integer")
        })
        .unwrap_or(50) as usize;
    info!("DISPLAY_COLUMNS = {}", display_columns);
    let window_height = config
        .get("WINDOW_HEIGHT")
        .map(|v| {
            v.as_integer().expect("WINDOW_HEIGHT must be an integer")
        })
        .unwrap_or(720) as u32;
    info!("WINDOW_HEIGHT = {}", window_height);
    let window_width = config
        .get("WINDOW_WIDTH")
        .map(|v| v.as_integer().expect("WINDOW_WIDTH must be an integer"))
        .unwrap_or(1280) as u32;
    info!("WINDOW_WIDTH = {}", window_width);
    // --- Window and offscreen render target ---
    let mut settings = sfml::window::ContextSettings::default();
    settings.antialiasing_level = 8;
    let mut window = sfml::graphics::RenderWindow::new(
        sfml::window::VideoMode::new(window_width, window_height, 32),
        "PulseAudio Visualizer",
        sfml::window::Style::NONE,
        &settings,
    );
    let mut render_texture = sfml::graphics::RenderTexture::new(window_width, window_height, true)
        .unwrap();
    // View centered on the origin, 2.2 units across, so column x positions in
    // [-1, 1] fill the texture.
    render_texture.set_view(&sfml::graphics::View::new(
        sfml::system::Vector2::new(0.0, 0.0),
        sfml::system::Vector2::new(2.2, 2.2),
    ));
    // Six vertex arrays: for each side (left mirrors right around x = 0),
    // a top outline strip, a bottom outline strip, and a "ttb" (top-to-bottom)
    // array holding two vertices per column, drawn as vertical line segments.
    let mut vertex_array_left_top = Vec::with_capacity(display_columns);
    let mut vertex_array_right_top = Vec::with_capacity(display_columns);
    let mut vertex_array_left_bottom = Vec::with_capacity(display_columns);
    let mut vertex_array_right_bottom = Vec::with_capacity(display_columns);
    let mut vertex_array_left_ttb = Vec::with_capacity(display_columns * 2);
    let mut vertex_array_right_ttb = Vec::with_capacity(display_columns * 2);
    for i in 0..display_columns {
        // x positions are fixed up-front; only y is mutated per frame below.
        vertex_array_left_top.push(sfml::graphics::Vertex::with_pos(
            (1.0 - (i as f32 / display_columns as f32) - 1.0, 0.0),
        ));
        vertex_array_right_top.push(sfml::graphics::Vertex::with_pos(
            ((i as f32 / display_columns as f32), 0.0),
        ));
        vertex_array_left_bottom.push(sfml::graphics::Vertex::with_pos(
            (1.0 - (i as f32 / display_columns as f32) - 1.0, 0.0),
        ));
        vertex_array_right_bottom.push(sfml::graphics::Vertex::with_pos(
            ((i as f32 / display_columns as f32), 0.0),
        ));
        vertex_array_left_ttb.push(sfml::graphics::Vertex::with_pos(
            (1.0 - (i as f32 / display_columns as f32) - 1.0, 0.0),
        ));
        vertex_array_left_ttb.push(sfml::graphics::Vertex::with_pos(
            (1.0 - (i as f32 / display_columns as f32) - 1.0, 0.0),
        ));
        vertex_array_right_ttb.push(sfml::graphics::Vertex::with_pos(
            ((i as f32 / display_columns as f32), 0.0),
        ));
        vertex_array_right_ttb.push(sfml::graphics::Vertex::with_pos(
            ((i as f32 / display_columns as f32), 0.0),
        ));
    }
    'mainloop: loop {
        // Event handling
        while let Some(ref ev) = window.poll_event() {
            match *ev {
                sfml::window::Event::Closed => break 'mainloop,
                sfml::window::Event::KeyReleased { code: sfml::window::Key::Escape, .. } => {
                    break 'mainloop
                }
                _ => (),
            }
        }
        {
            // Update vertex heights from the shared audio data. Each column is
            // drawn symmetrically around y = 0: top at -size/2, bottom at +size/2.
            let ai = audio_info.read().expect("Couldn't read audio info");
            render_texture.clear(&sfml::graphics::Color::rgb(0x18, 0x15, 0x11));
            let factor = 1.0;
            for si in 0..display_columns {
                let reshape = 1.0; // - 1.0 / ((si as f32) / (display_columns as f32) * 10.0).exp();
                let vl = ai.columns_left[si] * reshape;
                let size = vl * factor;
                vertex_array_left_top[si].position.y = -size / 2.0;
                vertex_array_left_bottom[si].position.y = size / 2.0;
                vertex_array_left_ttb[si * 2].position.y = -size / 2.0;
                vertex_array_left_ttb[si * 2 + 1].position.y = size / 2.0;
                let vr = ai.columns_right[si] * reshape;
                let size = vr * factor;
                vertex_array_right_top[si].position.y = -size / 2.0;
                vertex_array_right_bottom[si].position.y = size / 2.0;
                vertex_array_right_ttb[si * 2].position.y = -size / 2.0;
                vertex_array_right_ttb[si * 2 + 1].position.y = size / 2.0;
            }
        }
        // Outline strips skip index 0 ([1..]); the ttb arrays are drawn as
        // independent line segments (two vertices each).
        render_texture.draw_primitives(
            &vertex_array_left_top[1..],
            sfml::graphics::PrimitiveType::LineStrip,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.draw_primitives(
            &vertex_array_right_top[1..],
            sfml::graphics::PrimitiveType::LineStrip,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.draw_primitives(
            &vertex_array_left_bottom[1..],
            sfml::graphics::PrimitiveType::LineStrip,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.draw_primitives(
            &vertex_array_right_bottom[1..],
            sfml::graphics::PrimitiveType::LineStrip,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.draw_primitives(
            &vertex_array_left_ttb,
            sfml::graphics::PrimitiveType::Lines,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.draw_primitives(
            &vertex_array_right_ttb,
            sfml::graphics::PrimitiveType::Lines,
            sfml::graphics::RenderStates::default(),
        );
        render_texture.display();
        // Blit the offscreen texture into the window.
        window.clear(&sfml::graphics::Color::rgb(0x18, 0x15, 0x11));
        let sprite = sfml::graphics::Sprite::with_texture(render_texture.texture());
        window.draw(&sprite);
        window.display();
        if let framework::RunMode::Rendering(ref mut render_info) = run_mode {
            // In render mode, also dump each frame as a numbered PNG.
            // NOTE(review): save_to_file's success/failure result is ignored;
            // frame numbering is presumably advanced by framework::sleep — confirm.
            let img = render_texture.texture().copy_to_image().unwrap();
            img.save_to_file(&format!(
                "{}/{:06}.png",
                render_info.outdir,
                render_info.frame
            ));
        }
        // Frame pacing (and render-mode bookkeeping) is delegated to the framework.
        framework::sleep(&mut run_mode);
    }
}
fn main() {
    // The logger must be installed before framework::start, which logs config values.
    pretty_env_logger::init().unwrap();
    // Blocks in the visualizer's event/render loop until the window is closed.
    framework::start("configs/sfml.toml", visualizer);
}
| 37.054945 | 100 | 0.581999 |
6734979230513b024e2bb9b59b322abbb2607cb2 | 1,772 | use std::sync::Arc;
use crate::{array::FromFfi, bitmap::align, error::Result, ffi};
use super::super::{ffi::ToFfi, Array, Offset};
use super::ListArray;
unsafe impl<O: Offset> ToFfi for ListArray<O> {
    fn buffers(&self) -> Vec<Option<std::ptr::NonNull<u8>>> {
        // Buffer order follows the Arrow C data interface: validity bitmap
        // first (None when all-valid), then the offsets buffer as raw bytes.
        vec![
            self.validity.as_ref().map(|x| x.as_ptr()),
            Some(self.offsets.as_ptr().cast::<u8>()),
        ]
    }

    fn children(&self) -> Vec<Arc<dyn Array>> {
        // A list array has exactly one child: its values array.
        vec![self.values.clone()]
    }

    fn offset(&self) -> Option<usize> {
        // The FFI struct carries a single offset for the whole array, so we can
        // only report one when the validity bitmap (if present) agrees with the
        // offsets buffer; otherwise the caller must re-align first.
        let offset = self.offsets.offset();
        if let Some(bitmap) = self.validity.as_ref() {
            if bitmap.offset() == offset {
                Some(offset)
            } else {
                None
            }
        } else {
            Some(offset)
        }
    }

    fn to_ffi_aligned(&self) -> Self {
        // Produce a copy whose validity bitmap is re-aligned to the offsets'
        // offset, so `offset()` above is guaranteed to return Some for it.
        let offset = self.offsets.offset();
        let validity = self.validity.as_ref().map(|bitmap| {
            if bitmap.offset() == offset {
                bitmap.clone()
            } else {
                align(bitmap, offset)
            }
        });
        Self {
            data_type: self.data_type.clone(),
            validity,
            offsets: self.offsets.clone(),
            values: self.values.clone(),
        }
    }
}
impl<O: Offset, A: ffi::ArrowArrayRef> FromFfi<A> for ListArray<O> {
    unsafe fn try_from_ffi(array: A) -> Result<Self> {
        // Caller must guarantee `array` upholds the Arrow C data interface.
        // Buffer index 1 is the offsets buffer (the validity bitmap is read
        // separately via `validity()`); child 0 is the values array.
        let data_type = array.data_type().clone();
        let validity = unsafe { array.validity() }?;
        let offsets = unsafe { array.buffer::<O>(1) }?;
        let child = unsafe { array.child(0)? };
        let values = ffi::try_from(child)?.into();
        Ok(Self::from_data(data_type, offsets, values, validity))
    }
}
| 27.6875 | 68 | 0.516366 |
c1da77a6668f51b513ab79d7428a87aa1bcebd3d | 4,807 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "tempdir", reason = "needs an RFC before stabilization")]
#![deprecated(since = "1.0.0",
reason = "use the `tempdir` crate from crates.io instead")]
#![allow(deprecated)]
use prelude::v1::*;
use env;
use io::{self, Error, ErrorKind};
use fs;
use path::{self, PathBuf, AsPath};
use rand::{thread_rng, Rng};
/// A wrapper for a path to temporary directory implementing automatic
/// scope-based deletion.
pub struct TempDir {
    // `Some` while the directory is still owned by this wrapper; taken
    // (leaving `None`) by `into_path` so `drop` knows not to delete it.
    path: Option<PathBuf>,
}
// How many times should we (re)try finding an unused random name? It should be
// enough that an attacker will run out of luck before we run out of patience.
// (2^31 attempts.)
const NUM_RETRIES: u32 = 1 << 31;
// How many characters should we include in a random file name? It needs to
// be enough to dissuade an attacker from trying to preemptively create names
// of that length, but not so huge that we unnecessarily drain the random number
// generator of entropy.
const NUM_RAND_CHARS: uint = 12;
impl TempDir {
    /// Attempts to make a temporary directory inside of `tmpdir` whose name
    /// will have the prefix `prefix`. The directory will be automatically
    /// deleted once the returned wrapper is destroyed.
    ///
    /// If no directory can be created, `Err` is returned.
    #[allow(deprecated)] // rand usage
    pub fn new_in<P: AsPath + ?Sized>(tmpdir: &P, prefix: &str)
                                      -> io::Result<TempDir> {
        // `storage` exists only so the absolutized path outlives the rebound
        // `tmpdir` reference below.
        let storage;
        let mut tmpdir = tmpdir.as_path();
        if !tmpdir.is_absolute() {
            let cur_dir = try!(env::current_dir());
            storage = cur_dir.join(tmpdir);
            tmpdir = &storage;
            // return TempDir::new_in(&cur_dir.join(tmpdir), prefix);
        }
        let mut rng = thread_rng();
        for _ in 0..NUM_RETRIES {
            let suffix: String = rng.gen_ascii_chars().take(NUM_RAND_CHARS).collect();
            let leaf = if prefix.len() > 0 {
                format!("{}.{}", prefix, suffix)
            } else {
                // If we're given an empty string for a prefix, then creating a
                // directory starting with "." would lead to it being
                // semi-invisible on some systems.
                suffix
            };
            let path = tmpdir.join(&leaf);
            match fs::create_dir(&path) {
                Ok(_) => return Ok(TempDir { path: Some(path) }),
                // Name collision: try again with a fresh random suffix.
                Err(ref e) if e.kind() == ErrorKind::PathAlreadyExists => {}
                Err(e) => return Err(e)
            }
        }
        Err(Error::new(ErrorKind::PathAlreadyExists,
                       "too many temporary directories already exist",
                       None))
    }

    /// Attempts to make a temporary directory inside of `env::temp_dir()` whose
    /// name will have the prefix `prefix`. The directory will be automatically
    /// deleted once the returned wrapper is destroyed.
    ///
    /// If no directory can be created, `Err` is returned.
    #[allow(deprecated)]
    pub fn new(prefix: &str) -> io::Result<TempDir> {
        TempDir::new_in(&env::temp_dir(), prefix)
    }

    /// Unwrap the wrapped `std::path::Path` from the `TempDir` wrapper.
    /// This discards the wrapper so that the automatic deletion of the
    /// temporary directory is prevented.
    pub fn into_path(mut self) -> PathBuf {
        // Taking the path leaves `None` behind, so the Drop impl becomes a no-op.
        self.path.take().unwrap()
    }

    /// Access the wrapped `std::path::Path` to the temporary directory.
    pub fn path(&self) -> &path::Path {
        self.path.as_ref().unwrap()
    }

    /// Close and remove the temporary directory
    ///
    /// Although `TempDir` removes the directory on drop, in the destructor
    /// any errors are ignored. To detect errors cleaning up the temporary
    /// directory, call `close` instead.
    pub fn close(mut self) -> io::Result<()> {
        self.cleanup_dir()
    }

    // Shared deletion helper for `close` and `drop`; does nothing once the
    // path has been taken by `into_path`.
    fn cleanup_dir(&mut self) -> io::Result<()> {
        match self.path {
            Some(ref p) => fs::remove_dir_all(p),
            None => Ok(())
        }
    }
}
impl Drop for TempDir {
    fn drop(&mut self) {
        // Best-effort cleanup: deletion errors are deliberately ignored here.
        // Use `close()` instead when failures must be observed.
        let _ = self.cleanup_dir();
    }
}
// the tests for this module need to change the path using change_dir,
// and this doesn't play nicely with other tests so these unit tests are located
// in src/test/run-pass/tempfile.rs
| 37.263566 | 86 | 0.615353 |
ac653065cc0d44069ff9555118c9cbc80e9f33ef | 19,345 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Projection Push Down optimizer rule ensures that only referenced columns are
//! loaded into memory
use crate::error::{DataFusionError, Result};
use crate::logical_plan::LogicalPlan;
use crate::optimizer::optimizer::OptimizerRule;
use crate::optimizer::utils;
use arrow::datatypes::{Field, Schema, SchemaRef};
use arrow::error::Result as ArrowResult;
use std::{collections::HashSet, sync::Arc};
use utils::optimize_explain;
/// Optimizer rule that removes unused projections and aggregations from plans.
/// This reduces both the set of columns read by scans and the width of the
/// data flowing through the rest of the plan.
pub struct ProjectionPushDown {}
impl OptimizerRule for ProjectionPushDown {
    fn optimize(&mut self, plan: &LogicalPlan) -> Result<LogicalPlan> {
        // Seed the required-column set with every column the plan's root
        // produces; optimize_plan then prunes anything outside this set as it
        // walks down the tree.
        let required_columns = plan
            .schema()
            .fields()
            .iter()
            .map(|f| f.name().clone())
            .collect::<HashSet<String>>();
        // Trailing `return …;` on the final expression is clippy's
        // needless_return; use the expression form instead.
        optimize_plan(self, plan, &required_columns, false)
    }

    fn name(&self) -> &str {
        "projection_push_down"
    }
}
impl ProjectionPushDown {
#[allow(missing_docs)]
pub fn new() -> Self {
Self {}
}
}
/// Resolves the accumulated `required_columns` against `schema`, producing the
/// sorted list of column indexes to scan and the matching projected schema.
///
/// Errors if `projection` is already set, since that means this rule has
/// already run once on the plan.
fn get_projected_schema(
    schema: &Schema,
    projection: &Option<Vec<usize>>,
    required_columns: &HashSet<String>,
    has_projection: bool,
) -> Result<(Vec<usize>, SchemaRef)> {
    if projection.is_some() {
        return Err(DataFusionError::Internal(
            "Cannot run projection push-down rule more than once".to_string(),
        ));
    }
    // Translate column names into indexes, silently dropping names that aren't
    // part of this schema (e.g. columns that only exist as aggregate outputs).
    let mut projection = required_columns
        .iter()
        .filter_map(|name| schema.index_of(name).ok())
        .collect::<Vec<usize>>();
    if projection.is_empty() {
        if has_projection {
            // Read at least one column so queries that reference none directly
            // (e.g. "SELECT COUNT(1) FROM table") still scan something.
            projection.push(0);
        } else {
            // A scan with no projection at all defaults to every column.
            projection = (0..schema.fields().len()).collect();
        }
    }
    // Sorting makes the column order deterministic despite HashSet iteration.
    projection.sort();
    let projected_fields: Vec<Field> = projection
        .iter()
        .map(|i| schema.fields()[*i].clone())
        .collect();
    Ok((projection, SchemaRef::new(Schema::new(projected_fields))))
}
/// Recursively transverses the logical plan removing expressions and that are not needed.
fn optimize_plan(
optimizer: &mut ProjectionPushDown,
plan: &LogicalPlan,
required_columns: &HashSet<String>, // set of columns required up to this step
has_projection: bool,
) -> Result<LogicalPlan> {
let mut new_required_columns = required_columns.clone();
match plan {
LogicalPlan::Projection {
input,
expr,
schema,
} => {
// projection:
// * remove any expression that is not required
// * construct the new set of required columns
let mut new_expr = Vec::new();
let mut new_fields = Vec::new();
// Gather all columns needed for expressions in this Projection
schema
.fields()
.iter()
.enumerate()
.map(|(i, field)| {
if required_columns.contains(field.name()) {
new_expr.push(expr[i].clone());
new_fields.push(field.clone());
// gather the new set of required columns
utils::expr_to_column_names(&expr[i], &mut new_required_columns)
} else {
Ok(())
}
})
.collect::<Result<()>>()?;
let new_input =
optimize_plan(optimizer, &input, &new_required_columns, true)?;
if new_fields.len() == 0 {
// no need for an expression at all
Ok(new_input)
} else {
Ok(LogicalPlan::Projection {
expr: new_expr,
input: Arc::new(new_input),
schema: SchemaRef::new(Schema::new(new_fields)),
})
}
}
LogicalPlan::Join {
left,
right,
on,
join_type,
schema,
} => {
for (l, r) in on {
new_required_columns.insert(l.to_owned());
new_required_columns.insert(r.to_owned());
}
Ok(LogicalPlan::Join {
left: Arc::new(optimize_plan(
optimizer,
&left,
&new_required_columns,
true,
)?),
right: Arc::new(optimize_plan(
optimizer,
&right,
&new_required_columns,
true,
)?),
join_type: join_type.clone(),
on: on.clone(),
schema: schema.clone(),
})
}
LogicalPlan::Aggregate {
schema,
input,
group_expr,
aggr_expr,
..
} => {
// aggregate:
// * remove any aggregate expression that is not required
// * construct the new set of required columns
utils::exprlist_to_column_names(group_expr, &mut new_required_columns)?;
// Gather all columns needed for expressions in this Aggregate
let mut new_aggr_expr = Vec::new();
aggr_expr
.iter()
.map(|expr| {
let name = &expr.name(&schema)?;
if required_columns.contains(name) {
new_aggr_expr.push(expr.clone());
new_required_columns.insert(name.clone());
// add to the new set of required columns
utils::expr_to_column_names(expr, &mut new_required_columns)
} else {
Ok(())
}
})
.collect::<Result<()>>()?;
let new_schema = Schema::new(
schema
.fields()
.iter()
.filter(|x| new_required_columns.contains(x.name()))
.cloned()
.collect(),
);
Ok(LogicalPlan::Aggregate {
group_expr: group_expr.clone(),
aggr_expr: new_aggr_expr,
input: Arc::new(optimize_plan(
optimizer,
&input,
&new_required_columns,
true,
)?),
schema: SchemaRef::new(new_schema),
})
}
// scans:
// * remove un-used columns from the scan projection
LogicalPlan::TableScan {
schema_name,
source,
table_schema,
projection,
..
} => {
let (projection, projected_schema) = get_projected_schema(
&table_schema,
projection,
required_columns,
has_projection,
)?;
// return the table scan with projection
Ok(LogicalPlan::TableScan {
schema_name: schema_name.to_string(),
source: source.clone(),
table_schema: table_schema.clone(),
projection: Some(projection),
projected_schema,
})
}
LogicalPlan::InMemoryScan {
data,
schema,
projection,
..
} => {
let (projection, projected_schema) = get_projected_schema(
&schema,
projection,
required_columns,
has_projection,
)?;
Ok(LogicalPlan::InMemoryScan {
data: data.clone(),
schema: schema.clone(),
projection: Some(projection),
projected_schema,
})
}
LogicalPlan::CsvScan {
path,
has_header,
delimiter,
schema,
projection,
..
} => {
let (projection, projected_schema) = get_projected_schema(
&schema,
projection,
required_columns,
has_projection,
)?;
Ok(LogicalPlan::CsvScan {
path: path.to_owned(),
has_header: *has_header,
schema: schema.clone(),
delimiter: *delimiter,
projection: Some(projection),
projected_schema,
})
}
LogicalPlan::ParquetScan {
path,
schema,
projection,
..
} => {
let (projection, projected_schema) = get_projected_schema(
&schema,
projection,
required_columns,
has_projection,
)?;
Ok(LogicalPlan::ParquetScan {
path: path.to_owned(),
schema: schema.clone(),
projection: Some(projection),
projected_schema,
})
}
LogicalPlan::Explain {
verbose,
plan,
stringified_plans,
schema,
} => optimize_explain(optimizer, *verbose, &*plan, stringified_plans, &*schema),
// all other nodes: Add any additional columns used by
// expressions in this node to the list of required columns
LogicalPlan::Limit { .. }
| LogicalPlan::Filter { .. }
| LogicalPlan::EmptyRelation { .. }
| LogicalPlan::Sort { .. }
| LogicalPlan::CreateExternalTable { .. }
| LogicalPlan::Extension { .. } => {
let expr = utils::expressions(plan);
// collect all required columns by this plan
utils::exprlist_to_column_names(&expr, &mut new_required_columns)?;
// apply the optimization to all inputs of the plan
let inputs = utils::inputs(plan);
let new_inputs = inputs
.iter()
.map(|plan| {
optimize_plan(optimizer, plan, &new_required_columns, has_projection)
})
.collect::<Result<Vec<_>>>()?;
utils::from_plan(plan, &expr, &new_inputs)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::logical_plan::{col, lit};
use crate::logical_plan::{max, min, Expr, LogicalPlanBuilder};
use crate::test::*;
use arrow::datatypes::DataType;
#[test]
fn aggregate_no_group_by() -> Result<()> {
let table_scan = test_table_scan()?;
let plan = LogicalPlanBuilder::from(&table_scan)
.aggregate(vec![], vec![max(col("b"))])?
.build()?;
let expected = "Aggregate: groupBy=[[]], aggr=[[MAX(#b)]]\
\n TableScan: test projection=Some([1])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn aggregate_group_by() -> Result<()> {
let table_scan = test_table_scan()?;
let plan = LogicalPlanBuilder::from(&table_scan)
.aggregate(vec![col("c")], vec![max(col("b"))])?
.build()?;
let expected = "Aggregate: groupBy=[[#c]], aggr=[[MAX(#b)]]\
\n TableScan: test projection=Some([1, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn aggregate_no_group_by_with_filter() -> Result<()> {
let table_scan = test_table_scan()?;
let plan = LogicalPlanBuilder::from(&table_scan)
.filter(col("c"))?
.aggregate(vec![], vec![max(col("b"))])?
.build()?;
let expected = "Aggregate: groupBy=[[]], aggr=[[MAX(#b)]]\
\n Filter: #c\
\n TableScan: test projection=Some([1, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn cast() -> Result<()> {
let table_scan = test_table_scan()?;
let projection = LogicalPlanBuilder::from(&table_scan)
.project(vec![Expr::Cast {
expr: Box::new(col("c")),
data_type: DataType::Float64,
}])?
.build()?;
let expected = "Projection: CAST(#c AS Float64)\
\n TableScan: test projection=Some([2])";
assert_optimized_plan_eq(&projection, expected);
Ok(())
}
#[test]
fn table_scan_projected_schema() -> Result<()> {
let table_scan = test_table_scan()?;
assert_eq!(3, table_scan.schema().fields().len());
assert_fields_eq(&table_scan, vec!["a", "b", "c"]);
let plan = LogicalPlanBuilder::from(&table_scan)
.project(vec![col("a"), col("b")])?
.build()?;
assert_fields_eq(&plan, vec!["a", "b"]);
let expected = "Projection: #a, #b\
\n TableScan: test projection=Some([0, 1])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn table_limit() -> Result<()> {
let table_scan = test_table_scan()?;
assert_eq!(3, table_scan.schema().fields().len());
assert_fields_eq(&table_scan, vec!["a", "b", "c"]);
let plan = LogicalPlanBuilder::from(&table_scan)
.project(vec![col("c"), col("a")])?
.limit(5)?
.build()?;
assert_fields_eq(&plan, vec!["c", "a"]);
let expected = "Limit: 5\
\n Projection: #c, #a\
\n TableScan: test projection=Some([0, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn table_scan_without_projection() -> Result<()> {
let table_scan = test_table_scan()?;
let plan = LogicalPlanBuilder::from(&table_scan).build()?;
// should expand projection to all columns without projection
let expected = "TableScan: test projection=Some([0, 1, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
#[test]
fn table_scan_with_literal_projection() -> Result<()> {
let table_scan = test_table_scan()?;
let plan = LogicalPlanBuilder::from(&table_scan)
.project(vec![lit(1_i64), lit(2_i64)])?
.build()?;
let expected = "Projection: Int64(1), Int64(2)\
\n TableScan: test projection=Some([0])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
/// tests that it removes unused columns in projections
#[test]
fn table_unused_column() -> Result<()> {
let table_scan = test_table_scan()?;
assert_eq!(3, table_scan.schema().fields().len());
assert_fields_eq(&table_scan, vec!["a", "b", "c"]);
// we never use "b" in the first projection => remove it
let plan = LogicalPlanBuilder::from(&table_scan)
.project(vec![col("c"), col("a"), col("b")])?
.filter(col("c").gt(lit(1)))?
.aggregate(vec![col("c")], vec![max(col("a"))])?
.build()?;
assert_fields_eq(&plan, vec!["c", "MAX(a)"]);
let expected = "\
Aggregate: groupBy=[[#c]], aggr=[[MAX(#a)]]\
\n Filter: #c Gt Int32(1)\
\n Projection: #c, #a\
\n TableScan: test projection=Some([0, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
/// tests that it removes un-needed projections
#[test]
fn table_unused_projection() -> Result<()> {
let table_scan = test_table_scan()?;
assert_eq!(3, table_scan.schema().fields().len());
assert_fields_eq(&table_scan, vec!["a", "b", "c"]);
// there is no need for the first projection
let plan = LogicalPlanBuilder::from(&table_scan)
.project(vec![col("b")])?
.project(vec![lit(1).alias("a")])?
.build()?;
assert_fields_eq(&plan, vec!["a"]);
let expected = "\
Projection: Int32(1) AS a\
\n TableScan: test projection=Some([0])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
/// tests that it removes an aggregate is never used downstream
#[test]
fn table_unused_aggregate() -> Result<()> {
let table_scan = test_table_scan()?;
assert_eq!(3, table_scan.schema().fields().len());
assert_fields_eq(&table_scan, vec!["a", "b", "c"]);
// we never use "min(b)" => remove it
let plan = LogicalPlanBuilder::from(&table_scan)
.aggregate(vec![col("a"), col("c")], vec![max(col("b")), min(col("b"))])?
.filter(col("c").gt(lit(1)))?
.project(vec![col("c"), col("a"), col("MAX(b)")])?
.build()?;
assert_fields_eq(&plan, vec!["c", "a", "MAX(b)"]);
let expected = "\
Projection: #c, #a, #MAX(b)\
\n Filter: #c Gt Int32(1)\
\n Aggregate: groupBy=[[#a, #c]], aggr=[[MAX(#b)]]\
\n TableScan: test projection=Some([0, 1, 2])";
assert_optimized_plan_eq(&plan, expected);
Ok(())
}
fn assert_optimized_plan_eq(plan: &LogicalPlan, expected: &str) {
let optimized_plan = optimize(plan).expect("failed to optimize plan");
let formatted_plan = format!("{:?}", optimized_plan);
assert_eq!(formatted_plan, expected);
}
fn optimize(plan: &LogicalPlan) -> Result<LogicalPlan> {
let mut rule = ProjectionPushDown::new();
rule.optimize(plan)
}
}
| 32.512605 | 94 | 0.523391 |
33c719be85fbc3fab4fb5b39dc588934b1736f4f | 8,866 | use bevy::{prelude::*, ecs::event::Events};
use bevy_kira_audio::AudioChannel;
use bevy_rapier3d::prelude::*;
use iyes_loopless::prelude::*;
use crate::{
assets::audio::{AudioHandleStorage, AudioCollection, DropAudioChannel, BackgroundAudioChannel},
glue::Glue,
movement::WASDMovement,
constants::{COL_GROUP_EJECTED_TOY, COL_GROUP_TOY_EJECTION_SHELV, COL_GROUP_GLASS},
toy::ToySensor,
GameState
};
#[derive(Default)]
pub struct ClawPlugin;
impl Plugin for ClawPlugin {
fn build(&self, app: &mut App) {
app
.add_event::<ReleaseClawEvent>()
.add_event::<ToyCatchEvent>()
.add_system_set(
ConditionSet::new()
.run_in_state(GameState::InGame)
.with_system(claw_lift_sync_system)
.with_system(release_claw_with_keyboard_system)
.with_system(claw_lift_activation_system.run_on_event::<ReleaseClawEvent>())
.with_system(claw_lift_system)
.with_system(claw_return_system)
.with_system(claw_manual_control_system)
.with_system(claw_stopper_event_manager_system)
.into()
);
}
}
pub struct ReleaseClawEvent;
pub struct ToyCatchEvent;
pub enum ClawControllerState {
Locked,
Manual,
ReturnToBase(Vec3)
}
#[derive(Debug)]
pub enum ClawLiftState {
Off,
Down,
Wait(f32),
Up
}
#[derive(Component)]
pub struct ClawController(pub ClawControllerState);
#[derive(Component)]
pub struct ClawLift(pub ClawLiftState);
#[derive(Component)]
pub struct ClawObject;
#[derive(Component)]
pub struct ClawSensor;
#[derive(Component)]
pub struct ClawStopper;
#[derive(Component)]
pub struct PositionLock;
impl ClawController {
pub const BASE_POS: [f32; 3] = [0.54, 3.65, 0.54];
pub const STEP: f32 = 1.2;
}
impl ClawLift {
pub const START_HEIGHT: f32 = 3.65;
pub const SPEED: f32 = 1.0;
}
const DROP_SFX: [AudioCollection; 6] = [
AudioCollection::Drop1,
AudioCollection::Drop2,
AudioCollection::Drop3,
AudioCollection::Drop4,
AudioCollection::Drop5,
AudioCollection::Drop6,
];
fn claw_lift_sync_system(
claw_object_query: Query<&Transform, With<ClawController>>,
mut claw_lift_query: Query<&mut Transform, (With<ClawLift>, Without<ClawController>)>,
) {
if let (Ok(claw_object_position), Ok(mut claw_lift_position)) = (
claw_object_query.get_single(),claw_lift_query.get_single_mut()
) {
let mut next_position = claw_object_position.translation;
next_position.y = claw_lift_position.translation.y;
claw_lift_position.translation = next_position.into();
}
}
fn claw_lift_activation_system(
audio_drop: Res<AudioChannel<DropAudioChannel>>,
audio_background: Res<AudioChannel<BackgroundAudioChannel>>,
audio_storage: Res<AudioHandleStorage>,
mut claw_lift_query: Query<&mut ClawLift>,
mut claw_controller_query: Query<&mut ClawController>,
) {
if let (Ok(mut claw_lift), Ok(mut claw_controller)) = (
claw_lift_query.get_single_mut(), claw_controller_query.get_single_mut()
) {
if let Some(drop_sfx) = audio_storage.get_random(&DROP_SFX) {
audio_background.stop();
audio_drop.set_volume(1.5);
audio_drop.play(drop_sfx.clone());
}
claw_controller.0 = ClawControllerState::Locked;
claw_lift.0 = ClawLiftState::Down;
}
}
fn release_claw_with_keyboard_system(
keyboard: Res<Input<KeyCode>>,
mut events: EventWriter<ReleaseClawEvent>
) {
if keyboard.just_pressed(KeyCode::Return) {
events.send(ReleaseClawEvent);
}
}
fn claw_lift_system(
time: Res<Time>,
mut collision_events: EventReader<CollisionEvent>,
mut toy_catch_events: EventWriter<ToyCatchEvent>,
mut claw_lift_query: Query<(&mut ClawLift, &mut Transform)>,
claw_stopper_query: Query<Entity, With<ClawStopper>>,
claw_sensor_query: Query<Entity, With<ClawSensor>>,
toy_sensor_query: Query<Entity, With<ToySensor>>,
mut claw_controller_query: Query<(&mut ClawController, &Transform), Without<ClawLift>>,
parent_query: Query<&Parent>,
mut commands: Commands,
) {
if let Ok((mut claw_lift, mut claw_lift_position)) = claw_lift_query.get_single_mut() {
let height = claw_lift_position.translation.y;
match claw_lift.0 {
ClawLiftState::Down => {
claw_lift_position.translation.y -= ClawLift::SPEED * time.delta_seconds();
if let Ok(claw_stopper) = claw_stopper_query.get_single() {
for event in collision_events.iter() {
// println!("Received collision event: {:?}", event);
if let CollisionEvent::Started(entity1, entity2, _) = event {
let entities = [entity1, entity2];
if let Ok(claw_sensor) = claw_sensor_query.get_single() {
for toy_sensor in toy_sensor_query.iter() {
if entities.into_iter().any(|entity| entity == &claw_sensor)
&& entities.into_iter().any(|entity| entity == &toy_sensor) {
if let Ok(toy) = parent_query.get(toy_sensor) {
commands.entity(claw_sensor).insert(Glue(toy.0));
toy_catch_events.send(ToyCatchEvent);
}
}
}
}
if entities.into_iter().any(|entity| entity == &claw_stopper) {
claw_lift.0 = ClawLiftState::Wait(1.0);
break;
}
}
}
}
},
ClawLiftState::Wait(seconds_remain) => {
if seconds_remain > 0.0 {
claw_lift.0 = ClawLiftState::Wait(seconds_remain - time.delta_seconds());
} else {
claw_lift.0 = ClawLiftState::Up;
}
}
ClawLiftState::Up => {
if height <= ClawLift::START_HEIGHT {
claw_lift_position.translation.y += ClawLift::SPEED * time.delta_seconds();
} else {
if let Ok((mut claw_controller, transform)) = claw_controller_query.get_single_mut() {
claw_controller.0 = ClawControllerState::ReturnToBase(transform.translation);
}
claw_lift_position.translation.y = ClawLift::START_HEIGHT;
claw_lift.0 = ClawLiftState::Off;
}
},
_ => {}
}
}
}
fn claw_return_system(
time: Res<Time>,
mut claw_controller_query: Query<(&mut ClawController, &mut Transform)>,
glue_query: Query<(Entity, &Glue), With<ClawSensor>>,
mut commands: Commands,
) {
if let Ok((mut claw_controller, mut transform)) = claw_controller_query.get_single_mut() {
if let ClawControllerState::ReturnToBase(start_pos) = claw_controller.0 {
let base = Vec3::from(ClawController::BASE_POS);
let current_diff = base - transform.translation;
let start_diff = base - start_pos;
let step = start_diff / ClawController::STEP * time.delta_seconds();
if current_diff.abs().max_element() > step.abs().max_element() {
transform.translation += step;
} else {
if let Ok((entity, glue)) = glue_query.get_single() {
commands.entity(entity).remove::<Glue>();
commands.entity(glue.0).insert(CollisionGroups::new(
COL_GROUP_EJECTED_TOY,
COL_GROUP_GLASS + COL_GROUP_TOY_EJECTION_SHELV
));
}
claw_controller.0 = ClawControllerState::Locked;
commands.insert_resource(NextState(GameState::ResultsMenu));
}
}
}
}
fn claw_manual_control_system(
mut claw_controller_query: Query<(Entity, &ClawController), Changed<ClawController>>,
mut commands: Commands,
) {
if let Ok((entity, claw_controller)) = claw_controller_query.get_single_mut() {
if let ClawControllerState::Manual = claw_controller.0 {
commands.entity(entity).insert(WASDMovement);
} else {
commands.entity(entity).remove::<WASDMovement>();
}
}
}
fn claw_stopper_event_manager_system(
mut events: ResMut<Events<CollisionEvent>>,
) {
events.update();
} | 35.464 | 106 | 0.593842 |
284e5d22ab05d7022eb308e30349fcb0c8078d5d | 5,806 | /*
Copyright (C) 2018-2019 [email protected]
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use super::super::super::super::iced_constants::IcedConstants;
use super::super::super::test_utils::from_str_conv::to_vec_u8;
use super::super::super::test_utils::{create_decoder, get_formatter_unit_tests_dir};
use super::super::super::tests::misc;
use super::super::super::tests::mnemonic_opts_parser::MnemonicOptionsTestParser;
use super::super::super::*;
use super::super::info::InstrOpInfo;
use super::super::regs::Registers;
use super::fmt_factory;
#[cfg(not(feature = "std"))]
use alloc::string::String;
#[test]
fn methods_panic_if_invalid_operand_or_instruction_operand() {
misc::methods_panic_if_invalid_operand_or_instruction_operand(|| fmt_factory::create());
}
#[test]
fn test_op_index() {
misc::test_op_index(|| fmt_factory::create());
}
#[test]
fn register_is_not_too_big() {
const MAX_VALUE: u32 = IcedConstants::NUMBER_OF_REGISTERS as u32 - 1 + Registers::EXTRA_REGISTERS;
const_assert!(MAX_VALUE < (1 << InstrOpInfo::TEST_REGISTER_BITS));
const_assert!(MAX_VALUE >= (1 << (InstrOpInfo::TEST_REGISTER_BITS - 1)));
}
#[test]
fn verify_default_formatter_options() {
let options = FormatterOptions::with_nasm();
assert!(!options.uppercase_prefixes());
assert!(!options.uppercase_mnemonics());
assert!(!options.uppercase_registers());
assert!(!options.uppercase_keywords());
assert!(!options.uppercase_decorators());
assert!(!options.uppercase_all());
assert_eq!(0, options.first_operand_char_index());
assert_eq!(0, options.tab_size());
assert!(!options.space_after_operand_separator());
assert!(!options.space_after_memory_bracket());
assert!(!options.space_between_memory_add_operators());
assert!(!options.space_between_memory_mul_operators());
assert!(!options.scale_before_index());
assert!(!options.always_show_scale());
assert!(!options.always_show_segment_register());
assert!(!options.show_zero_displacements());
assert_eq!("", options.hex_prefix());
assert_eq!("h", options.hex_suffix());
assert_eq!(4, options.hex_digit_group_size());
assert_eq!("", options.decimal_prefix());
assert_eq!("", options.decimal_suffix());
assert_eq!(3, options.decimal_digit_group_size());
assert_eq!("", options.octal_prefix());
assert_eq!("o", options.octal_suffix());
assert_eq!(4, options.octal_digit_group_size());
assert_eq!("", options.binary_prefix());
assert_eq!("b", options.binary_suffix());
assert_eq!(4, options.binary_digit_group_size());
assert_eq!("", options.digit_separator());
assert!(!options.leading_zeroes());
assert!(options.uppercase_hex());
assert!(options.small_hex_numbers_in_decimal());
assert!(options.add_leading_zero_to_hex_numbers());
assert_eq!(NumberBase::Hexadecimal, options.number_base());
assert!(options.branch_leading_zeroes());
assert!(!options.signed_immediate_operands());
assert!(options.signed_memory_displacements());
assert!(!options.displacement_leading_zeroes());
assert_eq!(MemorySizeOptions::Default, options.memory_size_options());
assert!(!options.rip_relative_addresses());
assert!(options.show_branch_size());
assert!(options.use_pseudo_ops());
assert!(!options.show_symbol_address());
assert!(!options.prefer_st0());
assert_eq!(CC_b::b, options.cc_b());
assert_eq!(CC_ae::ae, options.cc_ae());
assert_eq!(CC_e::e, options.cc_e());
assert_eq!(CC_ne::ne, options.cc_ne());
assert_eq!(CC_be::be, options.cc_be());
assert_eq!(CC_a::a, options.cc_a());
assert_eq!(CC_p::p, options.cc_p());
assert_eq!(CC_np::np, options.cc_np());
assert_eq!(CC_l::l, options.cc_l());
assert_eq!(CC_ge::ge, options.cc_ge());
assert_eq!(CC_le::le, options.cc_le());
assert_eq!(CC_g::g, options.cc_g());
assert!(!options.show_useless_prefixes());
assert!(!options.gas_naked_registers());
assert!(!options.gas_show_mnemonic_size_suffix());
assert!(!options.gas_space_after_memory_operand_comma());
assert!(options.masm_add_ds_prefix32());
assert!(options.masm_symbol_displ_in_brackets());
assert!(options.masm_displ_in_brackets());
assert!(!options.nasm_show_sign_extended_immediate_size());
}
#[test]
fn verify_formatter_options() {
assert_eq!(&FormatterOptions::with_nasm(), NasmFormatter::new().options());
}
#[test]
fn format_mnemonic_options() {
let mut path = get_formatter_unit_tests_dir();
path.push("Nasm");
path.push("MnemonicOptions.txt");
for tc in MnemonicOptionsTestParser::new(&path) {
let hex_bytes = to_vec_u8(&tc.hex_bytes).unwrap();
let mut decoder = create_decoder(tc.bitness, &hex_bytes, DecoderOptions::NONE).0;
let instruction = decoder.decode();
assert_eq!(tc.code, instruction.code());
let mut formatter = fmt_factory::create();
let mut output = String::new();
formatter.format_mnemonic_options(&instruction, &mut output, tc.flags);
assert_eq!(tc.formatted_string, output);
}
}
| 40.601399 | 99 | 0.761109 |
f8e6ea1650a81617f38b887835a16ed1fe12cf2b | 3,907 | mod pty;
mod err;
use ::descriptor::Descriptor;
use ::libc;
pub use self::err::{ForkError, Result};
pub use self::pty::{Master, MasterError};
pub use self::pty::{Slave, SlaveError};
use std::ffi::CString;
#[derive(Debug)]
pub enum Fork {
// Parent child's pid and master's pty.
Parent(libc::pid_t, Master),
// Child pid 0.
Child(Slave),
}
impl Fork {
/// The constructor function `new` forks the program
/// and returns the current pid.
pub fn new(path: &'static str) -> Result<Self> {
match Master::new(CString::new(path).ok().unwrap_or_default().as_ptr()) {
Err(cause) => Err(ForkError::BadMaster(cause)),
Ok(master) => unsafe {
if let Some(cause) = master.grantpt().err().or(master.unlockpt().err()) {
Err(ForkError::BadMaster(cause))
} else {
match libc::fork() {
-1 => Err(ForkError::Failure),
0 => {
match master.ptsname() {
Err(cause) => Err(ForkError::BadMaster(cause)),
Ok(name) => Fork::from_pts(name),
}
}
pid => Ok(Fork::Parent(pid, master)),
}
}
},
}
}
/// The constructor function `from_pts` is a private
/// extention from the constructor function `new` who
/// prepares and returns the child.
fn from_pts(ptsname: *const ::libc::c_char) -> Result<Self> {
unsafe {
if libc::setsid() == -1 {
Err(ForkError::SetsidFail)
} else {
match Slave::new(ptsname) {
Err(cause) => Err(ForkError::BadSlave(cause)),
Ok(slave) => {
if let Some(cause) = slave.dup2(libc::STDIN_FILENO)
.err()
.or(slave.dup2(libc::STDOUT_FILENO)
.err()
.or(slave.dup2(libc::STDERR_FILENO).err())) {
Err(ForkError::BadSlave(cause))
} else {
Ok(Fork::Child(slave))
}
}
}
}
}
}
/// The constructor function `from_ptmx` forks the program
/// and returns the current pid for a default PTMX's path.
pub fn from_ptmx() -> Result<Self> {
Fork::new(::DEFAULT_PTMX)
}
/// Waits until it's terminated.
pub fn wait(&self) -> Result<libc::pid_t> {
match *self {
Fork::Child(_) => Err(ForkError::IsChild),
Fork::Parent(pid, _) => {
loop {
unsafe {
match libc::waitpid(pid, &mut 0, 0) {
0 => continue,
-1 => return Err(ForkError::WaitpidFail),
_ => return Ok(pid),
}
}
}
}
}
}
/// The function `is_parent` returns the pid or parent
/// or none.
pub fn is_parent(&self) -> Result<Master> {
match *self {
Fork::Child(_) => Err(ForkError::IsChild),
Fork::Parent(_, ref master) => Ok(master.clone()),
}
}
/// The function `is_child` returns the pid or child
/// or none.
pub fn is_child(&self) -> Result<&Slave> {
match *self {
Fork::Parent(_, _) => Err(ForkError::IsParent),
Fork::Child(ref slave) => Ok(slave),
}
}
}
impl Drop for Fork {
fn drop(&mut self) {
match *self {
Fork::Parent(_, ref master) => Descriptor::drop(master),
_ => {}
}
}
}
| 32.02459 | 89 | 0.444075 |
f861d63aba0f3c4f77cf6927c732b0bb3624ef61 | 7,430 | /// This declares a list of types which can be allocated by `Arena`.
///
/// The `few` modifier will cause allocation to use the shared arena and recording the destructor.
/// This is faster and more memory efficient if there's only a few allocations of the type.
/// Leaving `few` out will cause the type to get its own dedicated `TypedArena` which is
/// faster and more memory efficient if there is lots of allocations.
///
/// Specifying the `decode` modifier will add decode impls for `&T` and `&[T]` where `T` is the type
/// listed. These impls will appear in the implement_ty_decoder! macro.
#[macro_export]
macro_rules! arena_types {
($macro:path, $args:tt, $tcx:lifetime) => (
$macro!($args, [
[] layouts: rustc_target::abi::Layout, rustc_target::abi::Layout;
// AdtDef are interned and compared by address
[] adt_def: rustc_middle::ty::AdtDef, rustc_middle::ty::AdtDef;
[decode] tables: rustc_middle::ty::TypeckTables<$tcx>, rustc_middle::ty::TypeckTables<'_x>;
[] const_allocs: rustc_middle::mir::interpret::Allocation, rustc_middle::mir::interpret::Allocation;
// Required for the incremental on-disk cache
[few, decode] mir_keys: rustc_hir::def_id::DefIdSet, rustc_hir::def_id::DefIdSet;
[] region_scope_tree: rustc_middle::middle::region::ScopeTree, rustc_middle::middle::region::ScopeTree;
[] dropck_outlives:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx,
rustc_middle::traits::query::DropckOutlivesResult<'tcx>
>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y,
rustc_middle::traits::query::DropckOutlivesResult<'_z>
>
>;
[] normalize_projection_ty:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx,
rustc_middle::traits::query::NormalizationResult<'tcx>
>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y,
rustc_middle::traits::query::NormalizationResult<'_z>
>
>;
[] implied_outlives_bounds:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx,
Vec<rustc_middle::traits::query::OutlivesBound<'tcx>>
>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y,
Vec<rustc_middle::traits::query::OutlivesBound<'_z>>
>
>;
[] type_op_subtype:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, ()>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y, ()>
>;
[] type_op_normalize_poly_fn_sig:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::PolyFnSig<'tcx>>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y, rustc_middle::ty::PolyFnSig<'_z>>
>;
[] type_op_normalize_fn_sig:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::FnSig<'tcx>>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y, rustc_middle::ty::FnSig<'_z>>
>;
[] type_op_normalize_predicate:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Predicate<'tcx>>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y, rustc_middle::ty::Predicate<'_z>>
>;
[] type_op_normalize_ty:
rustc_middle::infer::canonical::Canonical<'tcx,
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Ty<'tcx>>
>,
rustc_middle::infer::canonical::Canonical<'_x,
rustc_middle::infer::canonical::QueryResponse<'_y, &'_z rustc_middle::ty::TyS<'_w>>
>;
[few] all_traits: Vec<rustc_hir::def_id::DefId>, Vec<rustc_hir::def_id::DefId>;
[few] privacy_access_levels: rustc_middle::middle::privacy::AccessLevels, rustc_middle::middle::privacy::AccessLevels;
[few] foreign_module: rustc_middle::middle::cstore::ForeignModule, rustc_middle::middle::cstore::ForeignModule;
[few] foreign_modules: Vec<rustc_middle::middle::cstore::ForeignModule>, Vec<rustc_middle::middle::cstore::ForeignModule>;
[] upvars_mentioned: rustc_data_structures::fx::FxIndexMap<rustc_hir::HirId, rustc_hir::Upvar>, rustc_data_structures::fx::FxIndexMap<rustc_hir::HirId, rustc_hir::Upvar>;
[] object_safety_violations: rustc_middle::traits::ObjectSafetyViolation, rustc_middle::traits::ObjectSafetyViolation;
[] codegen_unit: rustc_middle::mir::mono::CodegenUnit<$tcx>, rustc_middle::mir::mono::CodegenUnit<'_x>;
[] attribute: rustc_ast::ast::Attribute, rustc_ast::ast::Attribute;
[] name_set: rustc_data_structures::fx::FxHashSet<rustc_span::symbol::Symbol>, rustc_data_structures::fx::FxHashSet<rustc_span::symbol::Symbol>;
[] hir_id_set: rustc_hir::HirIdSet, rustc_hir::HirIdSet;
// Interned types
[] tys: rustc_middle::ty::TyS<$tcx>, rustc_middle::ty::TyS<'_x>;
// HIR query types
[few] indexed_hir: rustc_middle::hir::map::IndexedHir<$tcx>, rustc_middle::hir::map::IndexedHir<'_x>;
[few] hir_definitions: rustc_hir::definitions::Definitions, rustc_hir::definitions::Definitions;
[] hir_owner: rustc_middle::hir::Owner<$tcx>, rustc_middle::hir::Owner<'_x>;
[] hir_owner_nodes: rustc_middle::hir::OwnerNodes<$tcx>, rustc_middle::hir::OwnerNodes<'_x>;
// Note that this deliberately duplicates items in the `rustc_hir::arena`,
// since we need to allocate this type on both the `rustc_hir` arena
// (during lowering) and the `librustc_middle` arena (for decoding MIR)
[decode] asm_template: rustc_ast::ast::InlineAsmTemplatePiece, rustc_ast::ast::InlineAsmTemplatePiece;
// This is used to decode the &'tcx [Span] for InlineAsm's line_spans.
[decode] span: rustc_span::Span, rustc_span::Span;
], $tcx);
)
}
arena_types!(rustc_arena::declare_arena, [], 'tcx);
| 60.901639 | 182 | 0.600673 |
db61474a1c967e5a090cf98ead3960f77966e99e | 1,493 | // Crates that have the "proc-macro" crate type are only allowed to export
// procedural macros. So we cannot have one crate that defines procedural macros
// alongside other types of public APIs like traits and structs.
//
// For this project we are going to need a #[bitfield] macro but also a trait
// and some structs. We solve this by defining the trait and structs in this
// crate, defining the attribute macro in a separate bitfield-impl crate, and
// then re-exporting the macro from this crate so that users only have one crate
// that they need to import.
//
// From the perspective of a user of this crate, they get all the necessary APIs
// (macro, trait, struct) through the one bitfield crate.
pub use bitfield_impl::bitfield;
// TODO other things
use seq::seq;
pub trait Specifier {
const BITS: usize;
type SIGNATURE;
}
impl Specifier for bool {
const BITS: usize = 1;
type SIGNATURE = bool;
}
seq!(N in 1..=8 {
pub enum B#N {}
impl Specifier for B#N {
const BITS: usize = N;
type SIGNATURE = u8;
}
});
seq!(N in 9..=16 {
pub enum B#N {}
impl Specifier for B#N {
const BITS: usize = N;
type SIGNATURE = u16;
}
});
seq!(N in 17..=32 {
pub enum B#N {}
impl Specifier for B#N {
const BITS: usize = N;
type SIGNATURE = u32;
}
});
seq!(N in 33..=64 {
pub enum B#N {}
impl Specifier for B#N {
const BITS: usize = N;
type SIGNATURE = u64;
}
});
| 23.698413 | 80 | 0.637642 |
09dcd56a8de698acdf957e6cf1ef72ef51917469 | 3,005 | #![no_std]
#[macro_use]
extern crate alloc;
extern crate contract_ffi;
use alloc::prelude::v1::{String, Vec};
use contract_ffi::contract_api::pointers::{ContractPointer, UPointer};
use contract_ffi::contract_api::{
call_contract, create_purse, get_arg, get_uref, main_purse, read, revert,
transfer_from_purse_to_account, transfer_from_purse_to_purse, PurseTransferResult,
TransferResult,
};
use contract_ffi::key::Key;
use contract_ffi::uref::AccessRights;
use contract_ffi::value::account::{PublicKey, PurseId};
use contract_ffi::value::U512;
enum Error {
GetPosOuterURef = 1000,
GetPosInnerURef = 1001,
PurseToPurseTransfer = 1002,
UnableToSeedAccount = 1003,
UnknownCommand = 1004,
}
fn purse_to_key(p: PurseId) -> Key {
Key::URef(p.value())
}
fn get_pos_contract() -> ContractPointer {
let outer: UPointer<Key> = get_uref("pos")
.and_then(Key::to_u_ptr)
.unwrap_or_else(|| revert(Error::GetPosInnerURef as u32));
if let Some(ContractPointer::URef(inner)) = read::<Key>(outer).to_c_ptr() {
ContractPointer::URef(UPointer::new(inner.0, AccessRights::READ))
} else {
revert(Error::GetPosOuterURef as u32)
}
}
fn bond(pos: &ContractPointer, amount: &U512, source: PurseId) {
call_contract::<_, ()>(
pos.clone(),
&(POS_BOND, *amount, source),
&vec![purse_to_key(source)],
);
}
fn unbond(pos: &ContractPointer, amount: Option<U512>) {
call_contract::<_, ()>(pos.clone(), &(POS_UNBOND, amount), &Vec::<Key>::new());
}
const POS_BOND: &str = "bond";
const POS_UNBOND: &str = "unbond";
const TEST_BOND: &str = "bond";
const TEST_BOND_FROM_MAIN_PURSE: &str = "bond-from-main-purse";
const TEST_SEED_NEW_ACCOUNT: &str = "seed_new_account";
const TEST_UNBOND: &str = "unbond";
#[no_mangle]
pub extern "C" fn call() {
let pos_pointer = get_pos_contract();
let command: String = get_arg(0);
if command == TEST_BOND {
// Creates new purse with desired amount based on main purse and sends funds
let amount = get_arg(1);
let p1 = create_purse();
if transfer_from_purse_to_purse(main_purse(), p1, amount)
== PurseTransferResult::TransferError
{
revert(Error::PurseToPurseTransfer as u32);
}
bond(&pos_pointer, &amount, p1);
} else if command == TEST_BOND_FROM_MAIN_PURSE {
let amount = get_arg(1);
bond(&pos_pointer, &amount, main_purse());
} else if command == TEST_SEED_NEW_ACCOUNT {
let account: PublicKey = get_arg(1);
let amount: U512 = get_arg(2);
if transfer_from_purse_to_account(main_purse(), account, amount)
== TransferResult::TransferError
{
revert(Error::UnableToSeedAccount as u32);
}
} else if command == TEST_UNBOND {
let maybe_amount: Option<U512> = get_arg(1);
unbond(&pos_pointer, maybe_amount);
} else {
revert(Error::UnknownCommand as u32);
}
}
| 30.05 | 86 | 0.65624 |
8f455566ed3665562300c8d40180bee12f9f36a0 | 1,937 | use std::io;
use std::u8;
use std::io::Write;
use nom::{IResult, be_u8};
const BT_PROTOCOL: &'static [u8] = b"BitTorrent protocol";
const BT_PROTOCOL_LEN: u8 = 19;
/// `Protocol` information transmitted as part of the handshake.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Protocol {
BitTorrent,
Custom(Vec<u8>)
}
impl Protocol {
/// Create a `Protocol` from the given bytes.
pub fn from_bytes(bytes: &[u8]) -> IResult<&[u8], Protocol> {
parse_protocol(bytes)
}
/// Write the `Protocol` out to the given writer.
pub fn write_bytes<W>(&self, mut writer: W) -> io::Result<()>
where W: Write {
let (len, bytes) = match self {
&Protocol::BitTorrent => (BT_PROTOCOL_LEN as usize, &BT_PROTOCOL[..]),
&Protocol::Custom(ref prot) => (prot.len(), &prot[..])
};
try!(writer.write_all(&[len as u8][..]));
try!(writer.write_all(bytes));
Ok(())
}
/// Get the legth of the given protocol (does not include the length byte).
pub fn write_len(&self) -> usize {
match self {
&Protocol::BitTorrent => BT_PROTOCOL_LEN as usize,
&Protocol::Custom(ref custom) => custom.len()
}
}
}
fn parse_protocol(bytes: &[u8]) -> IResult<&[u8], Protocol> {
parse_real_protocol(bytes)
}
#[allow(unreachable_patterns, unused)]
fn parse_real_protocol(bytes: &[u8]) -> IResult<&[u8], Protocol> {
switch!(bytes, parse_raw_protocol,
// TODO: Move back to using constant here, for now, MIR compiler error occurs
b"BitTorrent protocol" => value!(Protocol::BitTorrent) |
custom => value!(Protocol::Custom(custom.to_vec()))
)
}
fn parse_raw_protocol(bytes: &[u8]) -> IResult<&[u8], &[u8]> {
do_parse!(bytes,
length: be_u8 >>
raw_protocol: take!(length) >>
(raw_protocol)
)
}
| 29.348485 | 88 | 0.583376 |
89b02e1423556914bba4a7e73bca252a19cd6ec8 | 127 | mod en_us;
pub use en_us::*;
pub const WELCOME_TO_MANDEL: &str = "Welcome to Mandel!";
pub const UNTITLED: &str = "Untitled";
| 21.166667 | 57 | 0.700787 |
223213be1ad3b7d49dd46f6346b6bdc3d75ca152 | 8,387 | use super::defs::*;
use super::parser::*;
use std::mem::size_of;
pub trait ElfHeader {
fn from_le_bytes(buf: &[u8]) -> Result<Self, ReadErr>
where
Self: Sized;
fn from_be_bytes(buf: &[u8]) -> Result<Self, ReadErr>
where
Self: Sized;
fn describe() -> String;
fn from_bytes(buf: &[u8], endianness: u8) -> Result<Self, String>
where
Self: Sized,
{
if endianness == ELF_DATA2LSB {
Self::from_le_bytes(buf)
} else {
Self::from_be_bytes(buf)
}
.map_err(|a| format!("failed to read {}: {}", Self::describe(), a))
}
}
// We do this because we can't access struct fields of a generic type
pub trait ElfXXEhdr<ElfXXAddr, ElfXXHalf, ElfXXWord, ElfXXOff> {
fn e_ident(&self) -> [u8; 16];
fn e_type(&self) -> ElfXXHalf;
fn e_machine(&self) -> ElfXXHalf;
fn e_version(&self) -> ElfXXWord;
fn e_entry(&self) -> ElfXXAddr;
fn e_phoff(&self) -> ElfXXOff;
fn e_shoff(&self) -> ElfXXOff;
fn e_flags(&self) -> ElfXXWord;
fn e_ehsize(&self) -> ElfXXHalf;
fn e_phentsize(&self) -> ElfXXHalf;
fn e_phnum(&self) -> ElfXXHalf;
fn e_shentsize(&self) -> ElfXXHalf;
fn e_shnum(&self) -> ElfXXHalf;
fn e_shstrndx(&self) -> ElfXXHalf;
}
pub trait ElfXXPhdr<ElfXXAddr, ElfXXWord, ElfXXOff, ElfXXXword> {
fn p_type(&self) -> ElfXXWord;
fn p_flags(&self) -> ElfXXWord;
fn p_offset(&self) -> ElfXXOff;
fn p_vaddr(&self) -> ElfXXAddr;
fn p_paddr(&self) -> ElfXXAddr;
fn p_filesz(&self) -> ElfXXXword;
fn p_memsz(&self) -> ElfXXXword;
fn p_align(&self) -> ElfXXXword;
}
pub trait ElfXXShdr<ElfXXAddr, ElfXXWord, ElfXXOff, ElfXXXword> {
fn sh_name(&self) -> ElfXXWord;
fn sh_type(&self) -> ElfXXWord;
fn sh_flags(&self) -> ElfXXXword;
fn sh_addr(&self) -> ElfXXAddr;
fn sh_offset(&self) -> ElfXXOff;
fn sh_size(&self) -> ElfXXXword;
fn sh_link(&self) -> ElfXXWord;
fn sh_info(&self) -> ElfXXWord;
fn sh_addralign(&self) -> ElfXXXword;
fn sh_entsize(&self) -> ElfXXXword;
}
macro_rules! read_field {
($name:ident, $field:ident) => {
$name
.$field()
.try_into()
.map_err(|_| format!("failed to read {}", stringify!($field)))
};
}
pub trait ElfXX<EhdrT, PhdrT, ShdrT, ElfXXAddr, ElfXXHalf, ElfXXWord, ElfXXOff, ElfXXXword>
where
EhdrT: ElfHeader + ElfXXEhdr<ElfXXAddr, ElfXXHalf, ElfXXWord, ElfXXOff>,
PhdrT: ElfHeader + ElfXXPhdr<ElfXXAddr, ElfXXWord, ElfXXOff, ElfXXXword>,
ShdrT: ElfHeader + ElfXXShdr<ElfXXAddr, ElfXXWord, ElfXXOff, ElfXXXword>,
u32: From<ElfXXWord>,
u64: From<ElfXXXword>,
ElfXXAddr: std::convert::TryInto<usize> + std::fmt::LowerHex,
ElfXXHalf: std::convert::Into<u16> + std::fmt::Display,
ElfXXWord: std::convert::TryInto<usize> + std::fmt::LowerHex,
ElfXXOff: std::convert::TryInto<usize> + std::fmt::Display,
ElfXXXword: std::convert::TryInto<usize>,
{
fn parse(buf: &[u8], ident: &ParsedIdent, elf: &mut ParsedElf) -> Result<(), String> {
let ehdr_size = size_of::<EhdrT>();
if buf.len() < ehdr_size {
return Err(String::from("file is smaller than ELF file header"));
}
let ehdr = EhdrT::from_bytes(&buf[0..ehdr_size], ident.endianness)?;
elf.shstrndx = ehdr.e_shstrndx().into();
Self::parse_ehdr(&ehdr, elf);
Self::parse_phdrs(buf, ident.endianness, &ehdr, elf)?;
Self::parse_shdrs(buf, ident.endianness, &ehdr, elf)?;
Ok(())
}
fn parse_ehdr(ehdr: &EhdrT, elf: &mut ParsedElf) {
Self::push_ehdr_info(ehdr, &mut elf.information);
Self::add_ehdr_ranges(ehdr, &mut elf.ranges);
}
fn push_ehdr_info(ehdr: &EhdrT, information: &mut Vec<InfoTuple>) {
information.push(("e_type", "Type", type_to_string(ehdr.e_type().into())));
information.push((
"e_machine",
"Architecture",
machine_to_string(ehdr.e_machine().into()),
));
information.push(("e_entry", "Entrypoint", format!("0x{:x}", ehdr.e_entry())));
information.push((
"ph",
"Program headers",
format!(
"<span id='info_e_phnum'>{}</span> * \
<span id='info_e_phentsize'>{}</span> @ \
<span id='info_e_phoff'>{}</span>",
ehdr.e_phnum(),
ehdr.e_phentsize(),
ehdr.e_phoff()
),
));
information.push((
"sh",
"Section headers",
format!(
"<span id='info_e_shnum'>{}</span> * \
<span id='info_e_shentsize'>{}</span> @ \
<span id='info_e_shoff'>{}</span>",
ehdr.e_shnum(),
ehdr.e_shentsize(),
ehdr.e_shoff()
),
));
if u32::from(ehdr.e_flags()) != 0 {
information.push(("e_flags", "Flags", format!("0x{:x}", ehdr.e_flags())));
}
}
fn add_ehdr_ranges(ehdr: &EhdrT, ranges: &mut Ranges);
fn parse_phdrs(
buf: &[u8],
endianness: u8,
ehdr: &EhdrT,
elf: &mut ParsedElf,
) -> Result<(), String> {
let mut start = read_field!(ehdr, e_phoff)?;
let phsize = size_of::<PhdrT>();
for i in 0..ehdr.e_phnum().into() {
let phdr = PhdrT::from_bytes(&buf[start..start + phsize], endianness)?;
let parsed = Self::parse_phdr(&phdr)?;
let ranges = &mut elf.ranges;
if parsed.file_offset != 0 && parsed.file_size != 0 {
ranges.add_range(parsed.file_offset, parsed.file_size, RangeType::Segment(i));
}
ranges.add_range(start, phsize, RangeType::ProgramHeader(i as u32));
Self::add_phdr_ranges(start, ranges);
elf.phdrs.push(parsed);
start += phsize;
}
Ok(())
}
fn parse_phdr(phdr: &PhdrT) -> Result<ParsedPhdr, String> {
let file_offset = read_field!(phdr, p_offset)?;
let file_size = read_field!(phdr, p_filesz)?;
let vaddr = read_field!(phdr, p_vaddr)?;
let memsz = read_field!(phdr, p_memsz)?;
let alignment = read_field!(phdr, p_align)?;
Ok(ParsedPhdr {
ptype: phdr.p_type().into(),
flags: pflags_to_string(phdr.p_flags().into()),
file_offset,
file_size,
vaddr,
memsz,
alignment,
})
}
fn add_phdr_ranges(start: usize, ranges: &mut Ranges);
fn parse_shdrs(
buf: &[u8],
endianness: u8,
ehdr: &EhdrT,
elf: &mut ParsedElf,
) -> Result<(), String> {
let mut start = read_field!(ehdr, e_shoff)?;
let shsize = size_of::<ShdrT>();
for i in 0..ehdr.e_shnum().into() {
let shdr = ShdrT::from_bytes(&buf[start..start + shsize], endianness)?;
let parsed = Self::parse_shdr(buf, endianness, &shdr)?;
let ranges = &mut elf.ranges;
if parsed.file_offset != 0 && parsed.size != 0 && parsed.shtype != SHT_NOBITS {
ranges.add_range(parsed.file_offset, parsed.size, RangeType::Section(i));
}
ranges.add_range(start, shsize, RangeType::SectionHeader(i as u32));
Self::add_shdr_ranges(start, ranges);
elf.shdrs.push(parsed);
start += shsize;
}
Ok(())
}
fn parse_shdr(_buf: &[u8], _endianness: u8, shdr: &ShdrT) -> Result<ParsedShdr, String> {
let name = read_field!(shdr, sh_name)?;
let addr = read_field!(shdr, sh_addr)?;
let file_offset = read_field!(shdr, sh_offset)?;
let size = read_field!(shdr, sh_size)?;
let link = read_field!(shdr, sh_link)?;
let info = read_field!(shdr, sh_info)?;
let addralign = read_field!(shdr, sh_addralign)?;
let entsize = read_field!(shdr, sh_entsize)?;
Ok(ParsedShdr {
name,
shtype: shdr.sh_type().into(),
flags: shdr.sh_flags().into(),
addr,
file_offset,
size,
link,
info,
addralign,
entsize,
})
}
fn add_shdr_ranges(start: usize, ranges: &mut Ranges);
}
| 31.411985 | 94 | 0.557053 |
fcc5dc6d5627f2e91bcd3c6f064a144e26a22f66 | 6,297 | // Std
use std::io::Write;
// Internal
use crate::utils;
use crate::Generator;
use clap::*;
/// Generate fish completion file
///
/// Note: The fish generator currently only supports named options (-o/--option), not positional arguments.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Fish;
impl Generator for Fish {
fn file_name(&self, name: &str) -> String {
format!("{}.fish", name)
}
fn generate(&self, app: &App, buf: &mut dyn Write) {
let command = app.get_bin_name().unwrap();
let mut buffer = String::new();
gen_fish_inner(command, &[], app, &mut buffer);
w!(buf, buffer.as_bytes());
}
}
// Escape string inside single quotes
fn escape_string(string: &str) -> String {
string.replace("\\", "\\\\").replace("'", "\\'")
}
fn gen_fish_inner(root_command: &str, parent_commands: &[&str], app: &App, buffer: &mut String) {
debug!("gen_fish_inner");
// example :
//
// complete
// -c {command}
// -d "{description}"
// -s {short}
// -l {long}
// -a "{possible_arguments}"
// -r # if require parameter
// -f # don't use file completion
// -n "__fish_use_subcommand" # complete for command "myprog"
// -n "__fish_seen_subcommand_from subcmd1" # complete for command "myprog subcmd1"
let mut basic_template = format!("complete -c {}", root_command);
if parent_commands.is_empty() {
if app.has_subcommands() {
basic_template.push_str(" -n \"__fish_use_subcommand\"");
}
} else {
basic_template.push_str(
format!(
" -n \"{}\"",
parent_commands
.iter()
.map(|command| format!("__fish_seen_subcommand_from {}", command))
.chain(
app.get_subcommands()
.map(|command| format!("not __fish_seen_subcommand_from {}", command))
)
.collect::<Vec<_>>()
.join("; and ")
)
.as_str(),
);
}
debug!("gen_fish_inner: parent_commands={:?}", parent_commands);
for option in app.get_opts() {
let mut template = basic_template.clone();
if let Some(shorts) = option.get_short_and_visible_aliases() {
for short in shorts {
template.push_str(format!(" -s {}", short).as_str());
}
}
if let Some(longs) = option.get_long_and_visible_aliases() {
for long in longs {
template.push_str(format!(" -l {}", escape_string(long)).as_str());
}
}
if let Some(data) = option.get_about() {
template.push_str(format!(" -d '{}'", escape_string(data)).as_str());
}
template.push_str(value_completion(option).as_str());
buffer.push_str(template.as_str());
buffer.push('\n');
}
for flag in utils::flags(app) {
let mut template = basic_template.clone();
if let Some(shorts) = flag.get_short_and_visible_aliases() {
for short in shorts {
template.push_str(format!(" -s {}", short).as_str());
}
}
if let Some(longs) = flag.get_long_and_visible_aliases() {
for long in longs {
template.push_str(format!(" -l {}", escape_string(long)).as_str());
}
}
if let Some(data) = flag.get_about() {
template.push_str(format!(" -d '{}'", escape_string(data)).as_str());
}
buffer.push_str(template.as_str());
buffer.push('\n');
}
for subcommand in app.get_subcommands() {
let mut template = basic_template.clone();
template.push_str(" -f");
template.push_str(format!(" -a \"{}\"", &subcommand.get_name()).as_str());
if let Some(data) = subcommand.get_about() {
template.push_str(format!(" -d '{}'", escape_string(data)).as_str())
}
buffer.push_str(template.as_str());
buffer.push('\n');
}
// generate options of subcommands
for subcommand in app.get_subcommands() {
let mut parent_commands: Vec<_> = parent_commands.into();
parent_commands.push(subcommand.get_name());
gen_fish_inner(root_command, &parent_commands, subcommand, buffer);
}
}
fn value_completion(option: &Arg) -> String {
if !option.is_set(ArgSettings::TakesValue) {
return "".to_string();
}
if let Some(data) = option.get_possible_values() {
// We return the possible values with their own empty description e.g. {a\t,b\t}
// this makes sure that a and b don't get the description of the option or argument
format!(
" -r -f -a \"{{{}}}\"",
data.iter()
.filter_map(|value| if value.is_hidden() {
None
} else {
Some(format!(
"{}\t{}",
value.get_name(),
value.get_about().unwrap_or_default()
))
})
.collect::<Vec<_>>()
.join(",")
)
} else {
// NB! If you change this, please also update the table in `ValueHint` documentation.
match option.get_value_hint() {
ValueHint::Unknown => " -r",
// fish has no built-in support to distinguish these
ValueHint::AnyPath | ValueHint::FilePath | ValueHint::ExecutablePath => " -r -F",
ValueHint::DirPath => " -r -f -a \"(__fish_complete_directories)\"",
// It seems fish has no built-in support for completing command + arguments as
// single string (CommandString). Complete just the command name.
ValueHint::CommandString | ValueHint::CommandName => {
" -r -f -a \"(__fish_complete_command)\""
}
ValueHint::Username => " -r -f -a \"(__fish_complete_users)\"",
ValueHint::Hostname => " -r -f -a \"(__fish_print_hostnames)\"",
// Disable completion for others
_ => " -r -f",
}
.to_string()
}
}
| 33.494681 | 107 | 0.531841 |
6154f5ec657202895855685b0633da324b2240f9 | 13,538 | use parity_scale_codec::Encode;
use parity_scale_codec_derive::{Decode, Encode};
use crate::{build_solidity, first_error, first_warning, no_errors, parse_and_resolve};
use solang::Target;
#[derive(Debug, PartialEq, Encode, Decode)]
struct RevertReturn(u32, String);
#[test]
fn contract_name() {
let ns = parse_and_resolve(
"contract test {
function test() public {}
}",
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"function cannot have same name as the contract"
);
let ns = parse_and_resolve(
"contract test {
enum test { a}
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"test is already defined as a contract name"
);
let ns = parse_and_resolve(
"contract test {
bool test;
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"test is already defined as a contract name"
);
let ns = parse_and_resolve(
"contract test {
struct test { bool a; }
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"test is already defined as a contract name"
);
let ns = parse_and_resolve(
"contract test {
function f() public {
int test;
}
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"declaration of ‘test’ shadows contract name"
);
let ns = parse_and_resolve(
"contract test {
function f(int test) public {
}
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"declaration of ‘test’ shadows contract name"
);
let ns = parse_and_resolve(
"contract test {
function f() public returns (int test) {
return 0;
}
}",
Target::default_substrate(),
);
assert_eq!(
first_warning(ns.diagnostics),
"declaration of ‘test’ shadows contract name"
);
let ns = parse_and_resolve(
r#"
contract a {
function x() public {
b y = new b();
}
}
contract b {
function x() public {
a y = new a();
}
}
"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"circular reference creating contract ‘a’"
);
let ns = parse_and_resolve(
r#"
contract a {
function x() public {
b y = new b();
}
}
contract b {
function x() public {
c y = new c();
}
}
contract c {
function x() public {
a y = new a();
}
}
"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"circular reference creating contract ‘a’"
);
}
#[test]
fn contract_type() {
let ns = parse_and_resolve(
r#"
contract printer {
function test() public {
print("In f.test()");
}
}
contract foo {
function test1(printer x) public {
address y = x;
}
function test2(address x) public {
printer y = printer(x);
}
}"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"implicit conversion to address from contract printer not allowed"
);
let ns = parse_and_resolve(
r#"
contract printer {
function test() public {
printer x = printer(address(102));
}
}"#,
Target::default_substrate(),
);
no_errors(ns.diagnostics);
let ns = parse_and_resolve(
r#"
contract printer {
function test() public {
print("In f.test()");
}
}
contract foo {
function test1(printer x) public {
address y = 102;
}
}"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"expected ‘address’, found integer"
);
let ns = parse_and_resolve(
r#"
contract printer {
function test() public {
print("In f.test()");
}
}
contract foo {
function test1() public {
printer y = 102;
}
}"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"expected ‘contract printer’, found integer"
);
let ns = parse_and_resolve(
r#"
contract printer {
function test() public returns (printer) {
return new printer();
}
}"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"new cannot construct current contract ‘printer’"
);
let ns = parse_and_resolve(
r#"
contract printer {
function test() public returns (printer) {
return new printer({});
}
}"#,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"new cannot construct current contract ‘printer’"
);
}
#[test]
fn external_call() {
let ns = parse_and_resolve(
r##"
contract c {
b x;
function test() public returns (int32) {
return x.get_x();
}
}
contract b {
function get_x(int32 t) public returns (int32) {
return 1;
}
}"##,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"function expects 1 arguments, 0 provided"
);
let ns = parse_and_resolve(
r##"
contract c {
b x;
function test() public returns (int32) {
return x.get_x({b: false});
}
}
contract b {
function get_x(int32 t, bool b) public returns (int32) {
return 1;
}
}"##,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"function expects 2 arguments, 1 provided"
);
let ns = parse_and_resolve(
r##"
contract c {
b x;
constructor() public {
x = new b(102);
}
function test() public returns (int32) {
return x.get_x({ t: 10, t: false });
}
}
contract b {
int32 x;
constructor(int32 a) public {
x = a;
}
function get_x(int32 t) public returns (int32) {
return x * t;
}
}"##,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"duplicate argument with name ‘t’"
);
let ns = parse_and_resolve(
r##"
contract c {
b x;
constructor() public {
x = new b({ a: 1, a: 2 });
}
function test() public returns (int32) {
return x.get_x({ t: 10 });
}
}
contract b {
int32 x;
constructor(int32 a) public {
x = a;
}
function get_x(int32 t) public returns (int32) {
return x * t;
}
}"##,
Target::default_substrate(),
);
assert_eq!(first_error(ns.diagnostics), "duplicate argument name ‘a’");
#[derive(Debug, PartialEq, Encode, Decode)]
struct Ret(u32);
let mut runtime = build_solidity(
r##"
contract c {
b x;
constructor() public {
x = new b(102);
}
function test() public returns (int32) {
return x.get_x({ t: 10 });
}
}
contract b {
int32 x;
constructor(int32 a) public {
x = a;
}
function get_x(int32 t) public returns (int32) {
return x * t;
}
}"##,
);
runtime.constructor(0, Vec::new());
runtime.function("test", Vec::new());
assert_eq!(runtime.vm.output, Ret(1020).encode());
}
#[test]
fn revert_external_call() {
let mut runtime = build_solidity(
r##"
contract c {
b x;
constructor() public {
x = new b(102);
}
function test() public returns (int32) {
return x.get_x({ t: 10 });
}
}
contract b {
int32 x;
constructor(int32 a) public {
x = a;
}
function get_x(int32 t) public returns (int32) {
revert("The reason why");
}
}"##,
);
runtime.constructor(0, Vec::new());
runtime.function_expect_failure("test", Vec::new());
}
#[test]
fn revert_constructor() {
let mut runtime = build_solidity(
r##"
contract c {
b x;
constructor() public {
}
function test() public returns (int32) {
x = new b(102);
return x.get_x({ t: 10 });
}
}
contract b {
int32 x;
constructor(int32 a) public {
require(a == 0, "Hello,\
World!");
}
function get_x(int32 t) public returns (int32) {
return x * t;
}
}"##,
);
runtime.constructor(0, Vec::new());
runtime.function_expect_failure("test", Vec::new());
assert_eq!(runtime.vm.output.len(), 0);
}
#[test]
fn external_datatypes() {
#[derive(Debug, PartialEq, Encode, Decode)]
struct Ret(u64);
let mut runtime = build_solidity(
r##"
contract c {
b x;
constructor() public {
x = new b(102);
}
function test() public returns (int64) {
strukt k = x.get_x(10, "foobar", true, strukt({ f1: "abcd", f2: address(555555), f3: -1 }));
assert(k.f1 == "1234");
assert(k.f2 == address(102));
return int64(k.f3);
}
}
contract b {
int x;
constructor(int a) public {
x = a;
}
function get_x(int t, string s, bool y, strukt k) public returns (strukt) {
assert(y == true);
assert(t == 10);
assert(s == "foobar");
assert(k.f1 == "abcd");
return strukt({ f1: "1234", f2: address(102), f3: x * t });
}
}
struct strukt {
bytes4 f1;
address f2;
int f3;
}"##,
);
runtime.constructor(0, Vec::new());
runtime.function("test", Vec::new());
assert_eq!(runtime.vm.output, Ret(1020).encode());
}
#[test]
fn creation_code() {
let ns = parse_and_resolve(
r##"
contract a {
function test() public {
bytes code = type(b).creationCode;
}
}
contract b {
int x;
function test() public {
a f = new a();
}
}
"##,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"circular reference creating contract ‘a’"
);
let ns = parse_and_resolve(
r##"
contract a {
function test() public {
bytes code = type(a).runtimeCode;
}
}"##,
Target::default_substrate(),
);
assert_eq!(
first_error(ns.diagnostics),
"containing our own contract code for ‘a’ would generate infinite size contract"
);
let mut runtime = build_solidity(
r##"
contract c {
function test() public returns (bytes) {
bytes runtime = type(b).runtimeCode;
assert(runtime[0] == 0);
assert(runtime[1] == 0x61); // a
assert(runtime[2] == 0x73); // s
assert(runtime[3] == 0x6d); // m
bytes creation = type(b).creationCode;
// on Substrate, they are the same
assert(creation == runtime);
return creation;
}
}
contract b {
int x;
constructor(int a) public {
x = a;
}
}"##,
);
runtime.constructor(0, Vec::new());
runtime.function("test", Vec::new());
#[derive(Debug, PartialEq, Encode, Decode)]
struct Ret(Vec<u8>);
// return value should be the code for the second contract
assert_eq!(
runtime.vm.output,
Ret(runtime.contracts[1].0.clone()).encode()
);
}
| 22.714765 | 108 | 0.460629 |
23bb42673f692ea10c5884f5df4b4e2369271539 | 1,259 | #[repr(C)]
#[derive(Clone, Copy)]
pub struct RgbU8 {
pub r: u8,
pub g: u8,
pub b: u8,
}
impl RgbU8 {
pub fn as_u8(self) -> [u8; 3] {
[self.r, self.g, self.b]
}
}
impl From<&RgbF32> for RgbU8 {
fn from(rgb_f32: &RgbF32) -> Self {
Self {
r: (rgb_f32.r.clamp(0.0, 1.0) * 255.999) as u8,
g: (rgb_f32.g.clamp(0.0, 1.0) * 255.999) as u8,
b: (rgb_f32.b.clamp(0.0, 1.0) * 255.999) as u8,
}
}
}
pub struct RgbF32 {
pub r: f32,
pub g: f32,
pub b: f32,
}
impl RgbF32 {
pub const BLACK: Self = Self::new(0.0, 0.0, 0.0);
pub const fn new(r: f32, g: f32, b: f32) -> Self {
Self { r, g, b }
}
}
impl std::ops::Div<f32> for RgbF32 {
type Output = Self;
fn div(self, rhs: f32) -> Self::Output {
Self {
r: self.r / rhs,
g: self.g / rhs,
b: self.b / rhs,
}
}
}
impl std::ops::AddAssign for RgbF32 {
fn add_assign(&mut self, rhs: Self) {
self.r += rhs.r;
self.g += rhs.g;
self.b += rhs.b;
}
}
impl std::ops::DivAssign<f32> for RgbF32 {
fn div_assign(&mut self, rhs: f32) {
self.r /= rhs;
self.g /= rhs;
self.b /= rhs;
}
}
| 19.075758 | 59 | 0.471803 |
5df0ef9a4c93ef9f4cab3037cc6d32d681a198cd | 1,519 | mod overflowing_add;
mod overflowing_sub;
use crate::RangedI32;
use arith_traits::Overflow;
// Suppress false positive recursion warning
#[allow(unconditional_recursion)]
impl<const START: i32, const END: i32> Overflow<i32> for RangedI32<START, END> {
type Output = (Self, bool);
#[inline]
fn overflowing_abs(self) -> Self::Output { self.overflowing_abs() }
#[inline]
#[must_use]
fn overflowing_add(self, rhs: i32) -> Self::Output { self.overflowing_add(rhs) }
#[inline]
fn overflowing_div(self, rhs: i32) -> Self::Output { self.overflowing_div(rhs) }
#[inline]
fn overflowing_div_euclid(self, rhs: i32) -> Self::Output { self.overflowing_div_euclid(rhs) }
#[inline]
fn overflowing_mul(self, rhs: i32) -> Self::Output { self.overflowing_mul(rhs) }
#[inline]
fn overflowing_neg(self) -> Self::Output { self.overflowing_neg() }
#[inline]
fn overflowing_pow(self, rhs: u32) -> Self::Output { self.overflowing_pow(rhs) }
#[inline]
fn overflowing_rem(self, rhs: i32) -> Self::Output { self.overflowing_rem(rhs) }
#[inline]
fn overflowing_rem_euclid(self, rhs: i32) -> Self::Output { self.overflowing_rem_euclid(rhs) }
#[inline]
fn overflowing_shl(self, rhs: u32) -> Self::Output { self.overflowing_shl(rhs) }
#[inline]
fn overflowing_shr(self, rhs: u32) -> Self::Output { self.overflowing_shr(rhs) }
#[inline]
fn overflowing_sub(self, rhs: i32) -> Self::Output { self.overflowing_sub(rhs) }
}
| 24.5 | 98 | 0.668861 |
9b98be7dc21964345e6068c5649a5b8c23d427a2 | 5,865 | use core::convert::Infallible;
use super::{
dynamic::PinModeError, marker, DynamicPin, ErasedPin, Input, OpenDrain, Output,
PartiallyErasedPin, Pin, PinMode, PinState,
};
use embedded_hal::digital::v2::{
InputPin, IoPin, OutputPin, StatefulOutputPin, ToggleableOutputPin,
};
// Implementations for `Pin`
impl<const P: char, const N: u8, MODE> OutputPin for Pin<P, N, Output<MODE>> {
type Error = Infallible;
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<const P: char, const N: u8, MODE> StatefulOutputPin for Pin<P, N, Output<MODE>> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<const P: char, const N: u8, MODE> ToggleableOutputPin for Pin<P, N, Output<MODE>> {
type Error = Infallible;
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl<const P: char, const N: u8, MODE> InputPin for Pin<P, N, MODE>
where
MODE: marker::Readable,
{
type Error = Infallible;
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
impl<const P: char, const N: u8> IoPin<Self, Self> for Pin<P, N, Output<OpenDrain>> {
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(state);
Ok(self)
}
}
impl<const P: char, const N: u8, Otype> IoPin<Pin<P, N, Input>, Self> for Pin<P, N, Output<Otype>>
where
Output<Otype>: PinMode,
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Pin<P, N, Input>, Self::Error> {
Ok(self.into_input())
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(state);
Ok(self)
}
}
impl<const P: char, const N: u8, Otype> IoPin<Self, Pin<P, N, Output<Otype>>> for Pin<P, N, Input>
where
Output<Otype>: PinMode,
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(mut self, state: PinState) -> Result<Pin<P, N, Output<Otype>>, Self::Error> {
self._set_state(state);
Ok(self.into_mode())
}
}
// Implementations for `ErasedPin`
impl<MODE> OutputPin for ErasedPin<Output<MODE>> {
type Error = core::convert::Infallible;
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<MODE> StatefulOutputPin for ErasedPin<Output<MODE>> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<MODE> ToggleableOutputPin for ErasedPin<Output<MODE>> {
type Error = Infallible;
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl<MODE> InputPin for ErasedPin<MODE>
where
MODE: marker::Readable,
{
type Error = core::convert::Infallible;
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
// Implementations for `PartiallyErasedPin`
impl<const P: char, MODE> OutputPin for PartiallyErasedPin<P, Output<MODE>> {
type Error = Infallible;
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<const P: char, MODE> StatefulOutputPin for PartiallyErasedPin<P, Output<MODE>> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<const P: char, MODE> ToggleableOutputPin for PartiallyErasedPin<P, Output<MODE>> {
type Error = Infallible;
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl<const P: char, MODE> InputPin for PartiallyErasedPin<P, MODE>
where
MODE: marker::Readable,
{
type Error = Infallible;
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
// Implementations for `DynamicPin`
// `OutputPin` for a pin whose mode is chosen at run time. Unlike the erased
// pin impls above, the inherent `set_high`/`set_low` are themselves fallible
// (the pin may currently not be in an output mode), so their `Result` is
// forwarded unchanged and `Error = PinModeError`.
impl<const P: char, const N: u8> OutputPin for DynamicPin<P, N> {
    type Error = PinModeError;
    fn set_high(&mut self) -> Result<(), Self::Error> {
        self.set_high()
    }
    fn set_low(&mut self) -> Result<(), Self::Error> {
        self.set_low()
    }
}
// `InputPin` for a run-time-mode pin. As with the output impl, the inherent
// `is_high`/`is_low` return `Result<bool, PinModeError>` (reading is only
// valid in an input-capable mode), so the result is forwarded as-is.
impl<const P: char, const N: u8> InputPin for DynamicPin<P, N> {
    type Error = PinModeError;
    fn is_high(&self) -> Result<bool, Self::Error> {
        self.is_high()
    }
    fn is_low(&self) -> Result<bool, Self::Error> {
        self.is_low()
    }
}
| 24.135802 | 100 | 0.589258 |
9c194793fc93bbefced67a8eadcd0a3d777ed38b | 17,303 | // Enable some rust 2018 idioms.
#![warn(bare_trait_objects)]
#![warn(unused_extern_crates)]
// When the `system_alloc` feature is enabled, register the OS allocator as
// the global allocator (instead of whatever allocator is the build default).
#[cfg(feature = "system_alloc")]
use std::alloc::System;
#[cfg(feature = "system_alloc")]
#[global_allocator]
static A: System = System;
#[macro_use]
extern crate clap;
#[macro_use]
extern crate log;
use std::io::BufWriter;
// Mode: positional FILE argument vs. the --diff FILE FILE pair.
const OPT_FILE: &str = "file";
const OPT_DIFF: &str = "diff";
// Print format (text or html).
const OPT_OUTPUT: &str = "format";
const OPT_OUTPUT_TEXT: &str = "text";
const OPT_OUTPUT_HTML: &str = "html";
// Print categories: which top-level kinds of entries to show.
const OPT_CATEGORY: &str = "category";
const OPT_CATEGORY_FILE: &str = "file";
const OPT_CATEGORY_UNIT: &str = "unit";
const OPT_CATEGORY_TYPE: &str = "type";
const OPT_CATEGORY_FUNCTION: &str = "function";
const OPT_CATEGORY_VARIABLE: &str = "variable";
// Print fields: optional per-entry details ("all" enables every one).
const OPT_PRINT: &str = "print";
const OPT_PRINT_ALL: &str = "all";
const OPT_PRINT_ADDRESS: &str = "address";
const OPT_PRINT_SOURCE: &str = "source";
const OPT_PRINT_FILE_ADDRESS: &str = "file-address";
const OPT_PRINT_UNIT_ADDRESS: &str = "unit-address";
const OPT_PRINT_FUNCTION_CALLS: &str = "function-calls";
const OPT_PRINT_FUNCTION_INSTRUCTIONS: &str = "function-instructions";
const OPT_PRINT_FUNCTION_VARIABLES: &str = "function-variables";
const OPT_PRINT_FUNCTION_STACK_FRAME: &str = "function-stack-frame";
const OPT_PRINT_INLINED_FUNCTION_PARAMETERS: &str = "inlined-function-parameters";
const OPT_PRINT_VARIABLE_LOCATIONS: &str = "variable-locations";
// Print parameters
const OPT_INLINE_DEPTH: &str = "inline-depth";
// Filters: key=value pairs restricting which entries are shown.
const OPT_FILTER: &str = "filter";
const OPT_FILTER_INLINE: &str = "inline";
const OPT_FILTER_FUNCTION_INLINE: &str = "function-inline";
const OPT_FILTER_NAME: &str = "name";
const OPT_FILTER_NAMESPACE: &str = "namespace";
const OPT_FILTER_UNIT: &str = "unit";
// Sorting key for the output.
const OPT_SORT: &str = "sort";
const OPT_SORT_SIZE: &str = "size";
const OPT_SORT_NAME: &str = "name";
// Diff options: classes of differences to suppress (require --diff).
const OPT_IGNORE: &str = "ignore";
const OPT_IGNORE_ADDED: &str = "added";
const OPT_IGNORE_DELETED: &str = "deleted";
const OPT_IGNORE_ADDRESS: &str = "address";
const OPT_IGNORE_SYMBOL_NAME: &str = "symbol-name";
const OPT_IGNORE_FUNCTION_ADDRESS: &str = "function-address";
const OPT_IGNORE_FUNCTION_SIZE: &str = "function-size";
const OPT_IGNORE_FUNCTION_INLINE: &str = "function-inline";
const OPT_IGNORE_FUNCTION_SYMBOL_NAME: &str = "function-symbol-name";
const OPT_IGNORE_VARIABLE_ADDRESS: &str = "variable-address";
const OPT_IGNORE_VARIABLE_SYMBOL_NAME: &str = "variable-symbol-name";
const OPT_PREFIX_MAP: &str = "prefix-map";
/// Command-line entry point.
///
/// Builds the clap CLI, translates the parsed matches into a
/// `ddbug::Options`, then either prints a single file or diffs two files.
/// Any invalid argument value terminates the process via
/// `clap::Error::exit()`.
fn main() {
    env_logger::init();
    // ---- CLI definition --------------------------------------------------
    let matches = clap::App::new("ddbug")
        .version(crate_version!())
        .setting(clap::AppSettings::UnifiedHelpMessage)
        .arg(
            clap::Arg::with_name(OPT_FILE)
                .help("Path of file to print")
                .value_name("FILE")
                .index(1)
                .required_unless(OPT_DIFF)
                .conflicts_with(OPT_DIFF),
        )
        .arg(
            clap::Arg::with_name(OPT_DIFF)
                .short("d")
                .long(OPT_DIFF)
                .help("Print difference between two files")
                .value_names(&["FILE", "FILE"]),
        )
        .arg(
            clap::Arg::with_name(OPT_OUTPUT)
                .short("o")
                .long(OPT_OUTPUT)
                .help("Output format")
                .takes_value(true)
                .value_name("FORMAT")
                .possible_values(&[OPT_OUTPUT_TEXT, OPT_OUTPUT_HTML]),
        )
        .arg(
            clap::Arg::with_name(OPT_CATEGORY)
                .short("c")
                .long(OPT_CATEGORY)
                .help("Categories of entries to print (defaults to all)")
                .takes_value(true)
                .multiple(true)
                .require_delimiter(true)
                .value_name("CATEGORY")
                .possible_values(&[
                    OPT_CATEGORY_FILE,
                    OPT_CATEGORY_UNIT,
                    OPT_CATEGORY_TYPE,
                    OPT_CATEGORY_FUNCTION,
                    OPT_CATEGORY_VARIABLE,
                ]),
        )
        .arg(
            clap::Arg::with_name(OPT_PRINT)
                .short("p")
                .long(OPT_PRINT)
                .help("Print extra fields within entries")
                .takes_value(true)
                .multiple(true)
                .require_delimiter(true)
                .value_name("FIELD")
                .possible_values(&[
                    OPT_PRINT_ALL,
                    OPT_PRINT_ADDRESS,
                    OPT_PRINT_SOURCE,
                    OPT_PRINT_FILE_ADDRESS,
                    OPT_PRINT_UNIT_ADDRESS,
                    OPT_PRINT_FUNCTION_CALLS,
                    OPT_PRINT_FUNCTION_INSTRUCTIONS,
                    OPT_PRINT_FUNCTION_VARIABLES,
                    OPT_PRINT_FUNCTION_STACK_FRAME,
                    OPT_PRINT_INLINED_FUNCTION_PARAMETERS,
                    OPT_PRINT_VARIABLE_LOCATIONS,
                ]),
        )
        .arg(
            clap::Arg::with_name(OPT_INLINE_DEPTH)
                .long(OPT_INLINE_DEPTH)
                .help("Depth of inlined function calls to print (defaults to 1, 0 to disable)")
                .value_name("DEPTH"),
        )
        .arg(
            clap::Arg::with_name(OPT_FILTER)
                .short("f")
                .long(OPT_FILTER)
                .help("Print only entries that match the given filters")
                .takes_value(true)
                .multiple(true)
                .require_delimiter(true)
                .value_name("FILTER"),
        )
        .arg(
            clap::Arg::with_name(OPT_SORT)
                .short("s")
                .long(OPT_SORT)
                .help("Sort entries by the given key")
                .takes_value(true)
                .value_name("KEY")
                .possible_values(&[OPT_SORT_NAME, OPT_SORT_SIZE]),
        )
        .arg(
            clap::Arg::with_name(OPT_IGNORE)
                .short("i")
                .long(OPT_IGNORE)
                .help("Don't print differences due to the given types of changes")
                .requires(OPT_DIFF)
                .takes_value(true)
                .multiple(true)
                .require_delimiter(true)
                .value_name("CHANGE")
                .possible_values(&[
                    OPT_IGNORE_ADDED,
                    OPT_IGNORE_DELETED,
                    OPT_IGNORE_ADDRESS,
                    OPT_IGNORE_SYMBOL_NAME,
                    OPT_IGNORE_FUNCTION_ADDRESS,
                    OPT_IGNORE_FUNCTION_SIZE,
                    OPT_IGNORE_FUNCTION_INLINE,
                    OPT_IGNORE_FUNCTION_SYMBOL_NAME,
                    OPT_IGNORE_VARIABLE_ADDRESS,
                    OPT_IGNORE_VARIABLE_SYMBOL_NAME,
                ]),
        )
        .arg(
            clap::Arg::with_name(OPT_PREFIX_MAP)
                .long(OPT_PREFIX_MAP)
                .help("When comparing file paths, replace the 'old' prefix with the 'new' prefix")
                .requires(OPT_DIFF)
                .takes_value(true)
                .multiple(true)
                .require_delimiter(true)
                .value_name("OLD>=<NEW"),
        )
        .after_help(concat!(
            "FILTERS:\n",
            "    function-inline=<yes|no> Match function 'inline' value\n",
            "    name=<string>            Match entries with the given name\n",
            "    namespace=<string>       Match entries within the given namespace\n",
            "    unit=<string>            Match entries within the given unit\n"
        ))
        .get_matches();
    // ---- Translate matches into ddbug options ----------------------------
    let mut options = ddbug::Options::default();
    // Inline depth: numeric value; defaults to 1 when the flag is absent.
    options.inline_depth = if let Some(inline_depth) = matches.value_of(OPT_INLINE_DEPTH) {
        match inline_depth.parse::<usize>() {
            Ok(inline_depth) => inline_depth,
            Err(_) => {
                clap::Error::with_description(
                    &format!("invalid {} value: {}", OPT_INLINE_DEPTH, inline_depth),
                    clap::ErrorKind::InvalidValue,
                )
                .exit();
            }
        }
    } else {
        1
    };
    // Output format: text is the default.
    if let Some(value) = matches.value_of(OPT_OUTPUT) {
        match value {
            OPT_OUTPUT_TEXT => options.html = false,
            OPT_OUTPUT_HTML => options.html = true,
            _ => clap::Error::with_description(
                &format!("invalid {} value: {}", OPT_OUTPUT, value),
                clap::ErrorKind::InvalidValue,
            )
            .exit(),
        }
    } else {
        options.html = false;
    }
    // Categories: when the flag is absent, enable every category.
    if let Some(values) = matches.values_of(OPT_CATEGORY) {
        for value in values {
            match value {
                OPT_CATEGORY_FILE => options.category_file = true,
                OPT_CATEGORY_UNIT => options.category_unit = true,
                OPT_CATEGORY_TYPE => options.category_type = true,
                OPT_CATEGORY_FUNCTION => options.category_function = true,
                OPT_CATEGORY_VARIABLE => options.category_variable = true,
                _ => clap::Error::with_description(
                    &format!("invalid {} value: {}", OPT_CATEGORY, value),
                    clap::ErrorKind::InvalidValue,
                )
                .exit(),
            }
        }
    } else {
        options.category_file = true;
        options.category_unit = true;
        options.category_type = true;
        options.category_function = true;
        options.category_variable = true;
    }
    // Extra print fields; "all" switches on every individual flag.
    if let Some(values) = matches.values_of(OPT_PRINT) {
        for value in values {
            match value {
                OPT_PRINT_ALL => {
                    options.print_file_address = true;
                    options.print_unit_address = true;
                    options.print_source = true;
                    options.print_function_calls = true;
                    options.print_function_instructions = true;
                    options.print_function_variables = true;
                    options.print_function_stack_frame = true;
                    options.print_inlined_function_parameters = true;
                    options.print_variable_locations = true;
                }
                OPT_PRINT_ADDRESS => {
                    options.print_file_address = true;
                    options.print_unit_address = true;
                }
                OPT_PRINT_SOURCE => options.print_source = true,
                OPT_PRINT_FILE_ADDRESS => options.print_file_address = true,
                OPT_PRINT_UNIT_ADDRESS => options.print_unit_address = true,
                OPT_PRINT_FUNCTION_CALLS => options.print_function_calls = true,
                OPT_PRINT_FUNCTION_INSTRUCTIONS => options.print_function_instructions = true,
                OPT_PRINT_FUNCTION_VARIABLES => options.print_function_variables = true,
                OPT_PRINT_FUNCTION_STACK_FRAME => options.print_function_stack_frame = true,
                OPT_PRINT_INLINED_FUNCTION_PARAMETERS => {
                    options.print_inlined_function_parameters = true
                }
                OPT_PRINT_VARIABLE_LOCATIONS => options.print_variable_locations = true,
                _ => clap::Error::with_description(
                    &format!("invalid {} value: {}", OPT_PRINT, value),
                    clap::ErrorKind::InvalidValue,
                )
                .exit(),
            }
        }
    }
    // Filters: each value must be of the form key=value; a missing '=' or an
    // unknown key is a usage error.
    if let Some(values) = matches.values_of(OPT_FILTER) {
        for value in values {
            if let Some(index) = value.bytes().position(|c| c == b'=') {
                let key = &value[..index];
                let value = &value[index + 1..];
                match key {
                    OPT_FILTER_INLINE | OPT_FILTER_FUNCTION_INLINE => {
                        options.filter_function_inline = match value {
                            "y" | "yes" => Some(true),
                            "n" | "no" => Some(false),
                            _ => clap::Error::with_description(
                                &format!("invalid {} {} value: {}", OPT_FILTER, key, value),
                                clap::ErrorKind::InvalidValue,
                            )
                            .exit(),
                        };
                    }
                    OPT_FILTER_NAME => options.filter_name = Some(value),
                    OPT_FILTER_NAMESPACE => options.filter_namespace = value.split("::").collect(),
                    OPT_FILTER_UNIT => options.filter_unit = Some(value),
                    _ => clap::Error::with_description(
                        &format!("invalid {} key: {}", OPT_FILTER, key),
                        clap::ErrorKind::InvalidValue,
                    )
                    .exit(),
                }
            } else {
                clap::Error::with_description(
                    &format!("missing {} value for key: {}", OPT_FILTER, value),
                    clap::ErrorKind::InvalidValue,
                )
                .exit();
            }
        }
    }
    // Sort key (defaults to no sorting).
    options.sort = match matches.value_of(OPT_SORT) {
        Some(OPT_SORT_NAME) => ddbug::Sort::Name,
        Some(OPT_SORT_SIZE) => ddbug::Sort::Size,
        Some(value) => clap::Error::with_description(
            &format!("invalid {} key: {}", OPT_SORT, value),
            clap::ErrorKind::InvalidValue,
        )
        .exit(),
        _ => ddbug::Sort::None,
    };
    // Diff-only: suppress selected classes of differences. The "address" and
    // "symbol-name" values are shorthands covering both functions and
    // variables.
    if let Some(values) = matches.values_of(OPT_IGNORE) {
        for value in values {
            match value {
                OPT_IGNORE_ADDED => options.ignore_added = true,
                OPT_IGNORE_DELETED => options.ignore_deleted = true,
                OPT_IGNORE_ADDRESS => {
                    options.ignore_function_address = true;
                    options.ignore_variable_address = true;
                }
                OPT_IGNORE_SYMBOL_NAME => {
                    options.ignore_function_symbol_name = true;
                    options.ignore_variable_symbol_name = true;
                }
                OPT_IGNORE_FUNCTION_ADDRESS => options.ignore_function_address = true,
                OPT_IGNORE_FUNCTION_SIZE => options.ignore_function_size = true,
                OPT_IGNORE_FUNCTION_INLINE => options.ignore_function_inline = true,
                OPT_IGNORE_FUNCTION_SYMBOL_NAME => options.ignore_function_symbol_name = true,
                OPT_IGNORE_VARIABLE_ADDRESS => options.ignore_variable_address = true,
                OPT_IGNORE_VARIABLE_SYMBOL_NAME => options.ignore_variable_symbol_name = true,
                _ => clap::Error::with_description(
                    &format!("invalid {} value: {}", OPT_IGNORE, value),
                    clap::ErrorKind::InvalidValue,
                )
                .exit(),
            }
        }
    }
    // Diff-only: OLD=NEW path prefix remapping.
    if let Some(values) = matches.values_of(OPT_PREFIX_MAP) {
        for value in values {
            if let Some(index) = value.bytes().position(|c| c == b'=') {
                let old = &value[..index];
                let new = &value[index + 1..];
                options.prefix_map.push((old, new));
            } else {
                clap::Error::with_description(
                    &format!("invalid {} value: {}", OPT_PREFIX_MAP, value),
                    clap::ErrorKind::InvalidValue,
                )
                .exit();
            }
        }
        // Longest prefixes first, so the most specific mapping wins.
        options.prefix_map.sort_by(|a, b| b.0.len().cmp(&a.0.len()));
    }
    // ---- Run -------------------------------------------------------------
    if let Some(mut paths) = matches.values_of(OPT_DIFF) {
        // Diff mode: OPT_DIFF carries exactly two paths (enforced by clap),
        // so the two `unwrap`s cannot fail. Parse errors are logged per file.
        let path_a = paths.next().unwrap();
        let path_b = paths.next().unwrap();
        if let Err(e) = ddbug::File::parse(path_a, |file_a| {
            if let Err(e) = ddbug::File::parse(path_b, |file_b| diff_file(file_a, file_b, &options))
            {
                error!("{}: {}", path_b, e);
            }
            Ok(())
        }) {
            error!("{}: {}", path_a, e);
        }
    } else {
        // Print mode: OPT_FILE is required when --diff is absent, so the
        // `unwrap` cannot fail.
        let path = matches.value_of(OPT_FILE).unwrap();
        if let Err(e) = ddbug::File::parse(path, |file| print_file(file, &options)) {
            error!("{}: {}", path, e);
        }
    }
}
fn diff_file(
file_a: &ddbug::File,
file_b: &ddbug::File,
options: &ddbug::Options,
) -> ddbug::Result<()> {
format(options, |printer| {
if let Err(e) = ddbug::diff(printer, file_a, file_b, options) {
error!("{}", e);
}
Ok(())
})
}
/// Renders a single parsed file through the configured printer backend.
fn print_file(file: &ddbug::File, options: &ddbug::Options) -> ddbug::Result<()> {
    format(options, |printer| {
        ddbug::print(file, printer, options)
    })
}
/// Runs `f` with a printer writing to buffered, locked stdout.
///
/// The backend is chosen from `options.html`: the HTML printer needs an
/// explicit `begin`/`end` pair around the callback, while the text printer
/// just runs the callback directly.
fn format<F>(options: &ddbug::Options, f: F) -> ddbug::Result<()>
where
    F: FnOnce(&mut dyn ddbug::Printer) -> ddbug::Result<()>,
{
    let stdout = std::io::stdout();
    let handle = stdout.lock();
    let mut writer = BufWriter::new(handle);
    if !options.html {
        let mut printer = ddbug::TextPrinter::new(&mut writer, options);
        return f(&mut printer);
    }
    let mut printer = ddbug::HtmlPrinter::new(&mut writer, options);
    printer.begin()?;
    f(&mut printer)?;
    printer.end()
}
| 37.945175 | 100 | 0.533665 |
64079a03849ff8e74979a23a9255988eff79679f | 3,986 | use serde::{Serialize, Serializer};
use wasm_bindgen::JsValue;
use super::selector::Selector;
use crate::events::SequenceID;
/// Timeout setting for a changes request.
///
/// `None` disables the timeout, `Default` defers to the adapter's built-in
/// default (and is skipped during serialization via [`Timeout::is_default`]),
/// and `Duration` sets an explicit timeout.
#[derive(PartialEq, Eq, Debug)]
pub enum Timeout {
    None,
    Default,
    Duration(std::time::Duration),
}

impl Timeout {
    /// Returns `true` when this is the adapter-default timeout — the value
    /// that serialization should skip rather than emit.
    pub(crate) fn is_default(&self) -> bool {
        match self {
            Timeout::Default => true,
            Timeout::None | Timeout::Duration(_) => false,
        }
    }
}

impl Default for Timeout {
    /// By default, let the adapter decide (`Timeout::Default`).
    fn default() -> Self {
        Timeout::Default
    }
}
impl Serialize for Timeout {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::None => serializer.serialize_bool(false),
Self::Default => panic!("Tried to serialize default value"),
Self::Duration(duration) => serializer.serialize_f64(duration.as_secs_f64() * 1000.0),
}
}
}
/// Options for a changes-feed request.
/// All options default to false unless otherwise specified.
#[derive(Serialize, Default, Debug)]
pub struct Changes {
    /// Include the associated document with each change.
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    pub include_docs: bool,
    /// Include conflicts.
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    pub conflicts: bool,
    /// Include attachments.
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    pub attachments: bool,
    /// Reverse the order of the output documents.
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    pub descending: bool,
    /// Start the results from the change immediately after the given sequence
    /// number. You can also pass `"now"` if you want only new changes (for a
    /// live changes feed).
    #[serde(skip_serializing)]
    pub since: Option<SequenceID>,
    /// Limit the number of results to this number.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<u32>,
    /// Request timeout (in milliseconds).
    #[serde(skip_serializing_if = "Timeout::is_default")]
    pub timeout: Timeout,
    /// For http adapter only, time in milliseconds for server to give a heartbeat to
    /// keep long connections open. Defaults to 10000 (10 seconds).
    #[serde(skip_serializing_if = "Timeout::is_default")]
    pub heartbeat: Timeout,
    /// Reference a filter function from a design document to selectively get updates.
    /// To use a view function, pass `_view` here and provide a reference to the view
    /// function in the `view` field.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filter: Option<String>,
    /// Only show changes for docs with these ids.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub doc_ids: Vec<String>,
    /// Object containing properties that are passed to the filter function, e.g.
    /// `{"foo:"bar"}`, where `"bar"` will be available in the filter function as
    /// `params.query.foo`. To access the params, define your filter function like
    /// `function (doc, params) {/* ... */}`.
    #[serde(skip_serializing)]
    pub query_params: Option<JsValue>,
    /// Specify a view function (e.g. `"design_doc_name/view_name"` or `"view_name"` as
    /// shorthand for `"view_name/view_name"`) to act as a filter. Documents counted as
    /// “passed” for a view filter if a map function emits at least one record for them.
    /// Note: the `filter` field must be set to `"_view"` for this option to work.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub view: Option<String>,
    /// Filter using a query/pouchdb-find selector. Note: Selectors are not supported
    /// in CouchDB 1.x. Cannot be used in combination with the filter option.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub selector: Option<Selector>,
    /// Only available for http databases, this configures how many changes to fetch
    /// at a time. Increasing this can reduce the number of requests made. Default is 25.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub batch_size: Option<u32>,
    // some options are skipped, because they're not useful right now.
}
| 40.673469 | 98 | 0.66583 |
f4f12140531822780951b58c7cbf530f7551aed9 | 24,495 | //! General-purpose I/O port pins.
//!
//! For STM32F3 Series of mixed-signal MCUs with DSP and FPU instructions.
use drone_core::periph;
use drone_cortexm::reg::marker::*;
periph! {
    /// Generic GPIO pin peripheral variant.
    pub trait GpioPinMap {
        /// GPIO port head peripheral variant.
        type GpioHeadMap: super::super::head::GpioHeadMap;
    }
    /// Generic GPIO pin peripheral.
    pub struct GpioPinPeriph;
    // Per-pin view of a GPIO port's register block. Every register is marked
    // `Shared` — presumably because the same 32-bit registers carry the
    // fields of all sixteen pins of the port (confirm against the
    // `drone_core::periph!` DSL documentation).
    GPIO {
        MODER {
            0x20 RwReg Shared;
            MODER { RwRwRegFieldBits }
        }
        OTYPER {
            0x20 RwReg Shared;
            OT { RwRwRegFieldBit }
        }
        OSPEEDR {
            0x20 RwReg Shared;
            OSPEEDR { RwRwRegFieldBits }
        }
        PUPDR {
            0x20 RwReg Shared;
            PUPDR { RwRwRegFieldBits }
        }
        IDR {
            0x20 RoReg Shared;
            IDR { RoRoRegFieldBit }
        }
        ODR {
            0x20 RwReg Shared;
            ODR { RwRwRegFieldBit }
        }
        BSRR {
            0x20 WoReg Shared;
            BR { WoWoRegFieldBit }
            BS { WoWoRegFieldBit }
        }
        LCKR {
            0x20 RwReg Shared;
            LCK { RwRwRegFieldBit }
        }
        AFR {
            0x20 RwReg Shared;
            AFR { RwRwRegFieldBits }
        }
        BRR {
            0x20 WoReg Shared;
            BR { WoWoRegFieldBit }
        }
    }
}
// Binds a single GPIO pin to a dedicated peripheral type via `periph::map!`.
//
// Parameters, in order:
//   $port_ty           - GPIO head type of the owning port (e.g. `GpioAHead`)
//   $pin_macro         - name of the generated register-token extraction macro
//   $pin_ty            - name of the generated pin peripheral type
//   $gpio              - register-block ident of the port (e.g. `GPIOA`)
//   $moder_ty..$lck_ty - per-pin field idents of the shared port registers
//   $afr_path          - `AFRL` (pins 0-7) or `AFRH` (pins 8-15)
//   $afr               - alternate-function field ident within `$afr_path`
//   $brr_ty            - per-pin bit-reset field ident of `BRR`
macro_rules! map_gpio_pin {
    (
        $port_ty:ident,
        $pin_macro_doc:expr,
        $pin_macro:ident,
        $pin_ty_doc:expr,
        $pin_ty:ident,
        $gpio:ident,
        $moder_ty:ident,
        $ot_ty:ident,
        $ospeedr_ty:ident,
        $pupdr_ty:ident,
        $idr_ty:ident,
        $odr_ty:ident,
        $br_ty:ident,
        $bs_ty:ident,
        $lck_ty:ident,
        $afr_path:ident,
        $afr:ident,
        $brr_ty:ident,
    ) => {
        periph::map! {
            #[doc = $pin_macro_doc]
            pub macro $pin_macro;
            #[doc = $pin_ty_doc]
            pub struct $pin_ty;
            impl GpioPinMap for $pin_ty {
                type GpioHeadMap = super::super::head::$port_ty;
            }
            drone_stm32_map_pieces::reg;
            crate::pin;
            GPIO {
                $gpio;
                MODER {
                    MODER Shared;
                    MODER { $moder_ty }
                }
                OTYPER {
                    OTYPER Shared;
                    OT { $ot_ty }
                }
                OSPEEDR {
                    OSPEEDR Shared;
                    OSPEEDR { $ospeedr_ty }
                }
                PUPDR {
                    PUPDR Shared;
                    PUPDR { $pupdr_ty }
                }
                IDR {
                    IDR Shared;
                    IDR { $idr_ty }
                }
                ODR {
                    ODR Shared;
                    ODR { $odr_ty }
                }
                BSRR {
                    BSRR Shared;
                    BR { $br_ty }
                    BS { $bs_ty }
                }
                LCKR {
                    LCKR Shared;
                    LCK { $lck_ty }
                }
                AFR {
                    $afr_path Shared;
                    AFR { $afr }
                }
                BRR {
                    BRR Shared;
                    BR { $brr_ty }
                }
            }
        }
    };
}
// Maps all sixteen pins of one GPIO port by expanding `map_gpio_pin!` once
// per pin. Pins 0-7 take their alternate-function field from `AFRL`, pins
// 8-15 from `AFRH`; every other field ident is the pin-numbered field of the
// shared port register (MODERn, OTn, OSPEEDRn, PUPDRn, IDRn, ODRn, BRn, BSn,
// LCKn, BRn).
macro_rules! map_gpio_pins {
    (
        $port_ty:ident,
        $pin0_macro_doc:expr,
        $pin0_macro:ident,
        $pin0_ty_doc:expr,
        $pin0_ty:ident,
        $pin1_macro_doc:expr,
        $pin1_macro:ident,
        $pin1_ty_doc:expr,
        $pin1_ty:ident,
        $pin2_macro_doc:expr,
        $pin2_macro:ident,
        $pin2_ty_doc:expr,
        $pin2_ty:ident,
        $pin3_macro_doc:expr,
        $pin3_macro:ident,
        $pin3_ty_doc:expr,
        $pin3_ty:ident,
        $pin4_macro_doc:expr,
        $pin4_macro:ident,
        $pin4_ty_doc:expr,
        $pin4_ty:ident,
        $pin5_macro_doc:expr,
        $pin5_macro:ident,
        $pin5_ty_doc:expr,
        $pin5_ty:ident,
        $pin6_macro_doc:expr,
        $pin6_macro:ident,
        $pin6_ty_doc:expr,
        $pin6_ty:ident,
        $pin7_macro_doc:expr,
        $pin7_macro:ident,
        $pin7_ty_doc:expr,
        $pin7_ty:ident,
        $pin8_macro_doc:expr,
        $pin8_macro:ident,
        $pin8_ty_doc:expr,
        $pin8_ty:ident,
        $pin9_macro_doc:expr,
        $pin9_macro:ident,
        $pin9_ty_doc:expr,
        $pin9_ty:ident,
        $pin10_macro_doc:expr,
        $pin10_macro:ident,
        $pin10_ty_doc:expr,
        $pin10_ty:ident,
        $pin11_macro_doc:expr,
        $pin11_macro:ident,
        $pin11_ty_doc:expr,
        $pin11_ty:ident,
        $pin12_macro_doc:expr,
        $pin12_macro:ident,
        $pin12_ty_doc:expr,
        $pin12_ty:ident,
        $pin13_macro_doc:expr,
        $pin13_macro:ident,
        $pin13_ty_doc:expr,
        $pin13_ty:ident,
        $pin14_macro_doc:expr,
        $pin14_macro:ident,
        $pin14_ty_doc:expr,
        $pin14_ty:ident,
        $pin15_macro_doc:expr,
        $pin15_macro:ident,
        $pin15_ty_doc:expr,
        $pin15_ty:ident,
        $gpio:ident,
    ) => {
        map_gpio_pin! {
            $port_ty,
            $pin0_macro_doc,
            $pin0_macro,
            $pin0_ty_doc,
            $pin0_ty,
            $gpio,
            MODER0,
            OT0,
            OSPEEDR0,
            PUPDR0,
            IDR0,
            ODR0,
            BR0,
            BS0,
            LCK0,
            AFRL,
            AFRL0,
            BR0,
        }
        map_gpio_pin! {
            $port_ty,
            $pin1_macro_doc,
            $pin1_macro,
            $pin1_ty_doc,
            $pin1_ty,
            $gpio,
            MODER1,
            OT1,
            OSPEEDR1,
            PUPDR1,
            IDR1,
            ODR1,
            BR1,
            BS1,
            LCK1,
            AFRL,
            // Fixed: this was `AFRL0`, which wired pin 1's alternate-function
            // field to pin 0's AFRL bits; every other pin follows the
            // pin-numbered pattern (AFRL2, AFRL3, ..., AFRH15).
            AFRL1,
            BR1,
        }
        map_gpio_pin! {
            $port_ty,
            $pin2_macro_doc,
            $pin2_macro,
            $pin2_ty_doc,
            $pin2_ty,
            $gpio,
            MODER2,
            OT2,
            OSPEEDR2,
            PUPDR2,
            IDR2,
            ODR2,
            BR2,
            BS2,
            LCK2,
            AFRL,
            AFRL2,
            BR2,
        }
        map_gpio_pin! {
            $port_ty,
            $pin3_macro_doc,
            $pin3_macro,
            $pin3_ty_doc,
            $pin3_ty,
            $gpio,
            MODER3,
            OT3,
            OSPEEDR3,
            PUPDR3,
            IDR3,
            ODR3,
            BR3,
            BS3,
            LCK3,
            AFRL,
            AFRL3,
            BR3,
        }
        map_gpio_pin! {
            $port_ty,
            $pin4_macro_doc,
            $pin4_macro,
            $pin4_ty_doc,
            $pin4_ty,
            $gpio,
            MODER4,
            OT4,
            OSPEEDR4,
            PUPDR4,
            IDR4,
            ODR4,
            BR4,
            BS4,
            LCK4,
            AFRL,
            AFRL4,
            BR4,
        }
        map_gpio_pin! {
            $port_ty,
            $pin5_macro_doc,
            $pin5_macro,
            $pin5_ty_doc,
            $pin5_ty,
            $gpio,
            MODER5,
            OT5,
            OSPEEDR5,
            PUPDR5,
            IDR5,
            ODR5,
            BR5,
            BS5,
            LCK5,
            AFRL,
            AFRL5,
            BR5,
        }
        map_gpio_pin! {
            $port_ty,
            $pin6_macro_doc,
            $pin6_macro,
            $pin6_ty_doc,
            $pin6_ty,
            $gpio,
            MODER6,
            OT6,
            OSPEEDR6,
            PUPDR6,
            IDR6,
            ODR6,
            BR6,
            BS6,
            LCK6,
            AFRL,
            AFRL6,
            BR6,
        }
        map_gpio_pin! {
            $port_ty,
            $pin7_macro_doc,
            $pin7_macro,
            $pin7_ty_doc,
            $pin7_ty,
            $gpio,
            MODER7,
            OT7,
            OSPEEDR7,
            PUPDR7,
            IDR7,
            ODR7,
            BR7,
            BS7,
            LCK7,
            AFRL,
            AFRL7,
            BR7,
        }
        map_gpio_pin! {
            $port_ty,
            $pin8_macro_doc,
            $pin8_macro,
            $pin8_ty_doc,
            $pin8_ty,
            $gpio,
            MODER8,
            OT8,
            OSPEEDR8,
            PUPDR8,
            IDR8,
            ODR8,
            BR8,
            BS8,
            LCK8,
            AFRH,
            AFRH8,
            BR8,
        }
        map_gpio_pin! {
            $port_ty,
            $pin9_macro_doc,
            $pin9_macro,
            $pin9_ty_doc,
            $pin9_ty,
            $gpio,
            MODER9,
            OT9,
            OSPEEDR9,
            PUPDR9,
            IDR9,
            ODR9,
            BR9,
            BS9,
            LCK9,
            AFRH,
            AFRH9,
            BR9,
        }
        map_gpio_pin! {
            $port_ty,
            $pin10_macro_doc,
            $pin10_macro,
            $pin10_ty_doc,
            $pin10_ty,
            $gpio,
            MODER10,
            OT10,
            OSPEEDR10,
            PUPDR10,
            IDR10,
            ODR10,
            BR10,
            BS10,
            LCK10,
            AFRH,
            AFRH10,
            BR10,
        }
        map_gpio_pin! {
            $port_ty,
            $pin11_macro_doc,
            $pin11_macro,
            $pin11_ty_doc,
            $pin11_ty,
            $gpio,
            MODER11,
            OT11,
            OSPEEDR11,
            PUPDR11,
            IDR11,
            ODR11,
            BR11,
            BS11,
            LCK11,
            AFRH,
            AFRH11,
            BR11,
        }
        map_gpio_pin! {
            $port_ty,
            $pin12_macro_doc,
            $pin12_macro,
            $pin12_ty_doc,
            $pin12_ty,
            $gpio,
            MODER12,
            OT12,
            OSPEEDR12,
            PUPDR12,
            IDR12,
            ODR12,
            BR12,
            BS12,
            LCK12,
            AFRH,
            AFRH12,
            BR12,
        }
        map_gpio_pin! {
            $port_ty,
            $pin13_macro_doc,
            $pin13_macro,
            $pin13_ty_doc,
            $pin13_ty,
            $gpio,
            MODER13,
            OT13,
            OSPEEDR13,
            PUPDR13,
            IDR13,
            ODR13,
            BR13,
            BS13,
            LCK13,
            AFRH,
            AFRH13,
            BR13,
        }
        map_gpio_pin! {
            $port_ty,
            $pin14_macro_doc,
            $pin14_macro,
            $pin14_ty_doc,
            $pin14_ty,
            $gpio,
            MODER14,
            OT14,
            OSPEEDR14,
            PUPDR14,
            IDR14,
            ODR14,
            BR14,
            BS14,
            LCK14,
            AFRH,
            AFRH14,
            BR14,
        }
        map_gpio_pin! {
            $port_ty,
            $pin15_macro_doc,
            $pin15_macro,
            $pin15_ty_doc,
            $pin15_ty,
            $gpio,
            MODER15,
            OT15,
            OSPEEDR15,
            PUPDR15,
            IDR15,
            ODR15,
            BR15,
            BS15,
            LCK15,
            AFRH,
            AFRH15,
            BR15,
        }
    };
}
// GPIO port A: peripheral mappings for pins PA0-PA15.
map_gpio_pins! {
    GpioAHead,
    "Extracts GPIO port A pin 0 register tokens.",
    periph_gpio_a0,
    "GPIO port A pin 0 peripheral variant.",
    GpioA0,
    "Extracts GPIO port A pin 1 register tokens.",
    periph_gpio_a1,
    "GPIO port A pin 1 peripheral variant.",
    GpioA1,
    "Extracts GPIO port A pin 2 register tokens.",
    periph_gpio_a2,
    "GPIO port A pin 2 peripheral variant.",
    GpioA2,
    "Extracts GPIO port A pin 3 register tokens.",
    periph_gpio_a3,
    "GPIO port A pin 3 peripheral variant.",
    GpioA3,
    "Extracts GPIO port A pin 4 register tokens.",
    periph_gpio_a4,
    "GPIO port A pin 4 peripheral variant.",
    GpioA4,
    "Extracts GPIO port A pin 5 register tokens.",
    periph_gpio_a5,
    "GPIO port A pin 5 peripheral variant.",
    GpioA5,
    "Extracts GPIO port A pin 6 register tokens.",
    periph_gpio_a6,
    "GPIO port A pin 6 peripheral variant.",
    GpioA6,
    "Extracts GPIO port A pin 7 register tokens.",
    periph_gpio_a7,
    "GPIO port A pin 7 peripheral variant.",
    GpioA7,
    "Extracts GPIO port A pin 8 register tokens.",
    periph_gpio_a8,
    "GPIO port A pin 8 peripheral variant.",
    GpioA8,
    "Extracts GPIO port A pin 9 register tokens.",
    periph_gpio_a9,
    "GPIO port A pin 9 peripheral variant.",
    GpioA9,
    "Extracts GPIO port A pin 10 register tokens.",
    periph_gpio_a10,
    "GPIO port A pin 10 peripheral variant.",
    GpioA10,
    "Extracts GPIO port A pin 11 register tokens.",
    periph_gpio_a11,
    "GPIO port A pin 11 peripheral variant.",
    GpioA11,
    "Extracts GPIO port A pin 12 register tokens.",
    periph_gpio_a12,
    "GPIO port A pin 12 peripheral variant.",
    GpioA12,
    "Extracts GPIO port A pin 13 register tokens.",
    periph_gpio_a13,
    "GPIO port A pin 13 peripheral variant.",
    GpioA13,
    "Extracts GPIO port A pin 14 register tokens.",
    periph_gpio_a14,
    "GPIO port A pin 14 peripheral variant.",
    GpioA14,
    "Extracts GPIO port A pin 15 register tokens.",
    periph_gpio_a15,
    "GPIO port A pin 15 peripheral variant.",
    GpioA15,
    GPIOA,
}
// GPIO port B: peripheral mappings for pins PB0-PB15.
map_gpio_pins! {
    GpioBHead,
    "Extracts GPIO port B pin 0 register tokens.",
    periph_gpio_b0,
    "GPIO port B pin 0 peripheral variant.",
    GpioB0,
    "Extracts GPIO port B pin 1 register tokens.",
    periph_gpio_b1,
    "GPIO port B pin 1 peripheral variant.",
    GpioB1,
    "Extracts GPIO port B pin 2 register tokens.",
    periph_gpio_b2,
    "GPIO port B pin 2 peripheral variant.",
    GpioB2,
    "Extracts GPIO port B pin 3 register tokens.",
    periph_gpio_b3,
    "GPIO port B pin 3 peripheral variant.",
    GpioB3,
    "Extracts GPIO port B pin 4 register tokens.",
    periph_gpio_b4,
    "GPIO port B pin 4 peripheral variant.",
    GpioB4,
    "Extracts GPIO port B pin 5 register tokens.",
    periph_gpio_b5,
    "GPIO port B pin 5 peripheral variant.",
    GpioB5,
    "Extracts GPIO port B pin 6 register tokens.",
    periph_gpio_b6,
    "GPIO port B pin 6 peripheral variant.",
    GpioB6,
    "Extracts GPIO port B pin 7 register tokens.",
    periph_gpio_b7,
    "GPIO port B pin 7 peripheral variant.",
    GpioB7,
    "Extracts GPIO port B pin 8 register tokens.",
    periph_gpio_b8,
    "GPIO port B pin 8 peripheral variant.",
    GpioB8,
    "Extracts GPIO port B pin 9 register tokens.",
    periph_gpio_b9,
    "GPIO port B pin 9 peripheral variant.",
    GpioB9,
    "Extracts GPIO port B pin 10 register tokens.",
    periph_gpio_b10,
    "GPIO port B pin 10 peripheral variant.",
    GpioB10,
    "Extracts GPIO port B pin 11 register tokens.",
    periph_gpio_b11,
    "GPIO port B pin 11 peripheral variant.",
    GpioB11,
    "Extracts GPIO port B pin 12 register tokens.",
    periph_gpio_b12,
    "GPIO port B pin 12 peripheral variant.",
    GpioB12,
    "Extracts GPIO port B pin 13 register tokens.",
    periph_gpio_b13,
    "GPIO port B pin 13 peripheral variant.",
    GpioB13,
    "Extracts GPIO port B pin 14 register tokens.",
    periph_gpio_b14,
    "GPIO port B pin 14 peripheral variant.",
    GpioB14,
    "Extracts GPIO port B pin 15 register tokens.",
    periph_gpio_b15,
    "GPIO port B pin 15 peripheral variant.",
    GpioB15,
    GPIOB,
}
// GPIO port C: peripheral mappings for pins PC0-PC15.
map_gpio_pins! {
    GpioCHead,
    "Extracts GPIO port C pin 0 register tokens.",
    periph_gpio_c0,
    "GPIO port C pin 0 peripheral variant.",
    GpioC0,
    "Extracts GPIO port C pin 1 register tokens.",
    periph_gpio_c1,
    "GPIO port C pin 1 peripheral variant.",
    GpioC1,
    "Extracts GPIO port C pin 2 register tokens.",
    periph_gpio_c2,
    "GPIO port C pin 2 peripheral variant.",
    GpioC2,
    "Extracts GPIO port C pin 3 register tokens.",
    periph_gpio_c3,
    "GPIO port C pin 3 peripheral variant.",
    GpioC3,
    "Extracts GPIO port C pin 4 register tokens.",
    periph_gpio_c4,
    "GPIO port C pin 4 peripheral variant.",
    GpioC4,
    "Extracts GPIO port C pin 5 register tokens.",
    periph_gpio_c5,
    "GPIO port C pin 5 peripheral variant.",
    GpioC5,
    "Extracts GPIO port C pin 6 register tokens.",
    periph_gpio_c6,
    "GPIO port C pin 6 peripheral variant.",
    GpioC6,
    "Extracts GPIO port C pin 7 register tokens.",
    periph_gpio_c7,
    "GPIO port C pin 7 peripheral variant.",
    GpioC7,
    "Extracts GPIO port C pin 8 register tokens.",
    periph_gpio_c8,
    "GPIO port C pin 8 peripheral variant.",
    GpioC8,
    "Extracts GPIO port C pin 9 register tokens.",
    periph_gpio_c9,
    "GPIO port C pin 9 peripheral variant.",
    GpioC9,
    "Extracts GPIO port C pin 10 register tokens.",
    periph_gpio_c10,
    "GPIO port C pin 10 peripheral variant.",
    GpioC10,
    "Extracts GPIO port C pin 11 register tokens.",
    periph_gpio_c11,
    "GPIO port C pin 11 peripheral variant.",
    GpioC11,
    "Extracts GPIO port C pin 12 register tokens.",
    periph_gpio_c12,
    "GPIO port C pin 12 peripheral variant.",
    GpioC12,
    "Extracts GPIO port C pin 13 register tokens.",
    periph_gpio_c13,
    "GPIO port C pin 13 peripheral variant.",
    GpioC13,
    "Extracts GPIO port C pin 14 register tokens.",
    periph_gpio_c14,
    "GPIO port C pin 14 peripheral variant.",
    GpioC14,
    "Extracts GPIO port C pin 15 register tokens.",
    periph_gpio_c15,
    "GPIO port C pin 15 peripheral variant.",
    GpioC15,
    GPIOC,
}
// GPIO port D: peripheral mappings for pins PD0-PD15.
map_gpio_pins! {
    GpioDHead,
    "Extracts GPIO port D pin 0 register tokens.",
    periph_gpio_d0,
    "GPIO port D pin 0 peripheral variant.",
    GpioD0,
    "Extracts GPIO port D pin 1 register tokens.",
    periph_gpio_d1,
    "GPIO port D pin 1 peripheral variant.",
    GpioD1,
    "Extracts GPIO port D pin 2 register tokens.",
    periph_gpio_d2,
    "GPIO port D pin 2 peripheral variant.",
    GpioD2,
    "Extracts GPIO port D pin 3 register tokens.",
    periph_gpio_d3,
    "GPIO port D pin 3 peripheral variant.",
    GpioD3,
    "Extracts GPIO port D pin 4 register tokens.",
    periph_gpio_d4,
    "GPIO port D pin 4 peripheral variant.",
    GpioD4,
    "Extracts GPIO port D pin 5 register tokens.",
    periph_gpio_d5,
    "GPIO port D pin 5 peripheral variant.",
    GpioD5,
    "Extracts GPIO port D pin 6 register tokens.",
    periph_gpio_d6,
    "GPIO port D pin 6 peripheral variant.",
    GpioD6,
    "Extracts GPIO port D pin 7 register tokens.",
    periph_gpio_d7,
    "GPIO port D pin 7 peripheral variant.",
    GpioD7,
    "Extracts GPIO port D pin 8 register tokens.",
    periph_gpio_d8,
    "GPIO port D pin 8 peripheral variant.",
    GpioD8,
    "Extracts GPIO port D pin 9 register tokens.",
    periph_gpio_d9,
    "GPIO port D pin 9 peripheral variant.",
    GpioD9,
    "Extracts GPIO port D pin 10 register tokens.",
    periph_gpio_d10,
    "GPIO port D pin 10 peripheral variant.",
    GpioD10,
    "Extracts GPIO port D pin 11 register tokens.",
    periph_gpio_d11,
    "GPIO port D pin 11 peripheral variant.",
    GpioD11,
    "Extracts GPIO port D pin 12 register tokens.",
    periph_gpio_d12,
    "GPIO port D pin 12 peripheral variant.",
    GpioD12,
    "Extracts GPIO port D pin 13 register tokens.",
    periph_gpio_d13,
    "GPIO port D pin 13 peripheral variant.",
    GpioD13,
    "Extracts GPIO port D pin 14 register tokens.",
    periph_gpio_d14,
    "GPIO port D pin 14 peripheral variant.",
    GpioD14,
    "Extracts GPIO port D pin 15 register tokens.",
    periph_gpio_d15,
    "GPIO port D pin 15 peripheral variant.",
    GpioD15,
    GPIOD,
}
// GPIO port E: peripheral mappings for pins PE0-PE15.
map_gpio_pins! {
    GpioEHead,
    "Extracts GPIO port E pin 0 register tokens.",
    periph_gpio_e0,
    "GPIO port E pin 0 peripheral variant.",
    GpioE0,
    "Extracts GPIO port E pin 1 register tokens.",
    periph_gpio_e1,
    "GPIO port E pin 1 peripheral variant.",
    GpioE1,
    "Extracts GPIO port E pin 2 register tokens.",
    periph_gpio_e2,
    "GPIO port E pin 2 peripheral variant.",
    GpioE2,
    "Extracts GPIO port E pin 3 register tokens.",
    periph_gpio_e3,
    "GPIO port E pin 3 peripheral variant.",
    GpioE3,
    "Extracts GPIO port E pin 4 register tokens.",
    periph_gpio_e4,
    "GPIO port E pin 4 peripheral variant.",
    GpioE4,
    "Extracts GPIO port E pin 5 register tokens.",
    periph_gpio_e5,
    "GPIO port E pin 5 peripheral variant.",
    GpioE5,
    "Extracts GPIO port E pin 6 register tokens.",
    periph_gpio_e6,
    "GPIO port E pin 6 peripheral variant.",
    GpioE6,
    "Extracts GPIO port E pin 7 register tokens.",
    periph_gpio_e7,
    "GPIO port E pin 7 peripheral variant.",
    GpioE7,
    "Extracts GPIO port E pin 8 register tokens.",
    periph_gpio_e8,
    "GPIO port E pin 8 peripheral variant.",
    GpioE8,
    "Extracts GPIO port E pin 9 register tokens.",
    periph_gpio_e9,
    "GPIO port E pin 9 peripheral variant.",
    GpioE9,
    "Extracts GPIO port E pin 10 register tokens.",
    periph_gpio_e10,
    "GPIO port E pin 10 peripheral variant.",
    GpioE10,
    "Extracts GPIO port E pin 11 register tokens.",
    periph_gpio_e11,
    "GPIO port E pin 11 peripheral variant.",
    GpioE11,
    "Extracts GPIO port E pin 12 register tokens.",
    periph_gpio_e12,
    "GPIO port E pin 12 peripheral variant.",
    GpioE12,
    "Extracts GPIO port E pin 13 register tokens.",
    periph_gpio_e13,
    "GPIO port E pin 13 peripheral variant.",
    GpioE13,
    "Extracts GPIO port E pin 14 register tokens.",
    periph_gpio_e14,
    "GPIO port E pin 14 peripheral variant.",
    GpioE14,
    "Extracts GPIO port E pin 15 register tokens.",
    periph_gpio_e15,
    "GPIO port E pin 15 peripheral variant.",
    GpioE15,
    GPIOE,
}
// GPIO port F: peripheral mappings for pins PF0-PF15.
map_gpio_pins! {
    GpioFHead,
    "Extracts GPIO port F pin 0 register tokens.",
    periph_gpio_f0,
    "GPIO port F pin 0 peripheral variant.",
    GpioF0,
    "Extracts GPIO port F pin 1 register tokens.",
    periph_gpio_f1,
    "GPIO port F pin 1 peripheral variant.",
    GpioF1,
    "Extracts GPIO port F pin 2 register tokens.",
    periph_gpio_f2,
    "GPIO port F pin 2 peripheral variant.",
    GpioF2,
    "Extracts GPIO port F pin 3 register tokens.",
    periph_gpio_f3,
    "GPIO port F pin 3 peripheral variant.",
    GpioF3,
    "Extracts GPIO port F pin 4 register tokens.",
    periph_gpio_f4,
    "GPIO port F pin 4 peripheral variant.",
    GpioF4,
    "Extracts GPIO port F pin 5 register tokens.",
    periph_gpio_f5,
    "GPIO port F pin 5 peripheral variant.",
    GpioF5,
    "Extracts GPIO port F pin 6 register tokens.",
    periph_gpio_f6,
    "GPIO port F pin 6 peripheral variant.",
    GpioF6,
    "Extracts GPIO port F pin 7 register tokens.",
    periph_gpio_f7,
    "GPIO port F pin 7 peripheral variant.",
    GpioF7,
    "Extracts GPIO port F pin 8 register tokens.",
    periph_gpio_f8,
    "GPIO port F pin 8 peripheral variant.",
    GpioF8,
    "Extracts GPIO port F pin 9 register tokens.",
    periph_gpio_f9,
    "GPIO port F pin 9 peripheral variant.",
    GpioF9,
    "Extracts GPIO port F pin 10 register tokens.",
    periph_gpio_f10,
    "GPIO port F pin 10 peripheral variant.",
    GpioF10,
    "Extracts GPIO port F pin 11 register tokens.",
    periph_gpio_f11,
    "GPIO port F pin 11 peripheral variant.",
    GpioF11,
    "Extracts GPIO port F pin 12 register tokens.",
    periph_gpio_f12,
    "GPIO port F pin 12 peripheral variant.",
    GpioF12,
    "Extracts GPIO port F pin 13 register tokens.",
    periph_gpio_f13,
    "GPIO port F pin 13 peripheral variant.",
    GpioF13,
    "Extracts GPIO port F pin 14 register tokens.",
    periph_gpio_f14,
    "GPIO port F pin 14 peripheral variant.",
    GpioF14,
    "Extracts GPIO port F pin 15 register tokens.",
    periph_gpio_f15,
    "GPIO port F pin 15 peripheral variant.",
    GpioF15,
    GPIOF,
}
| 25.757098 | 74 | 0.523944 |
0e1c507e994be3c58039e03c84f58ac1d482796f | 3,170 | // Copyright 2019. The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use std::fmt::{Display, Formatter};
use serde::{Deserialize, Serialize};
use crate::{
blocks::{new_blockheader_template::NewBlockHeaderTemplate, Block},
proof_of_work::Difficulty,
transactions::{aggregated_body::AggregateBody, tari_amount::MicroTari},
};
/// The new block template is used when constructing a new partial block: it allows a miner to add the coinbase UTXO
/// and, as a final step, the base node to add the MMR roots to the header.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NewBlockTemplate {
    /// Partial header; the MMR roots are still to be filled in by the base node.
    pub header: NewBlockHeaderTemplate,
    /// Aggregated transaction body for the block.
    pub body: AggregateBody,
    /// Proof-of-work target difficulty the miner must meet.
    pub target_difficulty: Difficulty,
    /// Block reward for the miner.
    pub reward: MicroTari,
    /// Sum of all transaction fees in `body` (see `AggregateBody::get_total_fee`).
    pub total_fees: MicroTari,
}
impl NewBlockTemplate {
pub fn from_block(block: Block, target_difficulty: Difficulty, reward: MicroTari) -> Self {
let Block { header, body } = block;
let total_fees = body.get_total_fee();
Self {
header: NewBlockHeaderTemplate::from_header(header),
body,
target_difficulty,
reward,
total_fees,
}
}
}
impl Display for NewBlockTemplate {
    /// Renders the template as a human-readable report: a banner, the
    /// header, the body, then the difficulty/reward/fee summary.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(
            f,
            "----------------- Block template-----------------\n\
             --- Header ---\n\
             {}\n\
             --- Body ---\n\
             {}\n\
             Target difficulty: {}\nReward: {}\nTotal fees: {}\n",
            self.header, self.body, self.target_difficulty, self.reward, self.total_fees
        )
    }
}
| 43.424658 | 119 | 0.691167 |
b98fcdcf061b74c1b8349baba5edd6517cf4e0a7 | 3,503 | #[doc = "Reader of register PUBLISH_USBREMOVED"]
pub type R = crate::R<u32, super::PUBLISH_USBREMOVED>;
#[doc = "Writer for register PUBLISH_USBREMOVED"]
pub type W = crate::W<u32, super::PUBLISH_USBREMOVED>;
#[doc = "Register PUBLISH_USBREMOVED `reset()`'s with value 0"]
impl crate::ResetValue for super::PUBLISH_USBREMOVED {
    type Type = u32;
    // `reset()` restores the register to its documented power-on value of 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CHIDX`"]
pub type CHIDX_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CHIDX`"]
pub struct CHIDX_W<'a> {
    // Borrows the register writer so field writes can be chained.
    w: &'a mut W,
}
impl<'a> CHIDX_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Unsafe because any 8-bit pattern is accepted; presumably only valid
    // channel indices are meaningful to the hardware — confirm against the
    // device's PPI/DPPI channel count.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 0..=7, then splice in the new value (masked to 8 bits).
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EN_A {
    #[doc = "0: Disable publishing"]
    DISABLED,
    #[doc = "1: Enable publishing"]
    ENABLED,
}
impl From<EN_A> for bool {
    #[inline(always)]
    fn from(variant: EN_A) -> Self {
        // Each variant encodes the single bit value it represents:
        // ENABLED -> true (1), DISABLED -> false (0).
        matches!(variant, EN_A::ENABLED)
    }
}
#[doc = "Reader of field `EN`"]
pub type EN_R = crate::R<bool, EN_A>;
impl EN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EN_A {
        // The raw bit maps one-to-one onto the two enumerated values.
        if self.bits {
            EN_A::ENABLED
        } else {
            EN_A::DISABLED
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        matches!(self.variant(), EN_A::DISABLED)
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        matches!(self.variant(), EN_A::ENABLED)
    }
}
#[doc = "Write proxy for field `EN`"]
pub struct EN_W<'a> {
    // Borrows the register writer so field writes can be chained.
    w: &'a mut W,
}
impl<'a> EN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Disable publishing"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(EN_A::DISABLED)
    }
    #[doc = "Enable publishing"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(EN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 31, then set it to `value` (0 or 1).
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - Channel that event USBREMOVED will publish to."]
    #[inline(always)]
    pub fn chidx(&self) -> CHIDX_R {
        // Low byte of the register holds the channel index.
        CHIDX_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bit 31"]
    #[inline(always)]
    pub fn en(&self) -> EN_R {
        // Bit 31 is the publish-enable flag.
        EN_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:7 - Channel that event USBREMOVED will publish to."]
    #[inline(always)]
    pub fn chidx(&mut self) -> CHIDX_W {
        CHIDX_W { w: self }
    }
    #[doc = "Bit 31"]
    #[inline(always)]
    pub fn en(&mut self) -> EN_W {
        EN_W { w: self }
    }
}
| 27.155039 | 86 | 0.544105 |
e278ae119d9f26e92daa135117ac145330110978 | 1,674 | mod app;
mod app_builder;
mod event;
mod plugin;
mod plugin_group;
mod schedule_runner;
pub use app::*;
pub use app_builder::*;
pub use bevy_derive::DynamicPlugin;
pub use event::*;
pub use plugin::*;
pub use plugin_group::*;
pub use schedule_runner::*;
/// Convenient re-exports of the most commonly used app types.
pub mod prelude {
    pub use crate::{
        app::App,
        app_builder::AppBuilder,
        event::{EventReader, Events},
        CoreStage, DynamicPlugin, Plugin, PluginGroup, StartupStage,
    };
}
use bevy_ecs::StageLabel;
/// The names of the default App stages.
#[derive(Debug, Hash, PartialEq, Eq, Clone, StageLabel)]
pub enum CoreStage {
    /// Runs once at the beginning of the app.
    Startup,
    /// The stage that runs before all other per-update app stages.
    First,
    /// The stage that runs immediately before [`CoreStage::Event`].
    PreEvent,
    /// The stage that updates events; runs before [`CoreStage::Update`].
    Event,
    /// The stage for performing setup before an update; runs before [`CoreStage::Update`].
    PreUpdate,
    /// The stage hosting most app logic; systems are registered here by default.
    Update,
    /// The stage that processes the results of [`CoreStage::Update`]; runs after it.
    PostUpdate,
    /// The stage that runs after all other per-update app stages.
    Last,
}
/// The names of the default App startup stages, each run exactly once.
#[derive(Debug, Hash, PartialEq, Eq, Clone, StageLabel)]
pub enum StartupStage {
    /// Runs once, before [`StartupStage::Startup`].
    PreStartup,
    /// Runs once when the app starts up.
    Startup,
    /// Runs once, after [`StartupStage::Startup`].
    PostStartup,
}
| 29.368421 | 109 | 0.692354 |
fc93b2d12dcfb3d57108b27b280cebcf8a305983 | 5,952 | use crate::read_lines;
use std::collections::{HashMap, HashSet};
use integer_sqrt::IntegerSquareRoot;
use nalgebra as na;
use once_cell::sync::Lazy;
// Beacon displacement / position vector.
type V = na::Vector3<isize>;
// Scanner position (a point, to distinguish it from displacements).
type A = na::Point3<isize>;
// Orientation matrix (signed axis permutation).
type R = na::Matrix3<isize>;
#[derive(Clone)]
struct Scanner {
    // Beacon positions, initially relative to this scanner.
    beacons: Vec<V>,
    // This scanner's position once aligned; starts at the origin.
    origin: A,
    // Orientation-invariant pair "fingerprint" (sorted abs component
    // differences) -> the beacon index pairs that produced it.
    dist_map: HashMap<V, Vec<(usize, usize)>>,
}
/// Component-wise absolute difference of two vectors with the components
/// sorted ascending — an orientation-invariant fingerprint of the
/// displacement between two beacons.
fn sort_dist(x: &V, y: &V) -> V {
    let mut diffs: Vec<isize> = x
        .iter()
        .zip(y.iter())
        .map(|(a, b)| (a - b).abs())
        .collect();
    diffs.sort_unstable();
    V::new(diffs[0], diffs[1], diffs[2])
}
/// All 48 signed axis-permutation matrices (|x|,|y|,|z| a permutation of
/// {1,2,3}, each with either sign): the 24 proper rotations plus 24
/// reflections. The extra reflections just add candidates that `align`'s
/// consistency check rejects, at the cost of some wasted work.
static ORIENTS: Lazy<Vec<na::Matrix3<isize>>> = Lazy::new(|| {
    let mut orients = vec![];
    for x in -3..=3isize {
        for y in -3..=3isize {
            for z in -3..=3isize {
                // Each coordinate encodes (source axis = |v|, sign = sign(v)).
                if x == 0 || y == 0 || z == 0 {
                    continue;
                }
                let xa = x.abs() as usize;
                let ya = y.abs() as usize;
                let za = z.abs() as usize;
                // Require |x|,|y|,|z| pairwise distinct, i.e. a permutation.
                if xa == ya || ya == za || za == xa {
                    continue;
                }
                let mut m = na::Matrix3::zeros();
                // Row i reads source axis |v|-1, negated when v is negative.
                m[(0, (xa-1) as usize)] = x.signum();
                m[(1, (ya-1) as usize)] = y.signum();
                m[(2, (za-1) as usize)] = z.signum();
                orients.push(m);
            }
        }
    }
    orients
});
impl Scanner {
    /// Creates an empty scanner anchored at the origin.
    fn new() -> Scanner {
        Scanner {
            beacons: vec![],
            dist_map: HashMap::new(),
            origin: A::new(0, 0, 0),
        }
    }
    /// Adds a beacon, recording the `sort_dist` fingerprint between it and
    /// every beacon already known. `dist_map` then maps each fingerprint to
    /// the index pairs that produced it.
    fn add_beacon(&mut self, beacon: &V) {
        let n = self.beacons.len();
        for (i, b) in self.beacons.iter().enumerate() {
            let sd = sort_dist(b, beacon);
            self.dist_map.entry(sd).or_default().push((i, n));
        }
        self.beacons.push(*beacon);
    }
    /// Number of pairwise fingerprints shared with `other`; a cheap overlap
    /// test used before attempting a full alignment (12 shared beacons give
    /// 12*11/2 = 66 shared fingerprints).
    fn len_intersect(&self, other: &Self) -> usize {
        let a: HashSet<V> = self.dist_map.keys().cloned().collect();
        let b: HashSet<V> = other.dist_map.keys().cloned().collect();
        a.intersection(&b).count()
    }
    /// Returns a copy of this scanner translated by `v` and rotated by `r`.
    /// NOTE(review): beacons are mapped through `self.origin + v + r * b`,
    /// which assumes the beacons are still in this scanner's own frame
    /// (origin at zero) — confirm against `align`'s usage below.
    fn reorient(&self, v: V, r: &R) -> Self {
        let b: Vec<V> = self
            .beacons
            .iter()
            .map(|b| {
                (self.origin + v + r * b).coords
            })
            .collect();
        Scanner {
            origin: self.origin + v,
            beacons: b,
            // Fingerprints are orientation/translation invariant, so the
            // map can be reused as-is.
            dist_map: self.dist_map.clone(),
        }
    }
    // relative position, re-orient
    /// Aligns `other` into `self`'s frame and returns the reoriented copy.
    /// Panics if no orientation produces a consistent offset.
    fn align(&self, other: &Self) -> Self {
        assert!(self.len_intersect(other) >= 66);
        // For each of our beacon indices: the set of `other` beacon indices
        // it could correspond to, narrowed by intersecting the candidate
        // pairs contributed by every shared fingerprint.
        let mut b2b: HashMap<usize, HashSet<usize>> = HashMap::new();
        for (k, v) in self.dist_map.iter() {
            if other.dist_map.contains_key(k) {
                // each in the pair can only map to the other pair.
                assert!(v.len() == 1 || other.dist_map[k].len() == 1);
                let r = v[0];
                let v2 = other.dist_map[k][0];
                let s: HashSet<usize> = [v2.0, v2.1].iter().cloned().collect();
                let n = b2b
                    .entry(r.0)
                    .or_insert_with(|| s.clone())
                    .intersection(&s)
                    .cloned()
                    .collect();
                b2b.insert(r.0, n);
                let n = b2b
                    .entry(r.1)
                    .or_insert_with(|| s.clone())
                    .intersection(&s)
                    .cloned()
                    .collect();
                b2b.insert(r.1, n);
            }
        }
        // 1-1 mapping across common beacons: try each orientation and
        // accept the first whose translation offset agrees on four
        // matched beacon pairs.
        for ori in &*ORIENTS {
            // NOTE(review): `bs` does not depend on `ori` and could be
            // hoisted out of this loop.
            let bs: Vec<(usize, usize)> = b2b
                .iter()
                .take(4)
                .map(|(a, b)| (*a, *b.iter().next().unwrap()))
                .collect();
            let o1 = {
                let b1 = self.beacons[bs[0].0];
                let b2 = other.beacons[bs[0].1];
                b1 - ori * b2
            };
            let o2 = {
                let b1 = self.beacons[bs[1].0];
                let b2 = other.beacons[bs[1].1];
                b1 - ori * b2
            };
            let o3 = {
                let b1 = self.beacons[bs[2].0];
                let b2 = other.beacons[bs[2].1];
                b1 - ori * b2
            };
            let o4 = {
                let b1 = self.beacons[bs[3].0];
                let b2 = other.beacons[bs[3].1];
                b1 - ori * b2
            };
            if o1 == o2 && o2 == o3 && o3 == o4 {
                return other.reorient(o1, ori);
            }
        }
        panic!();
    }
}
/// Composes two orientation matrices; applying the result is `r1` then `r2`.
#[allow(unused)]
fn compose_alignment(r1: &R, r2: &R) -> R {
    r2 * r1
}
/// Applies orientation `r` to the vector `v`.
#[allow(unused)]
fn orient_location(v: &V, r: &R) -> V {
    r * v
}
/// AoC 2021 day 19: parses scanner reports, counts distinct beacons
/// (part 1) and prints the maximum Manhattan distance between any two
/// scanner origins (part 2).
pub fn day19() {
    let lines = read_lines("input/day19.txt", false).unwrap();
    let mut curr = Scanner::new();
    let mut scanners = vec![];
    // Parse: a blank line ends a scanner block, "--- scanner N ---" headers
    // are skipped, everything else is an "x,y,z" beacon coordinate.
    for line in lines {
        if line.is_empty() {
            scanners.push(curr);
            curr = Scanner::new()
        } else if line.contains("scanner") {
        } else {
            let toks: Vec<_> = line.split(",").map(|x| x.parse::<isize>().unwrap()).collect();
            let v = V::new(toks[0], toks[1], toks[2]);
            curr.add_beacon(&v);
        }
    }
    // NOTE(review): the final scanner is only pushed if the input ends with
    // a blank line — confirm the input file's trailing newline convention.
    let mut total_beacons: usize = scanners.iter().map(|x| x.beacons.len()).sum();
    let min_beacons = 12;
    // 12 shared beacons imply 12*11/2 = 66 shared pair fingerprints.
    let min_rel = min_beacons * (min_beacons - 1) / 2;
    let mut orients: Vec<_> = vec![];
    for i in 0..scanners.len() {
        for j in i + 1..scanners.len() {
            let li = scanners[i].len_intersect(&scanners[j]);
            if li >= min_rel {
                // Recover k (shared beacon count) from k*(k-1)/2 shared
                // fingerprints and deduct the duplicates from the total.
                total_beacons -= (li * 2).integer_sqrt() + 1;
                // Record the overlap edge in both directions.
                orients.push((i, j));
                orients.push((j, i));
            }
        }
    }
    println!("{}", total_beacons);
    // find the positions and orient of all relative to 0:
    // breadth-first walk over the overlap graph, aligning each newly
    // reachable scanner against an already-aligned neighbour.
    let mut found: Vec<Option<Scanner>> = vec![None; scanners.len()];
    found[0] = Some(scanners[0].clone());
    let mut active = vec![0];
    while let Some(a) = active.pop() {
        // go through every alignment of a with something.
        for (i, j) in &orients {
            if *i == a && found[*j].is_none() {
                found[*j] = Some(found[*i].as_ref().unwrap().align(&scanners[*j]));
                active.push(*j);
            }
        }
    }
    // Sanity check: report any scanner the walk failed to reach.
    for x in 0..scanners.len() {
        if found[x].is_none() {
            println!("bad: {}", x);
        }
    }
    // Part 2: maximum pairwise Manhattan distance between scanner origins.
    let mut max_dist = 0;
    for i in 0..found.len() {
        for j in i+1..found.len() {
            let d = found[i].as_ref().unwrap().origin - found[j].as_ref().unwrap().origin;
            max_dist = std::cmp::max(max_dist, d[0].abs() + d[1].abs() + d[2].abs());
        }
    }
    println!("{}", max_dist);
}
| 24.393443 | 87 | 0.544523 |
ff1747e55e6b81ff089a73bb3216fefe381dcd5a | 1,200 | #[derive(PartialEq,Debug)]
pub struct USD(f32); // we are not ritch so f32 will be enough
#[derive(PartialEq,Debug)]
pub struct GBP(f32); // Brits people can be ritch but anyway ...
#[derive(PartialEq,Debug)]
pub struct TL(f32); // The poorest & the cheapest one ...
pub trait ToUSDv<F>
{
fn to_uv(&self,f:F) -> f32; //where F is any type
}
pub trait FromUSDv<F>
{
fn from_uv(&self,f:f32)->F; //where F is any type
}
pub struct Ex{
tl:f32,
gbp:f32,
}
impl ToUSDv<GBP> for Ex {
    /// Converts a pound amount into US dollars using this table's GBP rate.
    fn to_uv(&self, g: GBP) -> f32 {
        self.gbp * (g.0 as f32)
    }
}
impl FromUSDv<TL> for Ex {
    /// Converts a US-dollar amount into lira using this table's TL rate.
    fn from_uv(&self, f: f32) -> TL {
        let lira = f / self.tl;
        TL(lira)
    }
}
// `F` is the source ("from") currency type and `T` the target ("to") type.
pub trait Exchange<F,T> {
    fn convert(&self,f:F)->T;
}
/// Blanket impl: anything that can convert `F` to USD and USD to `T`
/// automatically converts `F` to `T` by pivoting through USD.
impl <E,F,T> Exchange<F,T> for E
where E:ToUSDv<F> + FromUSDv<T>
{
    fn convert(&self,f:F) -> T{
        self.from_uv(self.to_uv(f))
    }
}
/// Demonstrates the blanket `Exchange` impl by converting 200 GBP to TL
/// via USD: 200 * 1.3 = 260 USD, then 260 / 0.7 ≈ 371.43 TL.
pub fn test_it()
{
    let g = GBP(200.0);
    let ex = Ex{ tl: 0.7, gbp: 1.3 };
    let c = ex.convert(g);
    // The original compared against TL(371.), which the correct result
    // (≈371.43) can never equal, so the success branch was unreachable.
    // Compare against the true expectation with a small tolerance instead
    // of exact f32 equality.
    let expected = 200.0 * 1.3 / 0.7;
    if (c.0 - expected).abs() < 1e-3 {
        println!("Holly molly works !");
    }else{
        println!("Nope calculation is wrong !");
    }
}
| 18.75 | 64 | 0.558333 |
1ade0891216a79d8fdd8e5cbf1e656ab2ffb9c4c | 3,821 | use anyhow::{bail, Result};
use chrono::DateTime;
use chrono::Utc;
use rust_decimal::prelude::*;
use rust_decimal::Decimal;
use rust_decimal_macros::dec;
use rusty_money::{
iso::{self, Currency},
ExchangeRate, Money, MoneyError,
};
use sqlx::types::BigDecimal;
/// Conversion into sqlx's `BigDecimal`.
pub(crate) trait ToBigDecimal {
    fn to_bigdecimal(&self) -> BigDecimal;
}
impl ToBigDecimal for f64 {
    fn to_bigdecimal(&self) -> BigDecimal {
        // Unrepresentable inputs (e.g. NaN) fall back to the default (0).
        BigDecimal::from_f64(*self).unwrap_or_default()
    }
}
/// Free-function form of [`ToBigDecimal::to_bigdecimal`].
pub(crate) fn to_bigdecimal<F64: ToBigDecimal>(f: F64) -> BigDecimal {
    F64::to_bigdecimal(&f)
}
/// Rounding to two decimal places, returned as `f64`.
pub(crate) trait RoundTwo {
    fn round_two(&self) -> f64;
}
impl RoundTwo for BigDecimal {
    fn round_two(&self) -> f64 {
        // Values not representable as f64 fall back to 0.0 before rounding.
        let to = self.to_f64().unwrap_or_default();
        (to * 100.0).round() / 100.0
    }
}
/// Free-function form of [`RoundTwo::round_two`].
pub(crate) fn round_two<R: RoundTwo>(value: &R) -> f64 {
    R::round_two(value)
}
/// Hard-coded exchange rate from one currency to another.
trait ConversionRate {
    fn get_rate_for(&self, foreign: &Currency) -> Result<Decimal>;
}
// source: https://mercati.ilsole24ore.com/tassi-e-valute/valute/contro-euro/cambio/EURUS.FX
impl ConversionRate for Currency {
    // Only EUR and USD pairs are supported; any other combination errors.
    fn get_rate_for(&self, foreign: &Currency) -> Result<Decimal> {
        match (self.iso_alpha_code, foreign.iso_alpha_code) {
            ("EUR", "EUR") => Ok(dec!(1)),
            ("USD", "USD") => Ok(dec!(1)),
            ("EUR", "USD") => Ok(dec!(1.131857)),
            ("USD", "EUR") => Ok(dec!(0.86207)),
            _ => bail!("Currency not supported yet! :( Feel free to open a PR :)!"),
        }
    }
}
/// Converts `amount` from currency `from` to currency `to` using `rate`.
///
/// Errors when either currency code is unknown, the amount is not
/// representable as a `Decimal`, or the converted value cannot be turned
/// back into an `f64`.
pub(crate) fn exchange(from: &str, to: &str, amount: f64, rate: Decimal) -> Result<f64> {
    let src = iso::find(from).ok_or(MoneyError::InvalidCurrency)?;
    let dst = iso::find(to).ok_or(MoneyError::InvalidCurrency)?;
    let decimal_amount = Decimal::from_f64(amount).ok_or(MoneyError::InvalidAmount)?;
    let converted = ExchangeRate::new(src, dst, rate)?
        .convert(Money::from_decimal(decimal_amount, src))
        .map(|money| *money.amount())?;
    converted
        .to_f64()
        .ok_or_else(|| anyhow::anyhow!("Conversion failed for {}", converted))
}
/// Returns the Unix epoch (1970-01-01T00:00:00Z) as a UTC `DateTime`.
pub(crate) fn get_datetime_zero() -> DateTime<Utc> {
    DateTime::<Utc>::from_utc(
        chrono::NaiveDate::from_ymd(1970, 1, 1).and_hms_milli(0, 0, 0, 0),
        Utc,
    )
}
// Unit tests for the currency/decimal helpers above.
#[cfg(test)]
mod tests {
    use crate::util::{get_datetime_zero, round_two, to_bigdecimal};
    use rust_decimal_macros::dec;
    use sqlx::types::BigDecimal;
    use std::str::FromStr;
    use super::exchange;
    #[test]
    fn test_exchange_usd_eur_works() {
        let amount = 10;
        let eur = exchange("USD", "EUR", amount.into(), dec!(4.20)).unwrap();
        assert_eq!(eur, 42 as f64);
    }
    #[test]
    fn test_exchange_eur_usd_works() {
        let amount = 10;
        let usd = exchange("EUR", "USD", amount.into(), dec!(42)).unwrap();
        assert_eq!(usd, 420.0 as f64);
    }
    #[test]
    fn test_exchange_eur_usd2_works() {
        let amount = 1;
        let usd = exchange("EUR", "USD", amount.into(), dec!(0.42)).unwrap();
        assert_eq!(usd, 0.42 as f64);
    }
    // Disabled: unsupported-pair behaviour — presumably kept off until the
    // error path is finalized; re-enable once `exchange` rejects CAD.
    // #[test]
    // fn test_exchange_others_not_works() {
    //     let amount = 10;
    //     let change = exchange("EUR", "CAD", amount.into(), dec!(0.22));
    //     assert!(change.is_err());
    // }
    #[test]
    fn test_datetime_zero_works() {
        let expect = get_datetime_zero();
        assert_eq!(expect.to_string(), String::from("1970-01-01 00:00:00 UTC"));
    }
    #[test]
    fn test_exchange_to_bigdecimal_works() {
        assert_eq!(
            to_bigdecimal(-123.0),
            BigDecimal::from_str("-123.0000000000000").unwrap()
        );
    }
    #[test]
    fn test_exchange_round_two_works() {
        assert_eq!(round_two(&BigDecimal::from_str("123.12").unwrap()), 123.12);
    }
}
| 28.303704 | 92 | 0.603769 |
d792d3b575e9c3ef99a8d6c3c7dc7885b747393a | 3,160 | use crate::{libra_channel, message_queues::QueueStyle};
use futures::{executor::block_on, future::join};
use libra_types::account_address::AccountAddress;
use libra_types::account_address::ADDRESS_LENGTH;
use std::time::Duration;
use tokio::prelude::*;
use tokio::runtime::Runtime;
use tokio::timer::delay_for;
/// A FIFO channel must deliver all messages pushed under one key in
/// insertion order, then report no further ready values.
#[test]
fn test_send_recv_order() {
    let (mut sender, mut receiver) = libra_channel::new(QueueStyle::FIFO, 10, None);
    sender.push(0, 0).unwrap();
    sender.push(0, 1).unwrap();
    sender.push(0, 2).unwrap();
    sender.push(0, 3).unwrap();
    let task = async move {
        // Ensure that messages are received in order
        assert_eq!(receiver.select_next_some().await, 0);
        assert_eq!(receiver.select_next_some().await, 1);
        assert_eq!(receiver.select_next_some().await, 2);
        assert_eq!(receiver.select_next_some().await, 3);
        // Ensures that there is no other value which is ready
        assert_eq!(receiver.select_next_some().now_or_never(), None);
    };
    block_on(task);
}
/// A freshly created channel has nothing ready to receive.
#[test]
fn test_empty() {
    let (_, mut receiver) = libra_channel::new::<u8, u8>(QueueStyle::FIFO, 10, None);
    // Ensures that there is no other value which is ready
    assert_eq!(receiver.select_next_some().now_or_never(), None);
}
/// Pushes made *after* the receiver starts awaiting must wake it: the
/// sender delays each push so the receiver is already parked when the
/// value arrives.
#[test]
fn test_waker() {
    let (mut sender, mut receiver) = libra_channel::new(QueueStyle::FIFO, 10, None);
    // Ensures that there is no other value which is ready
    assert_eq!(receiver.select_next_some().now_or_never(), None);
    let f1 = async move {
        assert_eq!(receiver.select_next_some().await, 0);
        assert_eq!(receiver.select_next_some().await, 1);
        assert_eq!(receiver.select_next_some().await, 2);
    };
    let f2 = async {
        delay_for(Duration::from_millis(100)).await;
        sender.push(0, 0).unwrap();
        delay_for(Duration::from_millis(100)).await;
        sender.push(0, 1).unwrap();
        delay_for(Duration::from_millis(100)).await;
        sender.push(0, 2).unwrap();
    };
    let rt = Runtime::new().unwrap();
    rt.block_on(join(f1, f2));
}
/// Pushes `num_messages_per_validator` messages for each of 128 distinct
/// validator keys into a per-key queue of capacity 1, then checks that the
/// single surviving message per validator is `expected_last_message`
/// (FIFO keeps the first message pushed, LIFO keeps the last).
fn test_multiple_validators_helper(
    queue_style: QueueStyle,
    num_messages_per_validator: usize,
    expected_last_message: usize,
) {
    let (mut sender, mut receiver) = libra_channel::new(queue_style, 1, None);
    let num_validators = 128;
    for message in 0..num_messages_per_validator {
        for validator in 0..num_validators {
            sender
                .push(
                    AccountAddress::new([validator as u8; ADDRESS_LENGTH]),
                    (validator, message),
                )
                .unwrap();
        }
    }
    block_on(async {
        for i in 0..num_validators {
            assert_eq!(
                receiver.select_next_some().await,
                (i, expected_last_message)
            );
        }
    });
    // Exactly one message per validator should have survived.
    assert_eq!(receiver.select_next_some().now_or_never(), None);
}
#[test]
fn test_multiple_validators_fifo() {
    // FIFO with capacity 1: the first message (index 0) is retained.
    test_multiple_validators_helper(QueueStyle::FIFO, 1024, 0);
}
#[test]
fn test_multiple_validators_lifo() {
    // LIFO with capacity 1: the last message (index 1023) is retained.
    test_multiple_validators_helper(QueueStyle::LIFO, 1024, 1023);
}
18884b90abe2e1c05f15de32d393e9063a637c4b | 11,168 | use async_task::{Runnable, Task};
use crossbeam_deque::{Injector, Stealer, Worker};
use std::{
cell::RefCell,
future::Future,
hash::{Hash, Hasher},
iter,
sync::{
atomic::{
AtomicUsize,
Ordering::{AcqRel, Acquire, Relaxed, Release},
},
Arc, RwLock,
},
task::{Context, Poll},
thread,
};
use tracing::{trace, trace_span, Span};
use crate::parking::Parking;
thread_local! {
    // The executor entered on this thread, if any; managed by `EnterGuard`.
    static CURRENT: RefCell<Option<WeakExecutor>> = RefCell::new(None);
}
// Monotonic id source used only to label executor tracing spans.
static EXECUTOR_ID: AtomicUsize = AtomicUsize::new(0);
/// A work-stealing executor with a fixed async worker pool and an
/// on-demand blocking pool. Cloning is cheap; dropping the last strong
/// handle begins shutdown.
pub struct Executor {
    handle: Arc<Handle>,
}
/// Non-owning handle used by workers and schedule callbacks; it does not
/// contribute to `Handle::refs`, so it cannot keep the executor alive.
#[derive(Clone)]
struct WeakExecutor {
    handle: Arc<Handle>,
}
/// State shared by every handle and worker thread.
struct Handle {
    // Async pool state ("asinc" sidesteps the `async` keyword).
    asinc: AsyncHandle,
    blocking: BlockingHandle,
    // Count of strong `Executor` handles; 0 signals workers to shut down.
    refs: AtomicUsize,
    span: Span,
}
struct AsyncHandle {
    // Global queue feeding the per-worker deques.
    injector: Injector<(Runnable, Span)>,
    // One stealer per async worker, fixed at construction.
    stealers: Box<[Stealer<(Runnable, Span)>]>,
    parking: Parking,
    task_id: AtomicUsize,
}
struct BlockingHandle {
    injector: Injector<(Runnable, Span)>,
    // Grows as blocking workers start; its capacity caps the pool size.
    stealers: RwLock<Vec<Stealer<(Runnable, Span)>>>,
    parking: Parking,
    task_id: AtomicUsize,
}
/// Restores the previously current executor when dropped.
pub struct EnterGuard {
    previous: Option<WeakExecutor>,
}
impl Executor {
    /// Creates an executor with one async worker per CPU and up to four
    /// times as many blocking workers.
    pub fn new() -> Self {
        let cpus = num_cpus::get().max(1);
        Self::with_workers(cpus, cpus * 4)
    }
    /// Creates an executor with `num_async` eagerly started async workers
    /// and a blocking pool that grows on demand up to `num_blocking`
    /// threads.
    pub fn with_workers(num_async: usize, num_blocking: usize) -> Self {
        let id = EXECUTOR_ID.fetch_add(1, Relaxed);
        let span = trace_span!(target: "executor", "executor", id = id);
        trace!(
            target: "executor",
            parent: &span,
            "starting executor with {} async and up to {} blocking workers",
            num_async,
            num_blocking,
        );
        // One work-stealing deque per async worker; the stealer halves are
        // shared via the handle so any worker can steal from any other.
        let mut stealers = Vec::with_capacity(num_async);
        let mut workers = Vec::with_capacity(num_async);
        for _ in 0..num_async {
            let worker = Worker::new_fifo();
            stealers.push(worker.stealer());
            workers.push(worker);
        }
        let executor = Self {
            handle: Arc::new(Handle {
                asinc: AsyncHandle {
                    injector: Injector::new(),
                    stealers: stealers.into_boxed_slice(),
                    parking: Parking::new(),
                    task_id: AtomicUsize::new(0),
                },
                blocking: BlockingHandle {
                    injector: Injector::new(),
                    // The spare capacity doubles as the worker limit:
                    // `spawn_blocking` compares len() to capacity().
                    stealers: RwLock::new(Vec::with_capacity(num_blocking)),
                    parking: Parking::new(),
                    task_id: AtomicUsize::new(0),
                },
                // Strong-handle count; starts at 1 for this `Executor`.
                refs: AtomicUsize::new(1),
                span,
            }),
        };
        for (i, worker) in workers.into_iter().enumerate() {
            trace!(
                target: "executor",
                parent: &executor.handle.span,
                "starting async worker {}",
                i,
            );
            // Workers hold only weak handles so they cannot keep the
            // executor alive.
            let executor = executor.downgrade();
            thread::Builder::new()
                .name(format!("async-worker-{}", i))
                .spawn(move || async_worker(worker, executor))
                .unwrap();
        }
        executor
    }
    /// Drives `future` to completion on the calling thread, parking between
    /// polls; the executor is entered so `Executor::current()` works inside.
    pub fn block_on<F: Future>(&self, future: F) -> F::Output {
        futures::pin_mut!(future);
        // The waker simply unparks this thread.
        let parking = Arc::new(Parking::new());
        let waker = futures::task::waker(parking.clone());
        let mut context = Context::from_waker(&waker);
        let _guard = self.downgrade().enter();
        let output = loop {
            match future.as_mut().poll(&mut context) {
                Poll::Ready(output) => break output,
                Poll::Pending => parking.park(),
            }
        };
        output
    }
    /// Spawns `future` onto the async pool and returns its task handle.
    pub fn spawn<F: Future>(&self, future: F) -> Task<F::Output>
    where
        F: Send + 'static,
        F::Output: Send,
    {
        let id = self.handle.asinc.task_id.fetch_add(1, Relaxed);
        let span = trace_span!(
            target: "executor",
            parent: &self.handle.span,
            "task",
            id = id,
        );
        let executor = self.downgrade();
        // The schedule callback pushes the runnable onto the global
        // injector and wakes one parked worker.
        let (runnable, task) = async_task::spawn(future, move |runnable| {
            executor
                .handle
                .asinc
                .injector
                .push((runnable, span.clone()));
            executor.handle.asinc.parking.unpark_one();
        });
        runnable.schedule();
        task
    }
    /// Runs the closure `f` on the lazily grown blocking pool.
    pub fn spawn_blocking<T, F: FnOnce() -> T>(&self, f: F) -> Task<T>
    where
        F: Send + 'static,
        T: Send + 'static,
    {
        let future = futures::future::lazy(move |_| f());
        let id = self.handle.blocking.task_id.fetch_add(1, Relaxed);
        let span = trace_span!(
            target: "executor",
            parent: &self.handle.span,
            "blocking-task",
            id = id,
        );
        let executor = self.downgrade();
        let (runnable, task) = async_task::spawn(future, move |runnable| {
            let handle = &executor.handle.blocking;
            handle.injector.push((runnable, span.clone()));
            // Start a new worker only when nobody is parked and the pool
            // is below its limit (the stealer Vec's capacity, initially
            // `num_blocking`).
            let (should_start_worker, worker_id) = handle
                .parking
                .is_empty()
                .then(|| {
                    handle
                        .stealers
                        .read()
                        .map(|s| (s.len() < s.capacity(), s.len()))
                        .unwrap()
                })
                .unwrap_or((false, 0));
            if should_start_worker {
                trace!(
                    target: "executor",
                    parent: &executor.handle.span,
                    "starting blocking worker {}",
                    worker_id,
                );
                let worker = Worker::new_fifo();
                let stealer = worker.stealer();
                handle.stealers.write().unwrap().push(stealer);
                let executor = executor.clone();
                thread::Builder::new()
                    .name(format!("blocking-worker-{}", worker_id))
                    .spawn(move || blocking_worker(worker, executor))
                    .unwrap();
            } else {
                // A parked worker exists; wake it to pick up the task.
                handle.parking.unpark_one();
            }
        });
        runnable.schedule();
        task
    }
    /// Returns the executor entered on this thread.
    ///
    /// # Panics
    /// Panics when called outside an executor context or while the
    /// executor is shutting down.
    pub fn current() -> Self {
        CURRENT
            .with(|c| c.borrow().clone())
            .expect("not inside an executor context")
            .upgrade()
            .expect("executor is shutting down")
    }
    /// Makes this executor the thread's current one until the guard drops.
    pub fn enter(&self) -> EnterGuard {
        self.downgrade().enter()
    }
    /// Creates a non-owning handle; does not touch the strong count.
    fn downgrade(&self) -> WeakExecutor {
        WeakExecutor {
            handle: self.handle.clone(),
        }
    }
}
impl WeakExecutor {
    /// Attempts to recover a strong `Executor`. Returns `None` when the
    /// strong count has already reached zero (shutdown has begun); in that
    /// case the transient increment is rolled back and all parked workers
    /// are woken so they can observe the shutdown.
    fn upgrade(self) -> Option<Executor> {
        if self.handle.refs.fetch_add(1, AcqRel) == 0 {
            // Raced with the final drop: undo our speculative increment.
            self.handle.refs.fetch_sub(1, Release);
            trace!(
                target: "executor",
                parent: &self.handle.span,
                "shutting down executor",
            );
            self.handle.asinc.parking.unpark_all();
            self.handle.blocking.parking.unpark_all();
            return None;
        }
        // The fetch_add above already accounted for the new strong handle.
        Some(Executor {
            handle: self.handle,
        })
    }
    /// Installs this handle as the thread-local current executor, returning
    /// a guard that restores the previous one on drop.
    fn enter(self) -> EnterGuard {
        let previous = CURRENT.with(|c| c.borrow_mut().replace(self));
        EnterGuard { previous }
    }
}
/// Runs `future` on the thread's current executor, creating a fresh one
/// when the current thread has none.
pub fn block_on<F: Future>(future: F) -> F::Output {
    let executor = match CURRENT.with(|c| c.borrow().clone()) {
        Some(weak) => weak.upgrade().expect("executor is shutting down"),
        None => Executor::default(),
    };
    executor.block_on(future)
}
/// Spawns `future` on the current executor; panics outside an executor
/// context.
pub fn spawn<F: Future>(future: F) -> Task<F::Output>
where
    F: Send + 'static,
    F::Output: Send,
{
    let executor = Executor::current();
    executor.spawn(future)
}
/// Runs `f` on the current executor's blocking pool; panics outside an
/// executor context.
pub fn spawn_blocking<T, F: FnOnce() -> T>(f: F) -> Task<T>
where
    F: Send + 'static,
    T: Send + 'static,
{
    let executor = Executor::current();
    executor.spawn_blocking(f)
}
/// Main loop of an async worker thread: pop from the local deque, then try
/// the global injector and the other workers' deques, retrying while any
/// steal reports a transient `Retry`; park when everything is empty.
fn async_worker(worker: Worker<(Runnable, Span)>, executor: WeakExecutor) {
    let _guard = executor.clone().enter();
    let handle = &executor.handle.asinc;
    let refs = &executor.handle.refs;
    loop {
        let task = worker.pop().or_else(|| {
            iter::repeat_with(|| {
                handle
                    .injector
                    .steal_batch_and_pop(&worker)
                    .or_else(|| handle.stealers.iter().map(|s| s.steal()).collect())
            })
            .find(|s| !s.is_retry())
            .and_then(|s| s.success())
        });
        match task {
            Some((task, span)) => {
                // Run the task inside its tracing span.
                let _span = span.enter();
                task.run();
            }
            // No work and every strong handle is gone: exit the loop.
            None if refs.load(Acquire) == 0 => break,
            None => handle.parking.park(),
        }
    }
    trace!(
        target: "executor",
        parent: &executor.handle.span,
        "shutting down worker",
    );
}
/// Main loop of a blocking worker thread; mirrors `async_worker` but the
/// stealer list lives behind an `RwLock` because the pool grows at runtime.
/// NOTE(review): once started, a blocking worker parks indefinitely when
/// idle — there is no idle timeout in this loop.
fn blocking_worker(worker: Worker<(Runnable, Span)>, executor: WeakExecutor) {
    let _guard = executor.clone().enter();
    let handle = &executor.handle.blocking;
    let refs = &executor.handle.refs;
    loop {
        let task = worker.pop().or_else(|| {
            iter::repeat_with(|| {
                handle.injector.steal_batch_and_pop(&worker).or_else(|| {
                    handle
                        .stealers
                        .read()
                        .unwrap()
                        .iter()
                        .map(|s| s.steal())
                        .collect()
                })
            })
            .find(|s| !s.is_retry())
            .and_then(|s| s.success())
        });
        match task {
            Some((task, span)) => {
                // Run the task inside its tracing span.
                let _span = span.enter();
                task.run();
            }
            // No work and every strong handle is gone: exit the loop.
            None if refs.load(Acquire) == 0 => break,
            None => handle.parking.park(),
        }
    }
    trace!(
        target: "executor",
        parent: &executor.handle.span,
        "shutting down worker",
    );
}
impl Clone for Executor {
    fn clone(&self) -> Self {
        // Manual impl so the strong-handle count stays in sync with `Drop`.
        self.handle.refs.fetch_add(1, Release);
        Self {
            handle: self.handle.clone(),
        }
    }
}
impl Drop for Executor {
    fn drop(&mut self) {
        // Last strong handle gone: wake every parked worker so each can
        // observe refs == 0 and exit its loop.
        if self.handle.refs.fetch_sub(1, AcqRel) == 1 {
            trace!(
                target: "executor",
                parent: &self.handle.span,
                "shutting down executor",
            );
            self.handle.asinc.parking.unpark_all();
            self.handle.blocking.parking.unpark_all();
        }
    }
}
impl Default for Executor {
    /// Equivalent to [`Executor::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl PartialEq for Executor {
    /// Two handles compare equal when they refer to the same executor.
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.handle, &other.handle)
    }
}
impl Eq for Executor {}
impl Hash for Executor {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash by identity, matching the pointer-based PartialEq above.
        Arc::as_ptr(&self.handle).hash(state);
    }
}
impl Drop for EnterGuard {
    fn drop(&mut self) {
        // Restore whatever executor was current before `enter`.
        CURRENT.with(|c| *c.borrow_mut() = self.previous.take());
    }
}
| 26.527316 | 84 | 0.492747 |
236b4e151d04403b67cfb4d0db29e4ab44b165d1 | 474,183 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE: smithy-rs generated code (see file header) — regenerate rather
// than hand-editing.
/// <p>The configuration details of the recommender.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RecommenderConfig {
    /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items. Provide <code>itemExplorationConfig</code> data only if your recommenders generate personalized recommendations for a user
    /// (not popular items or similar items).</p>
    pub item_exploration_config:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl RecommenderConfig {
    /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items. Provide <code>itemExplorationConfig</code> data only if your recommenders generate personalized recommendations for a user
    /// (not popular items or similar items).</p>
    pub fn item_exploration_config(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        // Borrowing accessor for the optional hyperparameter map.
        self.item_exploration_config.as_ref()
    }
}
impl std::fmt::Debug for RecommenderConfig {
    /// Debug output shows the struct name and its single field.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RecommenderConfig")
            .field("item_exploration_config", &self.item_exploration_config)
            .finish()
    }
}
/// See [`RecommenderConfig`](crate::model::RecommenderConfig)
pub mod recommender_config {
/// A builder for [`RecommenderConfig`](crate::model::RecommenderConfig)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) item_exploration_config: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
}
impl Builder {
/// Adds a key-value pair to `item_exploration_config`.
///
/// To override the contents of this collection use [`set_item_exploration_config`](Self::set_item_exploration_config).
///
/// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
/// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
/// recommending items. Provide <code>itemExplorationConfig</code> data only if your recommenders generate personalized recommendations for a user
/// (not popular items or similar items).</p>
pub fn item_exploration_config(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
let mut hash_map = self.item_exploration_config.unwrap_or_default();
hash_map.insert(k.into(), v.into());
self.item_exploration_config = Some(hash_map);
self
}
/// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
/// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
/// recommending items. Provide <code>itemExplorationConfig</code> data only if your recommenders generate personalized recommendations for a user
/// (not popular items or similar items).</p>
pub fn set_item_exploration_config(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.item_exploration_config = input;
self
}
/// Consumes the builder and constructs a [`RecommenderConfig`](crate::model::RecommenderConfig)
pub fn build(self) -> crate::model::RecommenderConfig {
crate::model::RecommenderConfig {
item_exploration_config: self.item_exploration_config,
}
}
}
}
impl RecommenderConfig {
/// Creates a new builder-style object to manufacture [`RecommenderConfig`](crate::model::RecommenderConfig)
pub fn builder() -> crate::model::recommender_config::Builder {
crate::model::recommender_config::Builder::default()
}
}
/// <p>The configuration details of a campaign.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CampaignConfig {
    /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items. Provide <code>itemExplorationConfig</code> data only if your solution uses the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a> recipe.</p>
    pub item_exploration_config:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl CampaignConfig {
    /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items. Provide <code>itemExplorationConfig</code> data only if your solution uses the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a> recipe.</p>
    pub fn item_exploration_config(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.item_exploration_config.as_ref()
    }
}
impl std::fmt::Debug for CampaignConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain directly on the DebugStruct builder; no intermediate binding needed.
        f.debug_struct("CampaignConfig")
            .field("item_exploration_config", &self.item_exploration_config)
            .finish()
    }
}
/// See [`CampaignConfig`](crate::model::CampaignConfig)
pub mod campaign_config {
    /// A builder for [`CampaignConfig`](crate::model::CampaignConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) item_exploration_config: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
    }
    impl Builder {
        /// Adds a key-value pair to `item_exploration_config`.
        ///
        /// To override the contents of this collection use [`set_item_exploration_config`](Self::set_item_exploration_config).
        ///
        /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
        /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
        /// recommending items. Provide <code>itemExplorationConfig</code> data only if your solution uses the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a> recipe.</p>
        pub fn item_exploration_config(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insert instead of move-and-reassign.
            self.item_exploration_config
                .get_or_insert_with(std::collections::HashMap::new)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Specifies the exploration configuration hyperparameters, including <code>explorationWeight</code> and
        /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
        /// recommending items. Provide <code>itemExplorationConfig</code> data only if your solution uses the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a> recipe.</p>
        pub fn set_item_exploration_config(
            self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            // Functional-update style: rebuild the builder with the field replaced.
            Self {
                item_exploration_config: input,
            }
        }
        /// Consumes the builder and constructs a [`CampaignConfig`](crate::model::CampaignConfig)
        pub fn build(self) -> crate::model::CampaignConfig {
            crate::model::CampaignConfig {
                item_exploration_config: self.item_exploration_config,
            }
        }
    }
}
impl CampaignConfig {
    /// Creates a new builder-style object to manufacture [`CampaignConfig`](crate::model::CampaignConfig)
    pub fn builder() -> crate::model::campaign_config::Builder {
        // Return type pins the concrete Builder; rely on `Default` via inference.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a solution version. For a complete listing, call the
/// <a>DescribeSolutionVersion</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SolutionVersionSummary {
    /// <p>The Amazon Resource Name (ARN) of the solution version.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
    /// <p>The status of the solution version.</p>
    /// <p>A solution version can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that this version of a solution was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the solution version was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If a solution version fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl SolutionVersionSummary {
    /// <p>The Amazon Resource Name (ARN) of the solution version.</p>
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        self.solution_version_arn.as_deref()
    }
    /// <p>The status of the solution version.</p>
    /// <p>A solution version can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_deref()
    }
    /// <p>The date and time (in Unix time) that this version of a solution was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the solution version was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>If a solution version fails, the reason behind the failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_deref()
    }
}
impl std::fmt::Debug for SolutionVersionSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain directly on the DebugStruct builder; no intermediate binding needed.
        f.debug_struct("SolutionVersionSummary")
            .field("solution_version_arn", &self.solution_version_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`SolutionVersionSummary`](crate::model::SolutionVersionSummary)
pub mod solution_version_summary {
    /// A builder for [`SolutionVersionSummary`](crate::model::SolutionVersionSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) solution_version_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The Amazon Resource Name (ARN) of the solution version.</p>
        pub fn solution_version_arn(self, input: impl Into<std::string::String>) -> Self {
            // Functional-update style: replace one field, keep the rest.
            Self {
                solution_version_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the solution version.</p>
        pub fn set_solution_version_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                solution_version_arn: input,
                ..self
            }
        }
        /// <p>The status of the solution version.</p>
        /// <p>A solution version can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the solution version.</p>
        /// <p>A solution version can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that this version of a solution was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that this version of a solution was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution version was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution version was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// <p>If a solution version fails, the reason behind the failure.</p>
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// <p>If a solution version fails, the reason behind the failure.</p>
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`SolutionVersionSummary`](crate::model::SolutionVersionSummary)
        pub fn build(self) -> crate::model::SolutionVersionSummary {
            crate::model::SolutionVersionSummary {
                solution_version_arn: self.solution_version_arn,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
            }
        }
    }
}
impl SolutionVersionSummary {
    /// Creates a new builder-style object to manufacture [`SolutionVersionSummary`](crate::model::SolutionVersionSummary)
    pub fn builder() -> crate::model::solution_version_summary::Builder {
        // Return type pins the concrete Builder; rely on `Default` via inference.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a solution. For a complete listing, call the
/// <a>DescribeSolution</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SolutionSummary {
    /// <p>The name of the solution.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the solution.</p>
    pub solution_arn: std::option::Option<std::string::String>,
    /// <p>The status of the solution.</p>
    /// <p>A solution can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the solution was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl SolutionSummary {
    /// <p>The name of the solution.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the solution.</p>
    pub fn solution_arn(&self) -> std::option::Option<&str> {
        self.solution_arn.as_deref()
    }
    /// <p>The status of the solution.</p>
    /// <p>A solution can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_deref()
    }
    /// <p>The date and time (in Unix time) that the solution was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for SolutionSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain directly on the DebugStruct builder; no intermediate binding needed.
        f.debug_struct("SolutionSummary")
            .field("name", &self.name)
            .field("solution_arn", &self.solution_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`SolutionSummary`](crate::model::SolutionSummary)
pub mod solution_summary {
    /// A builder for [`SolutionSummary`](crate::model::SolutionSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) solution_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the solution.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            // Functional-update style: replace one field, keep the rest.
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the solution.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                name: input,
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the solution.</p>
        pub fn solution_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                solution_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the solution.</p>
        pub fn set_solution_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                solution_arn: input,
                ..self
            }
        }
        /// <p>The status of the solution.</p>
        /// <p>A solution can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the solution.</p>
        /// <p>A solution can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`SolutionSummary`](crate::model::SolutionSummary)
        pub fn build(self) -> crate::model::SolutionSummary {
            crate::model::SolutionSummary {
                name: self.name,
                solution_arn: self.solution_arn,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
            }
        }
    }
}
impl SolutionSummary {
    /// Creates a new builder-style object to manufacture [`SolutionSummary`](crate::model::SolutionSummary)
    pub fn builder() -> crate::model::solution_summary::Builder {
        // Return type pins the concrete Builder; rely on `Default` via inference.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a dataset schema. For a complete listing, call the
/// <a>DescribeSchema</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DatasetSchemaSummary {
    /// <p>The name of the schema.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the schema.</p>
    pub schema_arn: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the schema was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the schema was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The domain of a schema that you created for a dataset in a Domain dataset group.</p>
    pub domain: std::option::Option<crate::model::Domain>,
}
impl DatasetSchemaSummary {
    /// <p>The name of the schema.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the schema.</p>
    pub fn schema_arn(&self) -> std::option::Option<&str> {
        self.schema_arn.as_deref()
    }
    /// <p>The date and time (in Unix time) that the schema was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the schema was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>The domain of a schema that you created for a dataset in a Domain dataset group.</p>
    pub fn domain(&self) -> std::option::Option<&crate::model::Domain> {
        self.domain.as_ref()
    }
}
impl std::fmt::Debug for DatasetSchemaSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain directly on the DebugStruct builder; no intermediate binding needed.
        f.debug_struct("DatasetSchemaSummary")
            .field("name", &self.name)
            .field("schema_arn", &self.schema_arn)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("domain", &self.domain)
            .finish()
    }
}
/// See [`DatasetSchemaSummary`](crate::model::DatasetSchemaSummary)
pub mod dataset_schema_summary {
    /// A builder for [`DatasetSchemaSummary`](crate::model::DatasetSchemaSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) schema_arn: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) domain: std::option::Option<crate::model::Domain>,
    }
    impl Builder {
        /// <p>The name of the schema.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            // Functional-update style: replace one field, keep the rest.
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the schema.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                name: input,
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the schema.</p>
        pub fn schema_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                schema_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the schema.</p>
        pub fn set_schema_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                schema_arn: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the schema was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the schema was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the schema was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the schema was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// <p>The domain of a schema that you created for a dataset in a Domain dataset group.</p>
        pub fn domain(self, input: crate::model::Domain) -> Self {
            Self {
                domain: Some(input),
                ..self
            }
        }
        /// <p>The domain of a schema that you created for a dataset in a Domain dataset group.</p>
        pub fn set_domain(self, input: std::option::Option<crate::model::Domain>) -> Self {
            Self {
                domain: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetSchemaSummary`](crate::model::DatasetSchemaSummary)
        pub fn build(self) -> crate::model::DatasetSchemaSummary {
            crate::model::DatasetSchemaSummary {
                name: self.name,
                schema_arn: self.schema_arn,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                domain: self.domain,
            }
        }
    }
}
impl DatasetSchemaSummary {
    /// Creates a new builder-style object to manufacture [`DatasetSchemaSummary`](crate::model::DatasetSchemaSummary)
    pub fn builder() -> crate::model::dataset_schema_summary::Builder {
        // Return type pins the concrete Builder; rely on `Default` via inference.
        std::default::Default::default()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum Domain {
    #[allow(missing_docs)] // documentation missing in model
    Ecommerce,
    #[allow(missing_docs)] // documentation missing in model
    VideoOnDemand,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for Domain {
    fn from(s: &str) -> Self {
        // Known wire values map to their variants; anything else is preserved
        // verbatim in `Unknown` for forward compatibility.
        match s {
            "ECOMMERCE" => Self::Ecommerce,
            "VIDEO_ON_DEMAND" => Self::VideoOnDemand,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for Domain {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // Parsing cannot fail: unrecognized strings become `Unknown`.
        Ok(s.into())
    }
}
impl Domain {
    /// Returns the `&str` value of the enum member.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Ecommerce => "ECOMMERCE",
            Self::VideoOnDemand => "VIDEO_ON_DEMAND",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all the `&str` values of the enum members.
    pub fn values() -> &'static [&'static str] {
        &["ECOMMERCE", "VIDEO_ON_DEMAND"]
    }
}
impl AsRef<str> for Domain {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Provides a summary of the properties of the recommender.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RecommenderSummary {
/// <p>The name of the recommender.</p>
pub name: std::option::Option<std::string::String>,
/// <p>The Amazon Resource Name (ARN) of the recommender.</p>
pub recommender_arn: std::option::Option<std::string::String>,
/// <p>The Amazon Resource Name (ARN) of the Domain dataset group that contains the recommender.</p>
pub dataset_group_arn: std::option::Option<std::string::String>,
/// <p>The Amazon Resource Name (ARN) of the recipe (Domain dataset group use case) that the recommender was created for.</p>
pub recipe_arn: std::option::Option<std::string::String>,
/// <p>The configuration details of the recommender.</p>
pub recommender_config: std::option::Option<crate::model::RecommenderConfig>,
/// <p>The status of the recommender. A recommender can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub status: std::option::Option<std::string::String>,
/// <p>The date and time (in Unix format) that the recommender was created.</p>
pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
/// <p>The date and time (in Unix format) that the recommender was last updated.</p>
pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl RecommenderSummary {
/// <p>The name of the recommender.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
/// <p>The Amazon Resource Name (ARN) of the recommender.</p>
pub fn recommender_arn(&self) -> std::option::Option<&str> {
self.recommender_arn.as_deref()
}
/// <p>The Amazon Resource Name (ARN) of the Domain dataset group that contains the recommender.</p>
pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
self.dataset_group_arn.as_deref()
}
/// <p>The Amazon Resource Name (ARN) of the recipe (Domain dataset group use case) that the recommender was created for.</p>
pub fn recipe_arn(&self) -> std::option::Option<&str> {
self.recipe_arn.as_deref()
}
/// <p>The configuration details of the recommender.</p>
pub fn recommender_config(&self) -> std::option::Option<&crate::model::RecommenderConfig> {
self.recommender_config.as_ref()
}
/// <p>The status of the recommender. A recommender can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub fn status(&self) -> std::option::Option<&str> {
self.status.as_deref()
}
/// <p>The date and time (in Unix format) that the recommender was created.</p>
pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
self.creation_date_time.as_ref()
}
/// <p>The date and time (in Unix format) that the recommender was last updated.</p>
pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
self.last_updated_date_time.as_ref()
}
}
impl std::fmt::Debug for RecommenderSummary {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("RecommenderSummary");
formatter.field("name", &self.name);
formatter.field("recommender_arn", &self.recommender_arn);
formatter.field("dataset_group_arn", &self.dataset_group_arn);
formatter.field("recipe_arn", &self.recipe_arn);
formatter.field("recommender_config", &self.recommender_config);
formatter.field("status", &self.status);
formatter.field("creation_date_time", &self.creation_date_time);
formatter.field("last_updated_date_time", &self.last_updated_date_time);
formatter.finish()
}
}
/// See [`RecommenderSummary`](crate::model::RecommenderSummary)
pub mod recommender_summary {
/// A builder for [`RecommenderSummary`](crate::model::RecommenderSummary)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) recommender_arn: std::option::Option<std::string::String>,
pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
pub(crate) recipe_arn: std::option::Option<std::string::String>,
pub(crate) recommender_config: std::option::Option<crate::model::RecommenderConfig>,
pub(crate) status: std::option::Option<std::string::String>,
pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl Builder {
/// <p>The name of the recommender.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the recommender.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the recommender.</p>
pub fn recommender_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.recommender_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the recommender.</p>
pub fn set_recommender_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.recommender_arn = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the Domain dataset group that contains the recommender.</p>
pub fn dataset_group_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.dataset_group_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the Domain dataset group that contains the recommender.</p>
pub fn set_dataset_group_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.dataset_group_arn = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the recipe (Domain dataset group use case) that the recommender was created for.</p>
pub fn recipe_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.recipe_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the recipe (Domain dataset group use case) that the recommender was created for.</p>
pub fn set_recipe_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.recipe_arn = input;
self
}
/// <p>The configuration details of the recommender.</p>
pub fn recommender_config(mut self, input: crate::model::RecommenderConfig) -> Self {
self.recommender_config = Some(input);
self
}
/// <p>The configuration details of the recommender.</p>
pub fn set_recommender_config(
mut self,
input: std::option::Option<crate::model::RecommenderConfig>,
) -> Self {
self.recommender_config = input;
self
}
/// <p>The status of the recommender. A recommender can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub fn status(mut self, input: impl Into<std::string::String>) -> Self {
self.status = Some(input.into());
self
}
/// <p>The status of the recommender. A recommender can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
self.status = input;
self
}
/// <p>The date and time (in Unix format) that the recommender was created.</p>
pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.creation_date_time = Some(input);
self
}
/// <p>The date and time (in Unix format) that the recommender was created.</p>
pub fn set_creation_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.creation_date_time = input;
self
}
/// <p>The date and time (in Unix format) that the recommender was last updated.</p>
pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.last_updated_date_time = Some(input);
self
}
/// <p>The date and time (in Unix format) that the recommender was last updated.</p>
pub fn set_last_updated_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.last_updated_date_time = input;
self
}
/// Consumes the builder and constructs a [`RecommenderSummary`](crate::model::RecommenderSummary)
pub fn build(self) -> crate::model::RecommenderSummary {
crate::model::RecommenderSummary {
name: self.name,
recommender_arn: self.recommender_arn,
dataset_group_arn: self.dataset_group_arn,
recipe_arn: self.recipe_arn,
recommender_config: self.recommender_config,
status: self.status,
creation_date_time: self.creation_date_time,
last_updated_date_time: self.last_updated_date_time,
}
}
}
}
impl RecommenderSummary {
    /// Creates a new builder-style object to manufacture [`RecommenderSummary`](crate::model::RecommenderSummary)
    pub fn builder() -> crate::model::recommender_summary::Builder {
        // The return type pins the concrete builder; `Default` does the rest.
        Default::default()
    }
}
/// <p>Provides a summary of the properties of a recipe. For a complete listing, call the
/// <a>DescribeRecipe</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
// `Debug` is implemented by hand below rather than derived; all fields are
// `Option` and are normally read through the borrowed accessor methods.
pub struct RecipeSummary {
    /// <p>The name of the recipe.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
    pub recipe_arn: std::option::Option<std::string::String>,
    /// <p>The status of the recipe.</p>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the recipe was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the recipe was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The domain of the recipe (if the recipe is a Domain dataset group use case).</p>
    pub domain: std::option::Option<crate::model::Domain>,
}
impl RecipeSummary {
    /// <p>The name of the recipe.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
    pub fn recipe_arn(&self) -> std::option::Option<&str> {
        self.recipe_arn.as_ref().map(String::as_str)
    }
    /// <p>The status of the recipe.</p>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix time) that the recipe was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.creation_date_time)
    }
    /// <p>The date and time (in Unix time) that the recipe was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.last_updated_date_time)
    }
    /// <p>The domain of the recipe (if the recipe is a Domain dataset group use case).</p>
    pub fn domain(&self) -> std::option::Option<&crate::model::Domain> {
        Option::as_ref(&self.domain)
    }
}
impl std::fmt::Debug for RecipeSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Field order mirrors the struct declaration.
        f.debug_struct("RecipeSummary")
            .field("name", &self.name)
            .field("recipe_arn", &self.recipe_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("domain", &self.domain)
            .finish()
    }
}
/// See [`RecipeSummary`](crate::model::RecipeSummary)
pub mod recipe_summary {
    /// A builder for [`RecipeSummary`](crate::model::RecipeSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) recipe_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) domain: std::option::Option<crate::model::Domain>,
    }
    impl Builder {
        /// <p>The name of the recipe.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the recipe.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
        pub fn recipe_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                recipe_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
        pub fn set_recipe_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self { recipe_arn: input, ..self }
        }
        /// <p>The status of the recipe.</p>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the recipe.</p>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self { status: input, ..self }
        }
        /// <p>The date and time (in Unix time) that the recipe was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the recipe was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the recipe was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the recipe was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// <p>The domain of the recipe (if the recipe is a Domain dataset group use case).</p>
        pub fn domain(self, input: crate::model::Domain) -> Self {
            Self {
                domain: Some(input),
                ..self
            }
        }
        /// <p>The domain of the recipe (if the recipe is a Domain dataset group use case).</p>
        pub fn set_domain(self, input: std::option::Option<crate::model::Domain>) -> Self {
            Self { domain: input, ..self }
        }
        /// Consumes the builder and constructs a [`RecipeSummary`](crate::model::RecipeSummary)
        pub fn build(self) -> crate::model::RecipeSummary {
            // Move every field from the builder into the summary in one step.
            let Self {
                name,
                recipe_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                domain,
            } = self;
            crate::model::RecipeSummary {
                name,
                recipe_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                domain,
            }
        }
    }
}
impl RecipeSummary {
    /// Creates a new builder-style object to manufacture [`RecipeSummary`](crate::model::RecipeSummary)
    pub fn builder() -> crate::model::recipe_summary::Builder {
        Default::default()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
// String-backed enum: the wire value "SERVICE" maps to `Service` (see the
// `From<&str>` and `as_str` impls below).
pub enum RecipeProvider {
    #[allow(missing_docs)] // documentation missing in model
    Service,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for RecipeProvider {
    fn from(s: &str) -> Self {
        // Unrecognized values are preserved verbatim in `Unknown` so newer
        // service responses still round-trip.
        if s == "SERVICE" {
            RecipeProvider::Service
        } else {
            RecipeProvider::Unknown(s.to_owned())
        }
    }
}
impl std::str::FromStr for RecipeProvider {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(RecipeProvider::from(s))
}
}
impl RecipeProvider {
/// Returns the `&str` value of the enum member.
pub fn as_str(&self) -> &str {
match self {
RecipeProvider::Service => "SERVICE",
RecipeProvider::Unknown(s) => s.as_ref(),
}
}
/// Returns all the `&str` values of the enum members.
pub fn values() -> &'static [&'static str] {
&["SERVICE"]
}
}
impl AsRef<str> for RecipeProvider {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>A short summary of a filter's attributes.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
// `Debug` is implemented by hand below rather than derived; all fields are
// `Option` and are normally read through the borrowed accessor methods.
pub struct FilterSummary {
    /// <p>The name of the filter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The ARN of the filter.</p>
    pub filter_arn: std::option::Option<std::string::String>,
    /// <p>The time at which the filter was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the filter was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The ARN of the dataset group to which the filter belongs.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>If the filter failed, the reason for the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The status of the filter.</p>
    pub status: std::option::Option<std::string::String>,
}
impl FilterSummary {
    /// <p>The name of the filter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The ARN of the filter.</p>
    pub fn filter_arn(&self) -> std::option::Option<&str> {
        self.filter_arn.as_ref().map(String::as_str)
    }
    /// <p>The time at which the filter was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.creation_date_time)
    }
    /// <p>The time at which the filter was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.last_updated_date_time)
    }
    /// <p>The ARN of the dataset group to which the filter belongs.</p>
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(String::as_str)
    }
    /// <p>If the filter failed, the reason for the failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(String::as_str)
    }
    /// <p>The status of the filter.</p>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
}
impl std::fmt::Debug for FilterSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Field order mirrors the struct declaration.
        f.debug_struct("FilterSummary")
            .field("name", &self.name)
            .field("filter_arn", &self.filter_arn)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("failure_reason", &self.failure_reason)
            .field("status", &self.status)
            .finish()
    }
}
/// See [`FilterSummary`](crate::model::FilterSummary)
pub mod filter_summary {
    /// A builder for [`FilterSummary`](crate::model::FilterSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) filter_arn: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The name of the filter.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the filter.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The ARN of the filter.</p>
        pub fn filter_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                filter_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The ARN of the filter.</p>
        pub fn set_filter_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self { filter_arn: input, ..self }
        }
        /// <p>The time at which the filter was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The time at which the filter was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The time at which the filter was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The time at which the filter was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// <p>The ARN of the dataset group to which the filter belongs.</p>
        pub fn dataset_group_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_group_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The ARN of the dataset group to which the filter belongs.</p>
        pub fn set_dataset_group_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dataset_group_arn: input,
                ..self
            }
        }
        /// <p>If the filter failed, the reason for the failure.</p>
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// <p>If the filter failed, the reason for the failure.</p>
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// <p>The status of the filter.</p>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the filter.</p>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self { status: input, ..self }
        }
        /// Consumes the builder and constructs a [`FilterSummary`](crate::model::FilterSummary)
        pub fn build(self) -> crate::model::FilterSummary {
            // Move every field from the builder into the summary in one step.
            let Self {
                name,
                filter_arn,
                creation_date_time,
                last_updated_date_time,
                dataset_group_arn,
                failure_reason,
                status,
            } = self;
            crate::model::FilterSummary {
                name,
                filter_arn,
                creation_date_time,
                last_updated_date_time,
                dataset_group_arn,
                failure_reason,
                status,
            }
        }
    }
}
impl FilterSummary {
    /// Creates a new builder-style object to manufacture [`FilterSummary`](crate::model::FilterSummary)
    pub fn builder() -> crate::model::filter_summary::Builder {
        Default::default()
    }
}
/// <p>Provides a summary of the properties of an event tracker. For a complete listing, call the
/// <a>DescribeEventTracker</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
// `Debug` is implemented by hand below rather than derived; all fields are
// `Option` and are normally read through the borrowed accessor methods.
pub struct EventTrackerSummary {
    /// <p>The name of the event tracker.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the event tracker.</p>
    pub event_tracker_arn: std::option::Option<std::string::String>,
    /// <p>The status of the event tracker.</p>
    /// <p>An event tracker can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the event tracker was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl EventTrackerSummary {
    /// <p>The name of the event tracker.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the event tracker.</p>
    pub fn event_tracker_arn(&self) -> std::option::Option<&str> {
        self.event_tracker_arn.as_ref().map(String::as_str)
    }
    /// <p>The status of the event tracker.</p>
    /// <p>An event tracker can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix time) that the event tracker was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.creation_date_time)
    }
    /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.last_updated_date_time)
    }
}
impl std::fmt::Debug for EventTrackerSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Field order mirrors the struct declaration.
        f.debug_struct("EventTrackerSummary")
            .field("name", &self.name)
            .field("event_tracker_arn", &self.event_tracker_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`EventTrackerSummary`](crate::model::EventTrackerSummary)
pub mod event_tracker_summary {
    /// A builder for [`EventTrackerSummary`](crate::model::EventTrackerSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) event_tracker_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the event tracker.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the event tracker.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the event tracker.</p>
        pub fn event_tracker_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                event_tracker_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the event tracker.</p>
        pub fn set_event_tracker_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                event_tracker_arn: input,
                ..self
            }
        }
        /// <p>The status of the event tracker.</p>
        /// <p>An event tracker can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the event tracker.</p>
        /// <p>An event tracker can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self { status: input, ..self }
        }
        /// <p>The date and time (in Unix time) that the event tracker was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the event tracker was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`EventTrackerSummary`](crate::model::EventTrackerSummary)
        pub fn build(self) -> crate::model::EventTrackerSummary {
            // Move every field from the builder into the summary in one step.
            let Self {
                name,
                event_tracker_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::EventTrackerSummary {
                name,
                event_tracker_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl EventTrackerSummary {
    /// Creates a new builder-style object to manufacture [`EventTrackerSummary`](crate::model::EventTrackerSummary)
    pub fn builder() -> crate::model::event_tracker_summary::Builder {
        Default::default()
    }
}
/// <p>Provides a summary of the properties of a dataset. For a complete listing, call the
/// <a>DescribeDataset</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
// `Debug` is implemented by hand below rather than derived; all fields are
// `Option` and are normally read through the borrowed accessor methods.
pub struct DatasetSummary {
    /// <p>The name of the dataset.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset.</p>
    pub dataset_arn: std::option::Option<std::string::String>,
    /// <p>The dataset type. One of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>Interactions</p>
    /// </li>
    /// <li>
    /// <p>Items</p>
    /// </li>
    /// <li>
    /// <p>Users</p>
    /// </li>
    /// <li>
    /// <p>Event-Interactions</p>
    /// </li>
    /// </ul>
    pub dataset_type: std::option::Option<std::string::String>,
    /// <p>The status of the dataset.</p>
    /// <p>A dataset can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the dataset was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the dataset was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl DatasetSummary {
    /// <p>The name of the dataset.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset.</p>
    pub fn dataset_arn(&self) -> std::option::Option<&str> {
        self.dataset_arn.as_ref().map(String::as_str)
    }
    /// <p>The dataset type. One of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>Interactions</p>
    /// </li>
    /// <li>
    /// <p>Items</p>
    /// </li>
    /// <li>
    /// <p>Users</p>
    /// </li>
    /// <li>
    /// <p>Event-Interactions</p>
    /// </li>
    /// </ul>
    pub fn dataset_type(&self) -> std::option::Option<&str> {
        self.dataset_type.as_ref().map(String::as_str)
    }
    /// <p>The status of the dataset.</p>
    /// <p>A dataset can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix time) that the dataset was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.creation_date_time)
    }
    /// <p>The date and time (in Unix time) that the dataset was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.last_updated_date_time)
    }
}
impl std::fmt::Debug for DatasetSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Field order mirrors the struct declaration.
        f.debug_struct("DatasetSummary")
            .field("name", &self.name)
            .field("dataset_arn", &self.dataset_arn)
            .field("dataset_type", &self.dataset_type)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`DatasetSummary`](crate::model::DatasetSummary)
pub mod dataset_summary {
    /// A builder for [`DatasetSummary`](crate::model::DatasetSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) dataset_arn: std::option::Option<std::string::String>,
        pub(crate) dataset_type: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the dataset.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the dataset.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset.</p>
        pub fn dataset_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_arn: Some(input.into()),
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset.</p>
        pub fn set_dataset_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self { dataset_arn: input, ..self }
        }
        /// <p>The dataset type. One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>Interactions</p>
        /// </li>
        /// <li>
        /// <p>Items</p>
        /// </li>
        /// <li>
        /// <p>Users</p>
        /// </li>
        /// <li>
        /// <p>Event-Interactions</p>
        /// </li>
        /// </ul>
        pub fn dataset_type(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_type: Some(input.into()),
                ..self
            }
        }
        /// <p>The dataset type. One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>Interactions</p>
        /// </li>
        /// <li>
        /// <p>Items</p>
        /// </li>
        /// <li>
        /// <p>Users</p>
        /// </li>
        /// <li>
        /// <p>Event-Interactions</p>
        /// </li>
        /// </ul>
        pub fn set_dataset_type(self, input: std::option::Option<std::string::String>) -> Self {
            Self { dataset_type: input, ..self }
        }
        /// <p>The status of the dataset.</p>
        /// <p>A dataset can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// <p>The status of the dataset.</p>
        /// <p>A dataset can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self { status: input, ..self }
        }
        /// <p>The date and time (in Unix time) that the dataset was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the dataset was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the dataset was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// <p>The date and time (in Unix time) that the dataset was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetSummary`](crate::model::DatasetSummary)
        pub fn build(self) -> crate::model::DatasetSummary {
            // Move every field from the builder into the summary in one step.
            let Self {
                name,
                dataset_arn,
                dataset_type,
                status,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::DatasetSummary {
                name,
                dataset_arn,
                dataset_type,
                status,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl DatasetSummary {
    /// Creates a new builder-style object to manufacture [`DatasetSummary`](crate::model::DatasetSummary)
    pub fn builder() -> crate::model::dataset_summary::Builder {
        Default::default()
    }
}
/// <p>Provides a summary of the properties of a dataset import job. For a complete listing, call the
/// <a>DescribeDatasetImportJob</a> API.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
// `Debug` is implemented by hand below rather than derived; all fields are
// `Option` and are normally read through the borrowed accessor methods.
pub struct DatasetImportJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the dataset import job.</p>
    pub dataset_import_job_arn: std::option::Option<std::string::String>,
    /// <p>The name of the dataset import job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The status of the dataset import job.</p>
    /// <p>A dataset import job can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the dataset import job was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the dataset import job status was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If a dataset import job fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl DatasetImportJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the dataset import job.</p>
    pub fn dataset_import_job_arn(&self) -> std::option::Option<&str> {
        self.dataset_import_job_arn.as_ref().map(String::as_str)
    }
    /// <p>The name of the dataset import job.</p>
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(String::as_str)
    }
    /// <p>The status of the dataset import job.</p>
    /// <p>A dataset import job can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix time) that the dataset import job was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.creation_date_time)
    }
    /// <p>The date and time (in Unix time) that the dataset import job status was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        Option::as_ref(&self.last_updated_date_time)
    }
    /// <p>If a dataset import job fails, the reason behind the failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(String::as_str)
    }
}
impl std::fmt::Debug for DatasetImportJobSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Field order mirrors the struct declaration.
        f.debug_struct("DatasetImportJobSummary")
            .field("dataset_import_job_arn", &self.dataset_import_job_arn)
            .field("job_name", &self.job_name)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`DatasetImportJobSummary`](crate::model::DatasetImportJobSummary)
pub mod dataset_import_job_summary {
    /// A builder for [`DatasetImportJobSummary`](crate::model::DatasetImportJobSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) dataset_import_job_arn: std::option::Option<std::string::String>,
        pub(crate) job_name: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the Amazon Resource Name (ARN) of the dataset import job.
        pub fn dataset_import_job_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_import_job_arn: Some(input.into()),
                ..self
            }
        }
        /// Replaces the ARN of the dataset import job with an optional value.
        pub fn set_dataset_import_job_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dataset_import_job_arn: input,
                ..self
            }
        }
        /// Sets the name of the dataset import job.
        pub fn job_name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                job_name: Some(input.into()),
                ..self
            }
        }
        /// Replaces the name of the dataset import job with an optional value.
        pub fn set_job_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                job_name: input,
                ..self
            }
        }
        /// Sets the status of the dataset import job.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// Replaces the status of the dataset import job with an optional value.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED.
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the dataset import job was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the creation date and time with an optional value.
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the job status was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the last-updated date and time with an optional value.
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Sets the reason behind the failure, for a failed dataset import job.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// Replaces the failure reason with an optional value.
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetImportJobSummary`](crate::model::DatasetImportJobSummary)
        pub fn build(self) -> crate::model::DatasetImportJobSummary {
            let Self {
                dataset_import_job_arn,
                job_name,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            } = self;
            crate::model::DatasetImportJobSummary {
                dataset_import_job_arn,
                job_name,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            }
        }
    }
}
impl DatasetImportJobSummary {
    /// Creates a new builder-style object to manufacture [`DatasetImportJobSummary`](crate::model::DatasetImportJobSummary)
    pub fn builder() -> crate::model::dataset_import_job_summary::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a dataset group. For a complete listing, call the
/// <a>DescribeDatasetGroup</a> API.</p>
///
/// Every field is modeled as `Option` — the service response may omit any of them.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DatasetGroupSummary {
    /// <p>The name of the dataset group.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>The status of the dataset group.</p>
    /// <p>A dataset group can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the dataset group was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the dataset group was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If creating a dataset group fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The domain of a Domain dataset group.</p>
    pub domain: std::option::Option<crate::model::Domain>,
}
impl DatasetGroupSummary {
    /// Returns the name of the dataset group, if set.
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
    /// Returns the Amazon Resource Name (ARN) of the dataset group, if set.
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(|arn| arn.as_str())
    }
    /// Returns the status of the dataset group, if set.
    ///
    /// A dataset group can be in one of the following states:
    /// CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
    /// or DELETE PENDING.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|status| status.as_str())
    }
    /// Returns the date and time (in Unix time) that the dataset group was created.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the date and time (in Unix time) that the dataset group was last updated.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the reason behind the failure, if creating the dataset group failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|reason| reason.as_str())
    }
    /// Returns the domain of a Domain dataset group, if set.
    pub fn domain(&self) -> std::option::Option<&crate::model::Domain> {
        match &self.domain {
            Some(domain) => Some(domain),
            None => None,
        }
    }
}
impl std::fmt::Debug for DatasetGroupSummary {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DatasetGroupSummary")
            .field("name", &self.name)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .field("domain", &self.domain)
            .finish()
    }
}
/// See [`DatasetGroupSummary`](crate::model::DatasetGroupSummary)
pub mod dataset_group_summary {
    /// A builder for [`DatasetGroupSummary`](crate::model::DatasetGroupSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) domain: std::option::Option<crate::model::Domain>,
    }
    impl Builder {
        /// Sets the name of the dataset group.
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// Replaces the name of the dataset group with an optional value.
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                name: input,
                ..self
            }
        }
        /// Sets the Amazon Resource Name (ARN) of the dataset group.
        pub fn dataset_group_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_group_arn: Some(input.into()),
                ..self
            }
        }
        /// Replaces the ARN of the dataset group with an optional value.
        pub fn set_dataset_group_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dataset_group_arn: input,
                ..self
            }
        }
        /// Sets the status of the dataset group.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
        /// or DELETE PENDING.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// Replaces the status of the dataset group with an optional value.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
        /// or DELETE PENDING.
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the dataset group was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the creation date and time with an optional value.
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the dataset group was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the last-updated date and time with an optional value.
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Sets the reason behind the failure, for a failed dataset group creation.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// Replaces the failure reason with an optional value.
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Sets the domain of a Domain dataset group.
        pub fn domain(self, input: crate::model::Domain) -> Self {
            Self {
                domain: Some(input),
                ..self
            }
        }
        /// Replaces the domain with an optional value.
        pub fn set_domain(self, input: std::option::Option<crate::model::Domain>) -> Self {
            Self {
                domain: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetGroupSummary`](crate::model::DatasetGroupSummary)
        pub fn build(self) -> crate::model::DatasetGroupSummary {
            let Self {
                name,
                dataset_group_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
                domain,
            } = self;
            crate::model::DatasetGroupSummary {
                name,
                dataset_group_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
                domain,
            }
        }
    }
}
impl DatasetGroupSummary {
    /// Creates a new builder-style object to manufacture [`DatasetGroupSummary`](crate::model::DatasetGroupSummary)
    pub fn builder() -> crate::model::dataset_group_summary::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a dataset export job. For a complete listing, call the
/// <a>DescribeDatasetExportJob</a> API.</p>
///
/// Every field is modeled as `Option` — the service response may omit any of them.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DatasetExportJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the dataset export job.</p>
    pub dataset_export_job_arn: std::option::Option<std::string::String>,
    /// <p>The name of the dataset export job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The status of the dataset export job.</p>
    /// <p>A dataset export job can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the dataset export job was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the dataset export job status was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If a dataset export job fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl DatasetExportJobSummary {
    /// Returns the Amazon Resource Name (ARN) of the dataset export job, if set.
    pub fn dataset_export_job_arn(&self) -> std::option::Option<&str> {
        self.dataset_export_job_arn.as_ref().map(|arn| arn.as_str())
    }
    /// Returns the name of the dataset export job, if set.
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(|name| name.as_str())
    }
    /// Returns the status of the dataset export job, if set.
    ///
    /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|status| status.as_str())
    }
    /// Returns the date and time (in Unix time) that the dataset export job was created.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the date and time (in Unix time) that the job status was last updated.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the reason behind the failure, if the dataset export job failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|reason| reason.as_str())
    }
}
impl std::fmt::Debug for DatasetExportJobSummary {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DatasetExportJobSummary")
            .field("dataset_export_job_arn", &self.dataset_export_job_arn)
            .field("job_name", &self.job_name)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`DatasetExportJobSummary`](crate::model::DatasetExportJobSummary)
pub mod dataset_export_job_summary {
    /// A builder for [`DatasetExportJobSummary`](crate::model::DatasetExportJobSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) dataset_export_job_arn: std::option::Option<std::string::String>,
        pub(crate) job_name: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the Amazon Resource Name (ARN) of the dataset export job.
        pub fn dataset_export_job_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                dataset_export_job_arn: Some(input.into()),
                ..self
            }
        }
        /// Replaces the ARN of the dataset export job with an optional value.
        pub fn set_dataset_export_job_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dataset_export_job_arn: input,
                ..self
            }
        }
        /// Sets the name of the dataset export job.
        pub fn job_name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                job_name: Some(input.into()),
                ..self
            }
        }
        /// Replaces the name of the dataset export job with an optional value.
        pub fn set_job_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                job_name: input,
                ..self
            }
        }
        /// Sets the status of the dataset export job.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// Replaces the status of the dataset export job with an optional value.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED.
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the dataset export job was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the creation date and time with an optional value.
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the job status was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the last-updated date and time with an optional value.
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Sets the reason behind the failure, for a failed dataset export job.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// Replaces the failure reason with an optional value.
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetExportJobSummary`](crate::model::DatasetExportJobSummary)
        pub fn build(self) -> crate::model::DatasetExportJobSummary {
            let Self {
                dataset_export_job_arn,
                job_name,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            } = self;
            crate::model::DatasetExportJobSummary {
                dataset_export_job_arn,
                job_name,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            }
        }
    }
}
impl DatasetExportJobSummary {
    /// Creates a new builder-style object to manufacture [`DatasetExportJobSummary`](crate::model::DatasetExportJobSummary)
    pub fn builder() -> crate::model::dataset_export_job_summary::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        std::default::Default::default()
    }
}
/// <p>Provides a summary of the properties of a campaign. For a complete listing, call the
/// <a>DescribeCampaign</a> API.</p>
///
/// Every field is modeled as `Option` — the service response may omit any of them.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CampaignSummary {
    /// <p>The name of the campaign.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the campaign.</p>
    pub campaign_arn: std::option::Option<std::string::String>,
    /// <p>The status of the campaign.</p>
    /// <p>A campaign can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the campaign was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the campaign was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If a campaign fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl CampaignSummary {
    /// Returns the name of the campaign, if set.
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
    /// Returns the Amazon Resource Name (ARN) of the campaign, if set.
    pub fn campaign_arn(&self) -> std::option::Option<&str> {
        self.campaign_arn.as_ref().map(|arn| arn.as_str())
    }
    /// Returns the status of the campaign, if set.
    ///
    /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
    /// or DELETE PENDING > DELETE IN_PROGRESS.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|status| status.as_str())
    }
    /// Returns the date and time (in Unix time) that the campaign was created.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the date and time (in Unix time) that the campaign was last updated.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the reason behind the failure, if the campaign failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|reason| reason.as_str())
    }
}
impl std::fmt::Debug for CampaignSummary {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CampaignSummary")
            .field("name", &self.name)
            .field("campaign_arn", &self.campaign_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`CampaignSummary`](crate::model::CampaignSummary)
pub mod campaign_summary {
    /// A builder for [`CampaignSummary`](crate::model::CampaignSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) campaign_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the name of the campaign.
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// Replaces the name of the campaign with an optional value.
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                name: input,
                ..self
            }
        }
        /// Sets the Amazon Resource Name (ARN) of the campaign.
        pub fn campaign_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                campaign_arn: Some(input.into()),
                ..self
            }
        }
        /// Replaces the ARN of the campaign with an optional value.
        pub fn set_campaign_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                campaign_arn: input,
                ..self
            }
        }
        /// Sets the status of the campaign.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
        /// or DELETE PENDING > DELETE IN_PROGRESS.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// Replaces the status of the campaign with an optional value.
        ///
        /// Possible states: CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED,
        /// or DELETE PENDING > DELETE IN_PROGRESS.
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the campaign was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the creation date and time with an optional value.
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// Sets the date and time (in Unix time) that the campaign was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// Replaces the last-updated date and time with an optional value.
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Sets the reason behind the failure, for a failed campaign.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// Replaces the failure reason with an optional value.
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CampaignSummary`](crate::model::CampaignSummary)
        pub fn build(self) -> crate::model::CampaignSummary {
            let Self {
                name,
                campaign_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            } = self;
            crate::model::CampaignSummary {
                name,
                campaign_arn,
                status,
                creation_date_time,
                last_updated_date_time,
                failure_reason,
            }
        }
    }
}
impl CampaignSummary {
    /// Creates a new builder-style object to manufacture [`CampaignSummary`](crate::model::CampaignSummary)
    pub fn builder() -> crate::model::campaign_summary::Builder {
        // The builder derives `Default`; let inference pick the concrete type.
        std::default::Default::default()
    }
}
/// <p>A truncated version of the <a>BatchSegmentJob</a> datatype. The <a>ListBatchSegmentJobs</a> operation returns a list of batch segment job
/// summaries.</p>
///
/// Every field is modeled as `Option` — the service response may omit any of them.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchSegmentJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the batch segment job.</p>
    pub batch_segment_job_arn: std::option::Option<std::string::String>,
    /// <p>The name of the batch segment job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The status of the batch segment job. The status is one of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>PENDING</p>
    /// </li>
    /// <li>
    /// <p>IN PROGRESS</p>
    /// </li>
    /// <li>
    /// <p>ACTIVE</p>
    /// </li>
    /// <li>
    /// <p>CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The time at which the batch segment job was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the batch segment job was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If the batch segment job failed, the reason for the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the solution version used by the batch segment job to generate batch segments.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
}
impl BatchSegmentJobSummary {
    /// Returns the Amazon Resource Name (ARN) of the batch segment job, if set.
    pub fn batch_segment_job_arn(&self) -> std::option::Option<&str> {
        self.batch_segment_job_arn.as_ref().map(|arn| arn.as_str())
    }
    /// Returns the name of the batch segment job, if set.
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(|name| name.as_str())
    }
    /// Returns the status of the batch segment job, if set.
    ///
    /// The status is one of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|status| status.as_str())
    }
    /// Returns the time at which the batch segment job was created.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the time at which the batch segment job was last updated.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(ts) => Some(ts),
            None => None,
        }
    }
    /// Returns the reason for the failure, if the batch segment job failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|reason| reason.as_str())
    }
    /// Returns the ARN of the solution version used to generate batch segments, if set.
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        self.solution_version_arn.as_ref().map(|arn| arn.as_str())
    }
}
impl std::fmt::Debug for BatchSegmentJobSummary {
    /// Renders every field through `Formatter::debug_struct`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BatchSegmentJobSummary")
            .field("batch_segment_job_arn", &self.batch_segment_job_arn)
            .field("job_name", &self.job_name)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .field("solution_version_arn", &self.solution_version_arn)
            .finish()
    }
}
/// See [`BatchSegmentJobSummary`](crate::model::BatchSegmentJobSummary)
pub mod batch_segment_job_summary {
    /// A builder for [`BatchSegmentJobSummary`](crate::model::BatchSegmentJobSummary).
    ///
    /// Every field is optional; call the fluent setters you need, then
    /// [`build`](Builder::build) to produce the summary.
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) batch_segment_job_arn: Option<String>,
        pub(crate) job_name: Option<String>,
        pub(crate) status: Option<String>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: Option<String>,
        pub(crate) solution_version_arn: Option<String>,
    }
    impl Builder {
        /// <p>The Amazon Resource Name (ARN) of the batch segment job.</p>
        pub fn batch_segment_job_arn(mut self, input: impl Into<String>) -> Self {
            self.batch_segment_job_arn = Some(input.into());
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the batch segment job.</p>
        pub fn set_batch_segment_job_arn(mut self, input: Option<String>) -> Self {
            self.batch_segment_job_arn = input;
            self
        }
        /// <p>The name of the batch segment job.</p>
        pub fn job_name(mut self, input: impl Into<String>) -> Self {
            self.job_name = Some(input.into());
            self
        }
        /// <p>The name of the batch segment job.</p>
        pub fn set_job_name(mut self, input: Option<String>) -> Self {
            self.job_name = input;
            self
        }
        /// <p>The status of the batch segment job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
        pub fn status(mut self, input: impl Into<String>) -> Self {
            self.status = Some(input.into());
            self
        }
        /// <p>The status of the batch segment job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
        pub fn set_status(mut self, input: Option<String>) -> Self {
            self.status = input;
            self
        }
        /// <p>The time at which the batch segment job was created.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.creation_date_time = Some(input);
            self
        }
        /// <p>The time at which the batch segment job was created.</p>
        pub fn set_creation_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// <p>The time at which the batch segment job was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.last_updated_date_time = Some(input);
            self
        }
        /// <p>The time at which the batch segment job was last updated.</p>
        pub fn set_last_updated_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// <p>If the batch segment job failed, the reason for the failure.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self {
            self.failure_reason = Some(input.into());
            self
        }
        /// <p>If the batch segment job failed, the reason for the failure.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self {
            self.failure_reason = input;
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the solution version used by the batch segment job to generate batch segments.</p>
        pub fn solution_version_arn(mut self, input: impl Into<String>) -> Self {
            self.solution_version_arn = Some(input.into());
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the solution version used by the batch segment job to generate batch segments.</p>
        pub fn set_solution_version_arn(mut self, input: Option<String>) -> Self {
            self.solution_version_arn = input;
            self
        }
        /// Consumes the builder and constructs a [`BatchSegmentJobSummary`](crate::model::BatchSegmentJobSummary)
        pub fn build(self) -> crate::model::BatchSegmentJobSummary {
            // Move every accumulated field into the target struct unchanged.
            crate::model::BatchSegmentJobSummary {
                batch_segment_job_arn: self.batch_segment_job_arn,
                job_name: self.job_name,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
                solution_version_arn: self.solution_version_arn,
            }
        }
    }
}
impl BatchSegmentJobSummary {
    /// Creates a new builder-style object to manufacture [`BatchSegmentJobSummary`](crate::model::BatchSegmentJobSummary)
    pub fn builder() -> crate::model::batch_segment_job_summary::Builder {
        // The builder derives `Default`; the concrete type is fixed by the return type.
        Default::default()
    }
}
/// <p>A truncated version of the <a>BatchInferenceJob</a> datatype. The
/// <a>ListBatchInferenceJobs</a> operation returns a list of batch inference job summaries.</p>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct BatchInferenceJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
    pub batch_inference_job_arn: Option<String>,
    /// <p>The name of the batch inference job.</p>
    pub job_name: Option<String>,
    /// <p>The status of the batch inference job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
    pub status: Option<String>,
    /// <p>The time at which the batch inference job was created.</p>
    pub creation_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the batch inference job was last updated.</p>
    pub last_updated_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>If the batch inference job failed, the reason for the failure.</p>
    pub failure_reason: Option<String>,
    /// <p>The ARN of the solution version used by the batch inference job.</p>
    pub solution_version_arn: Option<String>,
}
impl BatchInferenceJobSummary {
    /// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
    pub fn batch_inference_job_arn(&self) -> Option<&str> {
        self.batch_inference_job_arn.as_deref()
    }
    /// <p>The name of the batch inference job.</p>
    pub fn job_name(&self) -> Option<&str> {
        self.job_name.as_deref()
    }
    /// <p>The status of the batch inference job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
    pub fn status(&self) -> Option<&str> {
        self.status.as_deref()
    }
    /// <p>The time at which the batch inference job was created.</p>
    pub fn creation_date_time(&self) -> Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The time at which the batch inference job was last updated.</p>
    pub fn last_updated_date_time(&self) -> Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>If the batch inference job failed, the reason for the failure.</p>
    pub fn failure_reason(&self) -> Option<&str> {
        self.failure_reason.as_deref()
    }
    /// <p>The ARN of the solution version used by the batch inference job.</p>
    pub fn solution_version_arn(&self) -> Option<&str> {
        self.solution_version_arn.as_deref()
    }
}
impl std::fmt::Debug for BatchInferenceJobSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` form; field names and order match the struct,
        // so the rendered output is identical to the original implementation.
        f.debug_struct("BatchInferenceJobSummary")
            .field("batch_inference_job_arn", &self.batch_inference_job_arn)
            .field("job_name", &self.job_name)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .field("solution_version_arn", &self.solution_version_arn)
            .finish()
    }
}
/// See [`BatchInferenceJobSummary`](crate::model::BatchInferenceJobSummary)
pub mod batch_inference_job_summary {
    /// A builder for [`BatchInferenceJobSummary`](crate::model::BatchInferenceJobSummary).
    ///
    /// Every field is optional; call the fluent setters you need, then
    /// [`build`](Builder::build) to produce the summary.
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) batch_inference_job_arn: Option<String>,
        pub(crate) job_name: Option<String>,
        pub(crate) status: Option<String>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: Option<String>,
        pub(crate) solution_version_arn: Option<String>,
    }
    impl Builder {
        /// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
        pub fn batch_inference_job_arn(mut self, input: impl Into<String>) -> Self {
            self.batch_inference_job_arn = Some(input.into());
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
        pub fn set_batch_inference_job_arn(mut self, input: Option<String>) -> Self {
            self.batch_inference_job_arn = input;
            self
        }
        /// <p>The name of the batch inference job.</p>
        pub fn job_name(mut self, input: impl Into<String>) -> Self {
            self.job_name = Some(input.into());
            self
        }
        /// <p>The name of the batch inference job.</p>
        pub fn set_job_name(mut self, input: Option<String>) -> Self {
            self.job_name = input;
            self
        }
        /// <p>The status of the batch inference job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
        pub fn status(mut self, input: impl Into<String>) -> Self {
            self.status = Some(input.into());
            self
        }
        /// <p>The status of the batch inference job. One of: PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
        pub fn set_status(mut self, input: Option<String>) -> Self {
            self.status = input;
            self
        }
        /// <p>The time at which the batch inference job was created.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.creation_date_time = Some(input);
            self
        }
        /// <p>The time at which the batch inference job was created.</p>
        pub fn set_creation_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// <p>The time at which the batch inference job was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.last_updated_date_time = Some(input);
            self
        }
        /// <p>The time at which the batch inference job was last updated.</p>
        pub fn set_last_updated_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// <p>If the batch inference job failed, the reason for the failure.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self {
            self.failure_reason = Some(input.into());
            self
        }
        /// <p>If the batch inference job failed, the reason for the failure.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self {
            self.failure_reason = input;
            self
        }
        /// <p>The ARN of the solution version used by the batch inference job.</p>
        pub fn solution_version_arn(mut self, input: impl Into<String>) -> Self {
            self.solution_version_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the solution version used by the batch inference job.</p>
        pub fn set_solution_version_arn(mut self, input: Option<String>) -> Self {
            self.solution_version_arn = input;
            self
        }
        /// Consumes the builder and constructs a [`BatchInferenceJobSummary`](crate::model::BatchInferenceJobSummary)
        pub fn build(self) -> crate::model::BatchInferenceJobSummary {
            // Move every accumulated field into the target struct unchanged.
            crate::model::BatchInferenceJobSummary {
                batch_inference_job_arn: self.batch_inference_job_arn,
                job_name: self.job_name,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
                solution_version_arn: self.solution_version_arn,
            }
        }
    }
}
impl BatchInferenceJobSummary {
    /// Creates a new builder-style object to manufacture [`BatchInferenceJobSummary`](crate::model::BatchInferenceJobSummary)
    pub fn builder() -> crate::model::batch_inference_job_summary::Builder {
        // The builder derives `Default`; the concrete type is fixed by the return type.
        Default::default()
    }
}
/// <p>An object that provides information about a specific version of a <a>Solution</a> in a
/// Custom dataset group.</p>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct SolutionVersion {
    /// <p>The ARN of the solution version.</p>
    pub solution_version_arn: Option<String>,
    /// <p>The ARN of the solution.</p>
    pub solution_arn: Option<String>,
    /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The default is <code>false</code>.</p>
    pub perform_hpo: bool,
    /// <p>When true, Amazon Personalize searches for the most optimal recipe according to the solution
    /// configuration. When false (the default), Amazon Personalize uses <code>recipeArn</code>.</p>
    pub perform_auto_ml: bool,
    /// <p>The ARN of the recipe used in the solution.</p>
    pub recipe_arn: Option<String>,
    /// <p>The event type (for example, 'click' or 'like') that is used for training the model.</p>
    pub event_type: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group providing the training data.</p>
    pub dataset_group_arn: Option<String>,
    /// <p>Describes the configuration properties for the solution.</p>
    pub solution_config: Option<crate::model::SolutionConfig>,
    /// <p>The time used to train the model. You are billed for the time it takes to train a model.
    /// This field is visible only after Amazon Personalize successfully trains a model.</p>
    pub training_hours: Option<f64>,
    /// <p>The scope of training performed when creating the solution version. <code>FULL</code>
    /// trains on the entirety of the input solution's training data; <code>UPDATE</code> processes
    /// only the data that has changed relative to the input solution.</p>
    /// <important>
    /// <p><code>UPDATE</code> can only be used when an active solution version already exists that
    /// was created from the input solution with the <code>FULL</code> option, and the input
    /// solution was trained with the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>
    /// recipe or the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-hrnn-coldstart.html">HRNN-Coldstart</a> recipe.</p>
    /// </important>
    pub training_mode: Option<crate::model::TrainingMode>,
    /// <p>If hyperparameter optimization was performed, contains the hyperparameter values of the
    /// best performing model.</p>
    pub tuned_hpo_params: Option<crate::model::TunedHpoParams>,
    /// <p>The status of the solution version. One of: CREATE PENDING, CREATE IN_PROGRESS, ACTIVE,
    /// CREATE FAILED, CREATE STOPPING, or CREATE STOPPED.</p>
    pub status: Option<String>,
    /// <p>If training a solution version fails, the reason for the failure.</p>
    pub failure_reason: Option<String>,
    /// <p>The date and time (in Unix time) that this version of the solution was created.</p>
    pub creation_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub last_updated_date_time: Option<aws_smithy_types::DateTime>,
}
impl SolutionVersion {
    /// <p>The ARN of the solution version.</p>
    pub fn solution_version_arn(&self) -> Option<&str> {
        self.solution_version_arn.as_deref()
    }
    /// <p>The ARN of the solution.</p>
    pub fn solution_arn(&self) -> Option<&str> {
        self.solution_arn.as_deref()
    }
    /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The default is <code>false</code>.</p>
    pub fn perform_hpo(&self) -> bool {
        self.perform_hpo
    }
    /// <p>When true, Amazon Personalize searches for the most optimal recipe according to the solution
    /// configuration. When false (the default), Amazon Personalize uses <code>recipeArn</code>.</p>
    pub fn perform_auto_ml(&self) -> bool {
        self.perform_auto_ml
    }
    /// <p>The ARN of the recipe used in the solution.</p>
    pub fn recipe_arn(&self) -> Option<&str> {
        self.recipe_arn.as_deref()
    }
    /// <p>The event type (for example, 'click' or 'like') that is used for training the model.</p>
    pub fn event_type(&self) -> Option<&str> {
        self.event_type.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset group providing the training data.</p>
    pub fn dataset_group_arn(&self) -> Option<&str> {
        self.dataset_group_arn.as_deref()
    }
    /// <p>Describes the configuration properties for the solution.</p>
    pub fn solution_config(&self) -> Option<&crate::model::SolutionConfig> {
        self.solution_config.as_ref()
    }
    /// <p>The time used to train the model. You are billed for the time it takes to train a model.
    /// This field is visible only after Amazon Personalize successfully trains a model.</p>
    pub fn training_hours(&self) -> Option<f64> {
        self.training_hours
    }
    /// <p>The scope of training performed when creating the solution version. <code>FULL</code>
    /// trains on the entirety of the input solution's training data; <code>UPDATE</code> processes
    /// only the data that has changed relative to the input solution.</p>
    /// <important>
    /// <p><code>UPDATE</code> can only be used when an active solution version already exists that
    /// was created from the input solution with the <code>FULL</code> option, and the input
    /// solution was trained with the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>
    /// recipe or the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-hrnn-coldstart.html">HRNN-Coldstart</a> recipe.</p>
    /// </important>
    pub fn training_mode(&self) -> Option<&crate::model::TrainingMode> {
        self.training_mode.as_ref()
    }
    /// <p>If hyperparameter optimization was performed, contains the hyperparameter values of the
    /// best performing model.</p>
    pub fn tuned_hpo_params(&self) -> Option<&crate::model::TunedHpoParams> {
        self.tuned_hpo_params.as_ref()
    }
    /// <p>The status of the solution version. One of: CREATE PENDING, CREATE IN_PROGRESS, ACTIVE,
    /// CREATE FAILED, CREATE STOPPING, or CREATE STOPPED.</p>
    pub fn status(&self) -> Option<&str> {
        self.status.as_deref()
    }
    /// <p>If training a solution version fails, the reason for the failure.</p>
    pub fn failure_reason(&self) -> Option<&str> {
        self.failure_reason.as_deref()
    }
    /// <p>The date and time (in Unix time) that this version of the solution was created.</p>
    pub fn creation_date_time(&self) -> Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub fn last_updated_date_time(&self) -> Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for SolutionVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained `DebugStruct` form; field names and order match the struct,
        // so the rendered output is identical to the original implementation.
        f.debug_struct("SolutionVersion")
            .field("solution_version_arn", &self.solution_version_arn)
            .field("solution_arn", &self.solution_arn)
            .field("perform_hpo", &self.perform_hpo)
            .field("perform_auto_ml", &self.perform_auto_ml)
            .field("recipe_arn", &self.recipe_arn)
            .field("event_type", &self.event_type)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("solution_config", &self.solution_config)
            .field("training_hours", &self.training_hours)
            .field("training_mode", &self.training_mode)
            .field("tuned_hpo_params", &self.tuned_hpo_params)
            .field("status", &self.status)
            .field("failure_reason", &self.failure_reason)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`SolutionVersion`](crate::model::SolutionVersion)
pub mod solution_version {
    /// A builder for [`SolutionVersion`](crate::model::SolutionVersion).
    ///
    /// Every field is optional; call the fluent setters you need, then
    /// [`build`](Builder::build) to produce the solution version. The two boolean
    /// flags default to `false` when unset.
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) solution_version_arn: Option<String>,
        pub(crate) solution_arn: Option<String>,
        pub(crate) perform_hpo: Option<bool>,
        pub(crate) perform_auto_ml: Option<bool>,
        pub(crate) recipe_arn: Option<String>,
        pub(crate) event_type: Option<String>,
        pub(crate) dataset_group_arn: Option<String>,
        pub(crate) solution_config: Option<crate::model::SolutionConfig>,
        pub(crate) training_hours: Option<f64>,
        pub(crate) training_mode: Option<crate::model::TrainingMode>,
        pub(crate) tuned_hpo_params: Option<crate::model::TunedHpoParams>,
        pub(crate) status: Option<String>,
        pub(crate) failure_reason: Option<String>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The ARN of the solution version.</p>
        pub fn solution_version_arn(mut self, input: impl Into<String>) -> Self {
            self.solution_version_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the solution version.</p>
        pub fn set_solution_version_arn(mut self, input: Option<String>) -> Self {
            self.solution_version_arn = input;
            self
        }
        /// <p>The ARN of the solution.</p>
        pub fn solution_arn(mut self, input: impl Into<String>) -> Self {
            self.solution_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the solution.</p>
        pub fn set_solution_arn(mut self, input: Option<String>) -> Self {
            self.solution_arn = input;
            self
        }
        /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The default is <code>false</code>.</p>
        pub fn perform_hpo(mut self, input: bool) -> Self {
            self.perform_hpo = Some(input);
            self
        }
        /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The default is <code>false</code>.</p>
        pub fn set_perform_hpo(mut self, input: Option<bool>) -> Self {
            self.perform_hpo = input;
            self
        }
        /// <p>When true, Amazon Personalize searches for the most optimal recipe according to the solution
        /// configuration. When false (the default), Amazon Personalize uses <code>recipeArn</code>.</p>
        pub fn perform_auto_ml(mut self, input: bool) -> Self {
            self.perform_auto_ml = Some(input);
            self
        }
        /// <p>When true, Amazon Personalize searches for the most optimal recipe according to the solution
        /// configuration. When false (the default), Amazon Personalize uses <code>recipeArn</code>.</p>
        pub fn set_perform_auto_ml(mut self, input: Option<bool>) -> Self {
            self.perform_auto_ml = input;
            self
        }
        /// <p>The ARN of the recipe used in the solution.</p>
        pub fn recipe_arn(mut self, input: impl Into<String>) -> Self {
            self.recipe_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the recipe used in the solution.</p>
        pub fn set_recipe_arn(mut self, input: Option<String>) -> Self {
            self.recipe_arn = input;
            self
        }
        /// <p>The event type (for example, 'click' or 'like') that is used for training the model.</p>
        pub fn event_type(mut self, input: impl Into<String>) -> Self {
            self.event_type = Some(input.into());
            self
        }
        /// <p>The event type (for example, 'click' or 'like') that is used for training the model.</p>
        pub fn set_event_type(mut self, input: Option<String>) -> Self {
            self.event_type = input;
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group providing the training data.</p>
        pub fn dataset_group_arn(mut self, input: impl Into<String>) -> Self {
            self.dataset_group_arn = Some(input.into());
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group providing the training data.</p>
        pub fn set_dataset_group_arn(mut self, input: Option<String>) -> Self {
            self.dataset_group_arn = input;
            self
        }
        /// <p>Describes the configuration properties for the solution.</p>
        pub fn solution_config(mut self, input: crate::model::SolutionConfig) -> Self {
            self.solution_config = Some(input);
            self
        }
        /// <p>Describes the configuration properties for the solution.</p>
        pub fn set_solution_config(
            mut self,
            input: Option<crate::model::SolutionConfig>,
        ) -> Self {
            self.solution_config = input;
            self
        }
        /// <p>The time used to train the model. You are billed for the time it takes to train a model.
        /// This field is visible only after Amazon Personalize successfully trains a model.</p>
        pub fn training_hours(mut self, input: f64) -> Self {
            self.training_hours = Some(input);
            self
        }
        /// <p>The time used to train the model. You are billed for the time it takes to train a model.
        /// This field is visible only after Amazon Personalize successfully trains a model.</p>
        pub fn set_training_hours(mut self, input: Option<f64>) -> Self {
            self.training_hours = input;
            self
        }
        /// <p>The scope of training performed when creating the solution version. <code>FULL</code>
        /// trains on the entirety of the input solution's training data; <code>UPDATE</code> processes
        /// only the data that has changed relative to the input solution.</p>
        /// <important>
        /// <p><code>UPDATE</code> can only be used when an active solution version already exists that
        /// was created from the input solution with the <code>FULL</code> option, and the input
        /// solution was trained with the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>
        /// recipe or the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-hrnn-coldstart.html">HRNN-Coldstart</a> recipe.</p>
        /// </important>
        pub fn training_mode(mut self, input: crate::model::TrainingMode) -> Self {
            self.training_mode = Some(input);
            self
        }
        /// <p>The scope of training performed when creating the solution version. <code>FULL</code>
        /// trains on the entirety of the input solution's training data; <code>UPDATE</code> processes
        /// only the data that has changed relative to the input solution.</p>
        /// <important>
        /// <p><code>UPDATE</code> can only be used when an active solution version already exists that
        /// was created from the input solution with the <code>FULL</code> option, and the input
        /// solution was trained with the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>
        /// recipe or the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-hrnn-coldstart.html">HRNN-Coldstart</a> recipe.</p>
        /// </important>
        pub fn set_training_mode(
            mut self,
            input: Option<crate::model::TrainingMode>,
        ) -> Self {
            self.training_mode = input;
            self
        }
        /// <p>If hyperparameter optimization was performed, contains the hyperparameter values of the
        /// best performing model.</p>
        pub fn tuned_hpo_params(mut self, input: crate::model::TunedHpoParams) -> Self {
            self.tuned_hpo_params = Some(input);
            self
        }
        /// <p>If hyperparameter optimization was performed, contains the hyperparameter values of the
        /// best performing model.</p>
        pub fn set_tuned_hpo_params(
            mut self,
            input: Option<crate::model::TunedHpoParams>,
        ) -> Self {
            self.tuned_hpo_params = input;
            self
        }
        /// <p>The status of the solution version. One of: CREATE PENDING, CREATE IN_PROGRESS, ACTIVE,
        /// CREATE FAILED, CREATE STOPPING, or CREATE STOPPED.</p>
        pub fn status(mut self, input: impl Into<String>) -> Self {
            self.status = Some(input.into());
            self
        }
        /// <p>The status of the solution version. One of: CREATE PENDING, CREATE IN_PROGRESS, ACTIVE,
        /// CREATE FAILED, CREATE STOPPING, or CREATE STOPPED.</p>
        pub fn set_status(mut self, input: Option<String>) -> Self {
            self.status = input;
            self
        }
        /// <p>If training a solution version fails, the reason for the failure.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self {
            self.failure_reason = Some(input.into());
            self
        }
        /// <p>If training a solution version fails, the reason for the failure.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self {
            self.failure_reason = input;
            self
        }
        /// <p>The date and time (in Unix time) that this version of the solution was created.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.creation_date_time = Some(input);
            self
        }
        /// <p>The date and time (in Unix time) that this version of the solution was created.</p>
        pub fn set_creation_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.last_updated_date_time = Some(input);
            self
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn set_last_updated_date_time(
            mut self,
            input: Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// Consumes the builder and constructs a [`SolutionVersion`](crate::model::SolutionVersion)
        pub fn build(self) -> crate::model::SolutionVersion {
            crate::model::SolutionVersion {
                solution_version_arn: self.solution_version_arn,
                solution_arn: self.solution_arn,
                // Unset boolean flags fall back to `false` (bool's default).
                perform_hpo: self.perform_hpo.unwrap_or_default(),
                perform_auto_ml: self.perform_auto_ml.unwrap_or_default(),
                recipe_arn: self.recipe_arn,
                event_type: self.event_type,
                dataset_group_arn: self.dataset_group_arn,
                solution_config: self.solution_config,
                training_hours: self.training_hours,
                training_mode: self.training_mode,
                tuned_hpo_params: self.tuned_hpo_params,
                status: self.status,
                failure_reason: self.failure_reason,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
            }
        }
    }
}
impl SolutionVersion {
    /// Creates a new builder-style object to manufacture [`SolutionVersion`](crate::model::SolutionVersion)
    pub fn builder() -> crate::model::solution_version::Builder {
        // The return type pins the concrete builder; `Default` supplies the empty state.
        std::default::Default::default()
    }
}
/// <p>If hyperparameter optimization (HPO) was performed, contains the hyperparameter values of
/// the best performing model.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TunedHpoParams {
    /// <p>A list of the hyperparameter values of the best performing model.</p>
    pub algorithm_hyper_parameters:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl TunedHpoParams {
    /// <p>A list of the hyperparameter values of the best performing model.</p>
    pub fn algorithm_hyper_parameters(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.algorithm_hyper_parameters.as_ref()
    }
}
impl std::fmt::Debug for TunedHpoParams {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; equivalent to field-by-field formatting.
        f.debug_struct("TunedHpoParams")
            .field(
                "algorithm_hyper_parameters",
                &self.algorithm_hyper_parameters,
            )
            .finish()
    }
}
/// See [`TunedHpoParams`](crate::model::TunedHpoParams)
pub mod tuned_hpo_params {
    /// A builder for [`TunedHpoParams`](crate::model::TunedHpoParams)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) algorithm_hyper_parameters: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
    }
    impl Builder {
        /// Adds a key-value pair to `algorithm_hyper_parameters`.
        ///
        /// To override the contents of this collection use [`set_algorithm_hyper_parameters`](Self::set_algorithm_hyper_parameters).
        ///
        /// <p>A list of the hyperparameter values of the best performing model.</p>
        pub fn algorithm_hyper_parameters(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insert, then append in place.
            self.algorithm_hyper_parameters
                .get_or_insert_with(std::collections::HashMap::new)
                .insert(k.into(), v.into());
            self
        }
        /// <p>A list of the hyperparameter values of the best performing model.</p>
        pub fn set_algorithm_hyper_parameters(
            self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            Self {
                algorithm_hyper_parameters: input,
            }
        }
        /// Consumes the builder and constructs a [`TunedHpoParams`](crate::model::TunedHpoParams)
        pub fn build(self) -> crate::model::TunedHpoParams {
            crate::model::TunedHpoParams {
                algorithm_hyper_parameters: self.algorithm_hyper_parameters,
            }
        }
    }
}
impl TunedHpoParams {
    /// Creates a new builder-style object to manufacture [`TunedHpoParams`](crate::model::TunedHpoParams)
    pub fn builder() -> crate::model::tuned_hpo_params::Builder {
        std::default::Default::default()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum TrainingMode {
    #[allow(missing_docs)] // documentation missing in model
    Full,
    #[allow(missing_docs)] // documentation missing in model
    Update,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for TrainingMode {
    fn from(s: &str) -> Self {
        // Unrecognized wire values are preserved verbatim in `Unknown` so newer
        // service responses round-trip instead of failing.
        match s {
            "FULL" => Self::Full,
            "UPDATE" => Self::Update,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for TrainingMode {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // Delegates to the infallible `From<&str>` conversion.
        Ok(s.into())
    }
}
impl TrainingMode {
    /// Returns the `&str` value of the enum member.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Full => "FULL",
            Self::Update => "UPDATE",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all the `&str` values of the enum members.
    pub fn values() -> &'static [&'static str] {
        &["FULL", "UPDATE"]
    }
}
impl AsRef<str> for TrainingMode {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>Describes the configuration properties for the solution.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct SolutionConfig {
    /// <p>Only events with a value greater than or equal to this threshold are
    /// used for training a model.</p>
    pub event_value_threshold: std::option::Option<std::string::String>,
    /// <p>Describes the properties for hyperparameter optimization (HPO).</p>
    pub hpo_config: std::option::Option<crate::model::HpoConfig>,
    /// <p>Lists the hyperparameter names and ranges.</p>
    pub algorithm_hyper_parameters:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>Lists the feature transformation parameters.</p>
    pub feature_transformation_parameters:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>The <a>AutoMLConfig</a> object containing a list of recipes to search
    /// when AutoML is performed.</p>
    pub auto_ml_config: std::option::Option<crate::model::AutoMlConfig>,
    /// <p>Describes the additional objective for the solution, such as maximizing streaming
    /// minutes or increasing revenue. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/optimizing-solution-for-objective.html">Optimizing a solution</a>.</p>
    pub optimization_objective: std::option::Option<crate::model::OptimizationObjective>,
}
impl SolutionConfig {
    /// <p>Only events with a value greater than or equal to this threshold are
    /// used for training a model.</p>
    pub fn event_value_threshold(&self) -> std::option::Option<&str> {
        self.event_value_threshold.as_deref()
    }
    /// <p>Describes the properties for hyperparameter optimization (HPO).</p>
    pub fn hpo_config(&self) -> std::option::Option<&crate::model::HpoConfig> {
        self.hpo_config.as_ref()
    }
    /// <p>Lists the hyperparameter names and ranges.</p>
    pub fn algorithm_hyper_parameters(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.algorithm_hyper_parameters.as_ref()
    }
    /// <p>Lists the feature transformation parameters.</p>
    pub fn feature_transformation_parameters(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.feature_transformation_parameters.as_ref()
    }
    /// <p>The <a>AutoMLConfig</a> object containing a list of recipes to search
    /// when AutoML is performed.</p>
    pub fn auto_ml_config(&self) -> std::option::Option<&crate::model::AutoMlConfig> {
        self.auto_ml_config.as_ref()
    }
    /// <p>Describes the additional objective for the solution, such as maximizing streaming
    /// minutes or increasing revenue. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/optimizing-solution-for-objective.html">Optimizing a solution</a>.</p>
    pub fn optimization_objective(
        &self,
    ) -> std::option::Option<&crate::model::OptimizationObjective> {
        self.optimization_objective.as_ref()
    }
}
impl std::fmt::Debug for SolutionConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; same field order and names as before.
        f.debug_struct("SolutionConfig")
            .field("event_value_threshold", &self.event_value_threshold)
            .field("hpo_config", &self.hpo_config)
            .field(
                "algorithm_hyper_parameters",
                &self.algorithm_hyper_parameters,
            )
            .field(
                "feature_transformation_parameters",
                &self.feature_transformation_parameters,
            )
            .field("auto_ml_config", &self.auto_ml_config)
            .field("optimization_objective", &self.optimization_objective)
            .finish()
    }
}
/// See [`SolutionConfig`](crate::model::SolutionConfig)
pub mod solution_config {
    /// A builder for [`SolutionConfig`](crate::model::SolutionConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) event_value_threshold: std::option::Option<std::string::String>,
        pub(crate) hpo_config: std::option::Option<crate::model::HpoConfig>,
        pub(crate) algorithm_hyper_parameters: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) feature_transformation_parameters: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) auto_ml_config: std::option::Option<crate::model::AutoMlConfig>,
        pub(crate) optimization_objective: std::option::Option<crate::model::OptimizationObjective>,
    }
    impl Builder {
        /// <p>Only events with a value greater than or equal to this threshold are
        /// used for training a model.</p>
        pub fn event_value_threshold(self, input: impl Into<std::string::String>) -> Self {
            Self {
                event_value_threshold: Some(input.into()),
                ..self
            }
        }
        /// <p>Only events with a value greater than or equal to this threshold are
        /// used for training a model.</p>
        pub fn set_event_value_threshold(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                event_value_threshold: input,
                ..self
            }
        }
        /// <p>Describes the properties for hyperparameter optimization (HPO).</p>
        pub fn hpo_config(self, input: crate::model::HpoConfig) -> Self {
            Self {
                hpo_config: Some(input),
                ..self
            }
        }
        /// <p>Describes the properties for hyperparameter optimization (HPO).</p>
        pub fn set_hpo_config(
            self,
            input: std::option::Option<crate::model::HpoConfig>,
        ) -> Self {
            Self {
                hpo_config: input,
                ..self
            }
        }
        /// Adds a key-value pair to `algorithm_hyper_parameters`.
        ///
        /// To override the contents of this collection use [`set_algorithm_hyper_parameters`](Self::set_algorithm_hyper_parameters).
        ///
        /// <p>Lists the hyperparameter names and ranges.</p>
        pub fn algorithm_hyper_parameters(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insert, then append in place.
            self.algorithm_hyper_parameters
                .get_or_insert_with(std::collections::HashMap::new)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Lists the hyperparameter names and ranges.</p>
        pub fn set_algorithm_hyper_parameters(
            self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            Self {
                algorithm_hyper_parameters: input,
                ..self
            }
        }
        /// Adds a key-value pair to `feature_transformation_parameters`.
        ///
        /// To override the contents of this collection use [`set_feature_transformation_parameters`](Self::set_feature_transformation_parameters).
        ///
        /// <p>Lists the feature transformation parameters.</p>
        pub fn feature_transformation_parameters(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            self.feature_transformation_parameters
                .get_or_insert_with(std::collections::HashMap::new)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Lists the feature transformation parameters.</p>
        pub fn set_feature_transformation_parameters(
            self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            Self {
                feature_transformation_parameters: input,
                ..self
            }
        }
        /// <p>The <a>AutoMLConfig</a> object containing a list of recipes to search
        /// when AutoML is performed.</p>
        pub fn auto_ml_config(self, input: crate::model::AutoMlConfig) -> Self {
            Self {
                auto_ml_config: Some(input),
                ..self
            }
        }
        /// <p>The <a>AutoMLConfig</a> object containing a list of recipes to search
        /// when AutoML is performed.</p>
        pub fn set_auto_ml_config(
            self,
            input: std::option::Option<crate::model::AutoMlConfig>,
        ) -> Self {
            Self {
                auto_ml_config: input,
                ..self
            }
        }
        /// <p>Describes the additional objective for the solution, such as maximizing streaming
        /// minutes or increasing revenue. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/optimizing-solution-for-objective.html">Optimizing a solution</a>.</p>
        pub fn optimization_objective(
            self,
            input: crate::model::OptimizationObjective,
        ) -> Self {
            Self {
                optimization_objective: Some(input),
                ..self
            }
        }
        /// <p>Describes the additional objective for the solution, such as maximizing streaming
        /// minutes or increasing revenue. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/optimizing-solution-for-objective.html">Optimizing a solution</a>.</p>
        pub fn set_optimization_objective(
            self,
            input: std::option::Option<crate::model::OptimizationObjective>,
        ) -> Self {
            Self {
                optimization_objective: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`SolutionConfig`](crate::model::SolutionConfig)
        pub fn build(self) -> crate::model::SolutionConfig {
            // Move all members out at once, then assemble the model type.
            let Self {
                event_value_threshold,
                hpo_config,
                algorithm_hyper_parameters,
                feature_transformation_parameters,
                auto_ml_config,
                optimization_objective,
                ..
            } = self;
            crate::model::SolutionConfig {
                event_value_threshold,
                hpo_config,
                algorithm_hyper_parameters,
                feature_transformation_parameters,
                auto_ml_config,
                optimization_objective,
            }
        }
    }
}
impl SolutionConfig {
    /// Creates a new builder-style object to manufacture [`SolutionConfig`](crate::model::SolutionConfig)
    pub fn builder() -> crate::model::solution_config::Builder {
        std::default::Default::default()
    }
}
/// <p>Describes the additional objective for the solution, such as maximizing streaming
/// minutes or increasing revenue. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/optimizing-solution-for-objective.html">Optimizing a solution</a>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OptimizationObjective {
    /// <p>The numerical metadata column in an Items dataset related to the optimization objective. For example, VIDEO_LENGTH (to maximize streaming minutes), or PRICE (to maximize revenue).</p>
    pub item_attribute: std::option::Option<std::string::String>,
    /// <p>Specifies how Amazon Personalize balances the importance of your optimization objective versus relevance.</p>
    pub objective_sensitivity: std::option::Option<crate::model::ObjectiveSensitivity>,
}
impl OptimizationObjective {
    /// <p>The numerical metadata column in an Items dataset related to the optimization objective. For example, VIDEO_LENGTH (to maximize streaming minutes), or PRICE (to maximize revenue).</p>
    pub fn item_attribute(&self) -> std::option::Option<&str> {
        self.item_attribute.as_deref()
    }
    /// <p>Specifies how Amazon Personalize balances the importance of your optimization objective versus relevance.</p>
    pub fn objective_sensitivity(
        &self,
    ) -> std::option::Option<&crate::model::ObjectiveSensitivity> {
        self.objective_sensitivity.as_ref()
    }
}
impl std::fmt::Debug for OptimizationObjective {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; same field order and names as before.
        f.debug_struct("OptimizationObjective")
            .field("item_attribute", &self.item_attribute)
            .field("objective_sensitivity", &self.objective_sensitivity)
            .finish()
    }
}
/// See [`OptimizationObjective`](crate::model::OptimizationObjective)
pub mod optimization_objective {
    /// A builder for [`OptimizationObjective`](crate::model::OptimizationObjective)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) item_attribute: std::option::Option<std::string::String>,
        pub(crate) objective_sensitivity: std::option::Option<crate::model::ObjectiveSensitivity>,
    }
    impl Builder {
        /// <p>The numerical metadata column in an Items dataset related to the optimization objective. For example, VIDEO_LENGTH (to maximize streaming minutes), or PRICE (to maximize revenue).</p>
        pub fn item_attribute(self, input: impl Into<std::string::String>) -> Self {
            Self {
                item_attribute: Some(input.into()),
                ..self
            }
        }
        /// <p>The numerical metadata column in an Items dataset related to the optimization objective. For example, VIDEO_LENGTH (to maximize streaming minutes), or PRICE (to maximize revenue).</p>
        pub fn set_item_attribute(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                item_attribute: input,
                ..self
            }
        }
        /// <p>Specifies how Amazon Personalize balances the importance of your optimization objective versus relevance.</p>
        pub fn objective_sensitivity(self, input: crate::model::ObjectiveSensitivity) -> Self {
            Self {
                objective_sensitivity: Some(input),
                ..self
            }
        }
        /// <p>Specifies how Amazon Personalize balances the importance of your optimization objective versus relevance.</p>
        pub fn set_objective_sensitivity(
            self,
            input: std::option::Option<crate::model::ObjectiveSensitivity>,
        ) -> Self {
            Self {
                objective_sensitivity: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`OptimizationObjective`](crate::model::OptimizationObjective)
        pub fn build(self) -> crate::model::OptimizationObjective {
            let Self {
                item_attribute,
                objective_sensitivity,
                ..
            } = self;
            crate::model::OptimizationObjective {
                item_attribute,
                objective_sensitivity,
            }
        }
    }
}
impl OptimizationObjective {
    /// Creates a new builder-style object to manufacture [`OptimizationObjective`](crate::model::OptimizationObjective)
    pub fn builder() -> crate::model::optimization_objective::Builder {
        std::default::Default::default()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum ObjectiveSensitivity {
    #[allow(missing_docs)] // documentation missing in model
    High,
    #[allow(missing_docs)] // documentation missing in model
    Low,
    #[allow(missing_docs)] // documentation missing in model
    Medium,
    #[allow(missing_docs)] // documentation missing in model
    Off,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for ObjectiveSensitivity {
    fn from(s: &str) -> Self {
        // Unrecognized wire values are preserved verbatim in `Unknown` so newer
        // service responses round-trip instead of failing.
        match s {
            "HIGH" => Self::High,
            "LOW" => Self::Low,
            "MEDIUM" => Self::Medium,
            "OFF" => Self::Off,
            other => Self::Unknown(other.to_owned()),
        }
    }
}
impl std::str::FromStr for ObjectiveSensitivity {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // Delegates to the infallible `From<&str>` conversion.
        Ok(s.into())
    }
}
impl ObjectiveSensitivity {
    /// Returns the `&str` value of the enum member.
    pub fn as_str(&self) -> &str {
        match self {
            Self::High => "HIGH",
            Self::Low => "LOW",
            Self::Medium => "MEDIUM",
            Self::Off => "OFF",
            Self::Unknown(s) => s.as_ref(),
        }
    }
    /// Returns all the `&str` values of the enum members.
    pub fn values() -> &'static [&'static str] {
        &["HIGH", "LOW", "MEDIUM", "OFF"]
    }
}
impl AsRef<str> for ObjectiveSensitivity {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// <p>When the solution performs AutoML (<code>performAutoML</code> is true in
/// <a>CreateSolution</a>), Amazon Personalize
/// determines which recipe, from the specified list, optimizes the given metric.
/// Amazon Personalize then uses that recipe for the solution.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AutoMlConfig {
    /// <p>The metric to optimize.</p>
    pub metric_name: std::option::Option<std::string::String>,
    /// <p>The list of candidate recipes.</p>
    pub recipe_list: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl AutoMlConfig {
    /// <p>The metric to optimize.</p>
    pub fn metric_name(&self) -> std::option::Option<&str> {
        self.metric_name.as_deref()
    }
    /// <p>The list of candidate recipes.</p>
    pub fn recipe_list(&self) -> std::option::Option<&[std::string::String]> {
        self.recipe_list.as_deref()
    }
}
impl std::fmt::Debug for AutoMlConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; same field order and names as before.
        f.debug_struct("AutoMlConfig")
            .field("metric_name", &self.metric_name)
            .field("recipe_list", &self.recipe_list)
            .finish()
    }
}
/// See [`AutoMlConfig`](crate::model::AutoMlConfig)
pub mod auto_ml_config {
    /// A builder for [`AutoMlConfig`](crate::model::AutoMlConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) metric_name: std::option::Option<std::string::String>,
        pub(crate) recipe_list: std::option::Option<std::vec::Vec<std::string::String>>,
    }
    impl Builder {
        /// <p>The metric to optimize.</p>
        pub fn metric_name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                metric_name: Some(input.into()),
                ..self
            }
        }
        /// <p>The metric to optimize.</p>
        pub fn set_metric_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                metric_name: input,
                ..self
            }
        }
        /// Appends an item to `recipe_list`.
        ///
        /// To override the contents of this collection use [`set_recipe_list`](Self::set_recipe_list).
        ///
        /// <p>The list of candidate recipes.</p>
        pub fn recipe_list(mut self, input: impl Into<std::string::String>) -> Self {
            // Lazily create the list on first append, then push in place.
            self.recipe_list
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// <p>The list of candidate recipes.</p>
        pub fn set_recipe_list(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                recipe_list: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`AutoMlConfig`](crate::model::AutoMlConfig)
        pub fn build(self) -> crate::model::AutoMlConfig {
            let Self {
                metric_name,
                recipe_list,
                ..
            } = self;
            crate::model::AutoMlConfig {
                metric_name,
                recipe_list,
            }
        }
    }
}
impl AutoMlConfig {
    /// Creates a new builder-style object to manufacture [`AutoMlConfig`](crate::model::AutoMlConfig)
    pub fn builder() -> crate::model::auto_ml_config::Builder {
        std::default::Default::default()
    }
}
/// <p>Describes the properties for hyperparameter optimization (HPO).</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct HpoConfig {
    /// <p>The metric to optimize during HPO.</p>
    /// <note>
    /// <p>Amazon Personalize doesn't support configuring the <code>hpoObjective</code>
    /// at this time.</p>
    /// </note>
    pub hpo_objective: std::option::Option<crate::model::HpoObjective>,
    /// <p>Describes the resource configuration for HPO.</p>
    pub hpo_resource_config: std::option::Option<crate::model::HpoResourceConfig>,
    /// <p>The hyperparameters and their allowable ranges.</p>
    pub algorithm_hyper_parameter_ranges: std::option::Option<crate::model::HyperParameterRanges>,
}
impl HpoConfig {
    /// <p>The metric to optimize during HPO.</p>
    /// <note>
    /// <p>Amazon Personalize doesn't support configuring the <code>hpoObjective</code>
    /// at this time.</p>
    /// </note>
    pub fn hpo_objective(&self) -> std::option::Option<&crate::model::HpoObjective> {
        self.hpo_objective.as_ref()
    }
    /// <p>Describes the resource configuration for HPO.</p>
    pub fn hpo_resource_config(&self) -> std::option::Option<&crate::model::HpoResourceConfig> {
        self.hpo_resource_config.as_ref()
    }
    /// <p>The hyperparameters and their allowable ranges.</p>
    pub fn algorithm_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&crate::model::HyperParameterRanges> {
        self.algorithm_hyper_parameter_ranges.as_ref()
    }
}
impl std::fmt::Debug for HpoConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; same field order and names as before.
        f.debug_struct("HpoConfig")
            .field("hpo_objective", &self.hpo_objective)
            .field("hpo_resource_config", &self.hpo_resource_config)
            .field(
                "algorithm_hyper_parameter_ranges",
                &self.algorithm_hyper_parameter_ranges,
            )
            .finish()
    }
}
/// See [`HpoConfig`](crate::model::HpoConfig)
pub mod hpo_config {
    /// A builder for [`HpoConfig`](crate::model::HpoConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) hpo_objective: std::option::Option<crate::model::HpoObjective>,
        pub(crate) hpo_resource_config: std::option::Option<crate::model::HpoResourceConfig>,
        pub(crate) algorithm_hyper_parameter_ranges:
            std::option::Option<crate::model::HyperParameterRanges>,
    }
    impl Builder {
        /// <p>The metric to optimize during HPO.</p>
        /// <note>
        /// <p>Amazon Personalize doesn't support configuring the <code>hpoObjective</code>
        /// at this time.</p>
        /// </note>
        pub fn hpo_objective(self, input: crate::model::HpoObjective) -> Self {
            Self {
                hpo_objective: Some(input),
                ..self
            }
        }
        /// <p>The metric to optimize during HPO.</p>
        /// <note>
        /// <p>Amazon Personalize doesn't support configuring the <code>hpoObjective</code>
        /// at this time.</p>
        /// </note>
        pub fn set_hpo_objective(
            self,
            input: std::option::Option<crate::model::HpoObjective>,
        ) -> Self {
            Self {
                hpo_objective: input,
                ..self
            }
        }
        /// <p>Describes the resource configuration for HPO.</p>
        pub fn hpo_resource_config(self, input: crate::model::HpoResourceConfig) -> Self {
            Self {
                hpo_resource_config: Some(input),
                ..self
            }
        }
        /// <p>Describes the resource configuration for HPO.</p>
        pub fn set_hpo_resource_config(
            self,
            input: std::option::Option<crate::model::HpoResourceConfig>,
        ) -> Self {
            Self {
                hpo_resource_config: input,
                ..self
            }
        }
        /// <p>The hyperparameters and their allowable ranges.</p>
        pub fn algorithm_hyper_parameter_ranges(
            self,
            input: crate::model::HyperParameterRanges,
        ) -> Self {
            Self {
                algorithm_hyper_parameter_ranges: Some(input),
                ..self
            }
        }
        /// <p>The hyperparameters and their allowable ranges.</p>
        pub fn set_algorithm_hyper_parameter_ranges(
            self,
            input: std::option::Option<crate::model::HyperParameterRanges>,
        ) -> Self {
            Self {
                algorithm_hyper_parameter_ranges: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`HpoConfig`](crate::model::HpoConfig)
        pub fn build(self) -> crate::model::HpoConfig {
            let Self {
                hpo_objective,
                hpo_resource_config,
                algorithm_hyper_parameter_ranges,
                ..
            } = self;
            crate::model::HpoConfig {
                hpo_objective,
                hpo_resource_config,
                algorithm_hyper_parameter_ranges,
            }
        }
    }
}
impl HpoConfig {
    /// Creates a new builder-style object to manufacture [`HpoConfig`](crate::model::HpoConfig)
    pub fn builder() -> crate::model::hpo_config::Builder {
        std::default::Default::default()
    }
}
/// <p>Specifies the hyperparameters and their ranges.
/// Hyperparameters can be categorical, continuous, or integer-valued.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct HyperParameterRanges {
    /// <p>The integer-valued hyperparameters and their ranges.</p>
    pub integer_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::IntegerHyperParameterRange>>,
    /// <p>The continuous hyperparameters and their ranges.</p>
    pub continuous_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::ContinuousHyperParameterRange>>,
    /// <p>The categorical hyperparameters and their ranges.</p>
    pub categorical_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::CategoricalHyperParameterRange>>,
}
impl HyperParameterRanges {
    /// <p>The integer-valued hyperparameters and their ranges.</p>
    pub fn integer_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::IntegerHyperParameterRange]> {
        self.integer_hyper_parameter_ranges.as_deref()
    }
    /// <p>The continuous hyperparameters and their ranges.</p>
    pub fn continuous_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::ContinuousHyperParameterRange]> {
        self.continuous_hyper_parameter_ranges.as_deref()
    }
    /// <p>The categorical hyperparameters and their ranges.</p>
    pub fn categorical_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::CategoricalHyperParameterRange]> {
        self.categorical_hyper_parameter_ranges.as_deref()
    }
}
impl std::fmt::Debug for HyperParameterRanges {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; same field order and names as before.
        f.debug_struct("HyperParameterRanges")
            .field(
                "integer_hyper_parameter_ranges",
                &self.integer_hyper_parameter_ranges,
            )
            .field(
                "continuous_hyper_parameter_ranges",
                &self.continuous_hyper_parameter_ranges,
            )
            .field(
                "categorical_hyper_parameter_ranges",
                &self.categorical_hyper_parameter_ranges,
            )
            .finish()
    }
}
/// See [`HyperParameterRanges`](crate::model::HyperParameterRanges)
pub mod hyper_parameter_ranges {
    /// A builder for [`HyperParameterRanges`](crate::model::HyperParameterRanges)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) integer_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::IntegerHyperParameterRange>>,
        pub(crate) continuous_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::ContinuousHyperParameterRange>>,
        pub(crate) categorical_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::CategoricalHyperParameterRange>>,
    }
    impl Builder {
        /// Appends an item to `integer_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_integer_hyper_parameter_ranges`](Self::set_integer_hyper_parameter_ranges).
        ///
        /// <p>The integer-valued hyperparameters and their ranges.</p>
        pub fn integer_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::IntegerHyperParameterRange>,
        ) -> Self {
            // Lazily create the list on first append, then push in place.
            self.integer_hyper_parameter_ranges
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// <p>The integer-valued hyperparameters and their ranges.</p>
        pub fn set_integer_hyper_parameter_ranges(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::IntegerHyperParameterRange>>,
        ) -> Self {
            Self {
                integer_hyper_parameter_ranges: input,
                ..self
            }
        }
        /// Appends an item to `continuous_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_continuous_hyper_parameter_ranges`](Self::set_continuous_hyper_parameter_ranges).
        ///
        /// <p>The continuous hyperparameters and their ranges.</p>
        pub fn continuous_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::ContinuousHyperParameterRange>,
        ) -> Self {
            self.continuous_hyper_parameter_ranges
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// <p>The continuous hyperparameters and their ranges.</p>
        pub fn set_continuous_hyper_parameter_ranges(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::ContinuousHyperParameterRange>>,
        ) -> Self {
            Self {
                continuous_hyper_parameter_ranges: input,
                ..self
            }
        }
        /// Appends an item to `categorical_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_categorical_hyper_parameter_ranges`](Self::set_categorical_hyper_parameter_ranges).
        ///
        /// <p>The categorical hyperparameters and their ranges.</p>
        pub fn categorical_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::CategoricalHyperParameterRange>,
        ) -> Self {
            self.categorical_hyper_parameter_ranges
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// <p>The categorical hyperparameters and their ranges.</p>
        pub fn set_categorical_hyper_parameter_ranges(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::CategoricalHyperParameterRange>>,
        ) -> Self {
            Self {
                categorical_hyper_parameter_ranges: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`HyperParameterRanges`](crate::model::HyperParameterRanges)
        pub fn build(self) -> crate::model::HyperParameterRanges {
            let Self {
                integer_hyper_parameter_ranges,
                continuous_hyper_parameter_ranges,
                categorical_hyper_parameter_ranges,
                ..
            } = self;
            crate::model::HyperParameterRanges {
                integer_hyper_parameter_ranges,
                continuous_hyper_parameter_ranges,
                categorical_hyper_parameter_ranges,
            }
        }
    }
}
impl HyperParameterRanges {
    /// Creates a new builder-style object to manufacture [`HyperParameterRanges`](crate::model::HyperParameterRanges)
    pub fn builder() -> crate::model::hyper_parameter_ranges::Builder {
        std::default::Default::default()
    }
}
/// <p>Provides the name and range of a categorical hyperparameter.</p>
///
/// Construct values of this type with
/// [`CategoricalHyperParameterRange::builder`](crate::model::CategoricalHyperParameterRange::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CategoricalHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>A list of the categories for the hyperparameter.</p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl CategoricalHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
    /// <p>A list of the categories for the hyperparameter.</p>
    pub fn values(&self) -> std::option::Option<&[std::string::String]> {
        self.values.as_ref().map(|values| values.as_slice())
    }
}
impl std::fmt::Debug for CategoricalHyperParameterRange {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("CategoricalHyperParameterRange")
            .field("name", &self.name)
            .field("values", &self.values)
            .finish()
    }
}
/// See [`CategoricalHyperParameterRange`](crate::model::CategoricalHyperParameterRange)
pub mod categorical_hyper_parameter_range {
    /// A builder for [`CategoricalHyperParameterRange`](crate::model::CategoricalHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// Appends an item to `values`.
        ///
        /// To override the contents of this collection use [`set_values`](Self::set_values).
        ///
        /// <p>A list of the categories for the hyperparameter.</p>
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            // Lazily create the Vec on first append, then push into it in place.
            self.values
                .get_or_insert_with(std::vec::Vec::new)
                .push(input.into());
            self
        }
        /// <p>A list of the categories for the hyperparameter.</p>
        pub fn set_values(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                values: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`CategoricalHyperParameterRange`](crate::model::CategoricalHyperParameterRange)
        pub fn build(self) -> crate::model::CategoricalHyperParameterRange {
            crate::model::CategoricalHyperParameterRange {
                name: self.name,
                values: self.values,
            }
        }
    }
}
impl CategoricalHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`CategoricalHyperParameterRange`](crate::model::CategoricalHyperParameterRange)
    pub fn builder() -> crate::model::categorical_hyper_parameter_range::Builder {
        // `Builder` derives `Default`; the return type fixes the inference.
        Default::default()
    }
}
/// <p>Provides the name and range of a continuous hyperparameter.</p>
///
/// Construct values of this type with
/// [`ContinuousHyperParameterRange::builder`](crate::model::ContinuousHyperParameterRange::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ContinuousHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub min_value: f64,
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub max_value: f64,
}
impl ContinuousHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub fn min_value(&self) -> f64 {
        self.min_value
    }
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub fn max_value(&self) -> f64 {
        self.max_value
    }
}
impl std::fmt::Debug for ContinuousHyperParameterRange {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("ContinuousHyperParameterRange")
            .field("name", &self.name)
            .field("min_value", &self.min_value)
            .field("max_value", &self.max_value)
            .finish()
    }
}
/// See [`ContinuousHyperParameterRange`](crate::model::ContinuousHyperParameterRange)
pub mod continuous_hyper_parameter_range {
    /// A builder for [`ContinuousHyperParameterRange`](crate::model::ContinuousHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) min_value: std::option::Option<f64>,
        pub(crate) max_value: std::option::Option<f64>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn min_value(self, input: f64) -> Self {
            Self {
                min_value: Some(input),
                ..self
            }
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn set_min_value(self, input: std::option::Option<f64>) -> Self {
            Self {
                min_value: input,
                ..self
            }
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn max_value(self, input: f64) -> Self {
            Self {
                max_value: Some(input),
                ..self
            }
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn set_max_value(self, input: std::option::Option<f64>) -> Self {
            Self {
                max_value: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`ContinuousHyperParameterRange`](crate::model::ContinuousHyperParameterRange)
        pub fn build(self) -> crate::model::ContinuousHyperParameterRange {
            crate::model::ContinuousHyperParameterRange {
                name: self.name,
                // Unset bounds fall back to the numeric default (0.0).
                min_value: self.min_value.unwrap_or_default(),
                max_value: self.max_value.unwrap_or_default(),
            }
        }
    }
}
impl ContinuousHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`ContinuousHyperParameterRange`](crate::model::ContinuousHyperParameterRange)
    pub fn builder() -> crate::model::continuous_hyper_parameter_range::Builder {
        // `Builder` derives `Default`; the return type fixes the inference.
        Default::default()
    }
}
/// <p>Provides the name and range of an integer-valued hyperparameter.</p>
///
/// Construct values of this type with
/// [`IntegerHyperParameterRange::builder`](crate::model::IntegerHyperParameterRange::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct IntegerHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub min_value: i32,
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub max_value: i32,
}
impl IntegerHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub fn min_value(&self) -> i32 {
        self.min_value
    }
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub fn max_value(&self) -> i32 {
        self.max_value
    }
}
impl std::fmt::Debug for IntegerHyperParameterRange {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("IntegerHyperParameterRange")
            .field("name", &self.name)
            .field("min_value", &self.min_value)
            .field("max_value", &self.max_value)
            .finish()
    }
}
/// See [`IntegerHyperParameterRange`](crate::model::IntegerHyperParameterRange)
pub mod integer_hyper_parameter_range {
    /// A builder for [`IntegerHyperParameterRange`](crate::model::IntegerHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) min_value: std::option::Option<i32>,
        pub(crate) max_value: std::option::Option<i32>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn min_value(self, input: i32) -> Self {
            Self {
                min_value: Some(input),
                ..self
            }
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn set_min_value(self, input: std::option::Option<i32>) -> Self {
            Self {
                min_value: input,
                ..self
            }
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn max_value(self, input: i32) -> Self {
            Self {
                max_value: Some(input),
                ..self
            }
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn set_max_value(self, input: std::option::Option<i32>) -> Self {
            Self {
                max_value: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`IntegerHyperParameterRange`](crate::model::IntegerHyperParameterRange)
        pub fn build(self) -> crate::model::IntegerHyperParameterRange {
            crate::model::IntegerHyperParameterRange {
                name: self.name,
                // Unset bounds fall back to the numeric default (0).
                min_value: self.min_value.unwrap_or_default(),
                max_value: self.max_value.unwrap_or_default(),
            }
        }
    }
}
impl IntegerHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`IntegerHyperParameterRange`](crate::model::IntegerHyperParameterRange)
    pub fn builder() -> crate::model::integer_hyper_parameter_range::Builder {
        // `Builder` derives `Default`; the return type fixes the inference.
        Default::default()
    }
}
/// <p>Describes the resource configuration for hyperparameter optimization (HPO).</p>
///
/// Construct values of this type with
/// [`HpoResourceConfig::builder`](crate::model::HpoResourceConfig::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct HpoResourceConfig {
    /// <p>The maximum number of training
    /// jobs when you create a
    /// solution
    /// version.
    /// The maximum value for <code>maxNumberOfTrainingJobs</code> is
    /// <code>40</code>.</p>
    pub max_number_of_training_jobs: std::option::Option<std::string::String>,
    /// <p>The maximum number of parallel training
    /// jobs when you create a
    /// solution
    /// version.
    /// The maximum value for <code>maxParallelTrainingJobs</code> is
    /// <code>10</code>.</p>
    pub max_parallel_training_jobs: std::option::Option<std::string::String>,
}
impl HpoResourceConfig {
    /// <p>The maximum number of training
    /// jobs when you create a
    /// solution
    /// version.
    /// The maximum value for <code>maxNumberOfTrainingJobs</code> is
    /// <code>40</code>.</p>
    pub fn max_number_of_training_jobs(&self) -> std::option::Option<&str> {
        self.max_number_of_training_jobs
            .as_ref()
            .map(|v| v.as_str())
    }
    /// <p>The maximum number of parallel training
    /// jobs when you create a
    /// solution
    /// version.
    /// The maximum value for <code>maxParallelTrainingJobs</code> is
    /// <code>10</code>.</p>
    pub fn max_parallel_training_jobs(&self) -> std::option::Option<&str> {
        self.max_parallel_training_jobs
            .as_ref()
            .map(|v| v.as_str())
    }
}
impl std::fmt::Debug for HpoResourceConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("HpoResourceConfig")
            .field(
                "max_number_of_training_jobs",
                &self.max_number_of_training_jobs,
            )
            .field(
                "max_parallel_training_jobs",
                &self.max_parallel_training_jobs,
            )
            .finish()
    }
}
/// See [`HpoResourceConfig`](crate::model::HpoResourceConfig)
pub mod hpo_resource_config {
    /// A builder for [`HpoResourceConfig`](crate::model::HpoResourceConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) max_number_of_training_jobs: std::option::Option<std::string::String>,
        pub(crate) max_parallel_training_jobs: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The maximum number of training
        /// jobs when you create a
        /// solution
        /// version.
        /// The maximum value for <code>maxNumberOfTrainingJobs</code> is
        /// <code>40</code>.</p>
        pub fn max_number_of_training_jobs(
            self,
            input: impl Into<std::string::String>,
        ) -> Self {
            Self {
                max_number_of_training_jobs: Some(input.into()),
                ..self
            }
        }
        /// <p>The maximum number of training
        /// jobs when you create a
        /// solution
        /// version.
        /// The maximum value for <code>maxNumberOfTrainingJobs</code> is
        /// <code>40</code>.</p>
        pub fn set_max_number_of_training_jobs(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                max_number_of_training_jobs: input,
                ..self
            }
        }
        /// <p>The maximum number of parallel training
        /// jobs when you create a
        /// solution
        /// version.
        /// The maximum value for <code>maxParallelTrainingJobs</code> is
        /// <code>10</code>.</p>
        pub fn max_parallel_training_jobs(self, input: impl Into<std::string::String>) -> Self {
            Self {
                max_parallel_training_jobs: Some(input.into()),
                ..self
            }
        }
        /// <p>The maximum number of parallel training
        /// jobs when you create a
        /// solution
        /// version.
        /// The maximum value for <code>maxParallelTrainingJobs</code> is
        /// <code>10</code>.</p>
        pub fn set_max_parallel_training_jobs(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                max_parallel_training_jobs: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`HpoResourceConfig`](crate::model::HpoResourceConfig)
        pub fn build(self) -> crate::model::HpoResourceConfig {
            crate::model::HpoResourceConfig {
                max_number_of_training_jobs: self.max_number_of_training_jobs,
                max_parallel_training_jobs: self.max_parallel_training_jobs,
            }
        }
    }
}
impl HpoResourceConfig {
    /// Creates a new builder-style object to manufacture [`HpoResourceConfig`](crate::model::HpoResourceConfig)
    pub fn builder() -> crate::model::hpo_resource_config::Builder {
        // `Builder` derives `Default`; the return type fixes the inference.
        Default::default()
    }
}
/// <p>The metric to optimize during hyperparameter optimization (HPO).</p>
/// <note>
/// <p>Amazon Personalize doesn't support configuring the <code>hpoObjective</code>
/// at this time.</p>
/// </note>
///
/// Construct values of this type with
/// [`HpoObjective::builder`](crate::model::HpoObjective::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct HpoObjective {
    /// <p>The type of the metric. Valid values are <code>Maximize</code> and <code>Minimize</code>.</p>
    // `type` is a Rust keyword, so the field uses the raw identifier `r#type`.
    pub r#type: std::option::Option<std::string::String>,
    /// <p>The name of the metric.</p>
    pub metric_name: std::option::Option<std::string::String>,
    /// <p>A regular expression for finding the metric in the training job logs.</p>
    pub metric_regex: std::option::Option<std::string::String>,
}
impl HpoObjective {
    /// <p>The type of the metric. Valid values are <code>Maximize</code> and <code>Minimize</code>.</p>
    pub fn r#type(&self) -> std::option::Option<&str> {
        self.r#type.as_ref().map(|v| v.as_str())
    }
    /// <p>The name of the metric.</p>
    pub fn metric_name(&self) -> std::option::Option<&str> {
        self.metric_name.as_ref().map(|v| v.as_str())
    }
    /// <p>A regular expression for finding the metric in the training job logs.</p>
    pub fn metric_regex(&self) -> std::option::Option<&str> {
        self.metric_regex.as_ref().map(|v| v.as_str())
    }
}
impl std::fmt::Debug for HpoObjective {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("HpoObjective")
            .field("r#type", &self.r#type)
            .field("metric_name", &self.metric_name)
            .field("metric_regex", &self.metric_regex)
            .finish()
    }
}
/// See [`HpoObjective`](crate::model::HpoObjective)
pub mod hpo_objective {
    /// A builder for [`HpoObjective`](crate::model::HpoObjective)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) r#type: std::option::Option<std::string::String>,
        pub(crate) metric_name: std::option::Option<std::string::String>,
        pub(crate) metric_regex: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The type of the metric. Valid values are <code>Maximize</code> and <code>Minimize</code>.</p>
        pub fn r#type(self, input: impl Into<std::string::String>) -> Self {
            Self {
                r#type: Some(input.into()),
                ..self
            }
        }
        /// <p>The type of the metric. Valid values are <code>Maximize</code> and <code>Minimize</code>.</p>
        pub fn set_type(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                r#type: input,
                ..self
            }
        }
        /// <p>The name of the metric.</p>
        pub fn metric_name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                metric_name: Some(input.into()),
                ..self
            }
        }
        /// <p>The name of the metric.</p>
        pub fn set_metric_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                metric_name: input,
                ..self
            }
        }
        /// <p>A regular expression for finding the metric in the training job logs.</p>
        pub fn metric_regex(self, input: impl Into<std::string::String>) -> Self {
            Self {
                metric_regex: Some(input.into()),
                ..self
            }
        }
        /// <p>A regular expression for finding the metric in the training job logs.</p>
        pub fn set_metric_regex(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                metric_regex: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`HpoObjective`](crate::model::HpoObjective)
        pub fn build(self) -> crate::model::HpoObjective {
            crate::model::HpoObjective {
                r#type: self.r#type,
                metric_name: self.metric_name,
                metric_regex: self.metric_regex,
            }
        }
    }
}
impl HpoObjective {
    /// Creates a new builder-style object to manufacture [`HpoObjective`](crate::model::HpoObjective)
    pub fn builder() -> crate::model::hpo_objective::Builder {
        // `Builder` derives `Default`; the return type fixes the inference.
        Default::default()
    }
}
/// <p>An object that provides information about a solution. A solution is a trained model
/// that can be deployed as a campaign.</p>
///
/// Construct values of this type with [`Solution::builder`](crate::model::Solution::builder);
/// the struct is `#[non_exhaustive]`, so it cannot be created with a struct literal outside this crate.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Solution {
    /// <p>The name of the solution.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The ARN of the solution.</p>
    pub solution_arn: std::option::Option<std::string::String>,
    /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The
    /// default is <code>false</code>.</p>
    pub perform_hpo: bool,
    /// <p>When true, Amazon Personalize performs a search for the best USER_PERSONALIZATION recipe from
    /// the list specified in the solution configuration (<code>recipeArn</code> must not be specified).
    /// When false (the default), Amazon Personalize uses <code>recipeArn</code> for training.</p>
    pub perform_auto_ml: bool,
    /// <p>The ARN of the recipe used to create the solution.</p>
    pub recipe_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group that provides the training data.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>The event type (for example, 'click' or 'like') that is used for training the model.
    /// If no <code>eventType</code> is provided, Amazon Personalize uses all interactions for training with
    /// equal weight regardless of type.</p>
    pub event_type: std::option::Option<std::string::String>,
    /// <p>Describes the configuration properties for the solution.</p>
    pub solution_config: std::option::Option<crate::model::SolutionConfig>,
    /// <p>When <code>performAutoML</code> is true, specifies the best recipe found.</p>
    pub auto_ml_result: std::option::Option<crate::model::AutoMlResult>,
    /// <p>The status of the solution.</p>
    /// <p>A solution can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The creation date and time (in Unix time) of the solution.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>Describes the latest version of the solution, including the status and the ARN.</p>
    pub latest_solution_version: std::option::Option<crate::model::SolutionVersionSummary>,
}
impl Solution {
    /// <p>The name of the solution.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|v| v.as_str())
    }
    /// <p>The ARN of the solution.</p>
    pub fn solution_arn(&self) -> std::option::Option<&str> {
        self.solution_arn.as_ref().map(|v| v.as_str())
    }
    /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The
    /// default is <code>false</code>.</p>
    pub fn perform_hpo(&self) -> bool {
        self.perform_hpo
    }
    /// <p>When true, Amazon Personalize performs a search for the best USER_PERSONALIZATION recipe from
    /// the list specified in the solution configuration (<code>recipeArn</code> must not be specified).
    /// When false (the default), Amazon Personalize uses <code>recipeArn</code> for training.</p>
    pub fn perform_auto_ml(&self) -> bool {
        self.perform_auto_ml
    }
    /// <p>The ARN of the recipe used to create the solution.</p>
    pub fn recipe_arn(&self) -> std::option::Option<&str> {
        self.recipe_arn.as_ref().map(|v| v.as_str())
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset group that provides the training data.</p>
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(|v| v.as_str())
    }
    /// <p>The event type (for example, 'click' or 'like') that is used for training the model.
    /// If no <code>eventType</code> is provided, Amazon Personalize uses all interactions for training with
    /// equal weight regardless of type.</p>
    pub fn event_type(&self) -> std::option::Option<&str> {
        self.event_type.as_ref().map(|v| v.as_str())
    }
    /// <p>Describes the configuration properties for the solution.</p>
    pub fn solution_config(&self) -> std::option::Option<&crate::model::SolutionConfig> {
        self.solution_config.as_ref()
    }
    /// <p>When <code>performAutoML</code> is true, specifies the best recipe found.</p>
    pub fn auto_ml_result(&self) -> std::option::Option<&crate::model::AutoMlResult> {
        self.auto_ml_result.as_ref()
    }
    /// <p>The status of the solution.</p>
    /// <p>A solution can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|v| v.as_str())
    }
    /// <p>The creation date and time (in Unix time) of the solution.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the solution was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>Describes the latest version of the solution, including the status and the ARN.</p>
    pub fn latest_solution_version(
        &self,
    ) -> std::option::Option<&crate::model::SolutionVersionSummary> {
        self.latest_solution_version.as_ref()
    }
}
impl std::fmt::Debug for Solution {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the field calls directly instead of binding a mutable formatter.
        f.debug_struct("Solution")
            .field("name", &self.name)
            .field("solution_arn", &self.solution_arn)
            .field("perform_hpo", &self.perform_hpo)
            .field("perform_auto_ml", &self.perform_auto_ml)
            .field("recipe_arn", &self.recipe_arn)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("event_type", &self.event_type)
            .field("solution_config", &self.solution_config)
            .field("auto_ml_result", &self.auto_ml_result)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("latest_solution_version", &self.latest_solution_version)
            .finish()
    }
}
/// See [`Solution`](crate::model::Solution)
pub mod solution {
    /// A builder for [`Solution`](crate::model::Solution)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) solution_arn: std::option::Option<std::string::String>,
        pub(crate) perform_hpo: std::option::Option<bool>,
        pub(crate) perform_auto_ml: std::option::Option<bool>,
        pub(crate) recipe_arn: std::option::Option<std::string::String>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) event_type: std::option::Option<std::string::String>,
        pub(crate) solution_config: std::option::Option<crate::model::SolutionConfig>,
        pub(crate) auto_ml_result: std::option::Option<crate::model::AutoMlResult>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) latest_solution_version:
            std::option::Option<crate::model::SolutionVersionSummary>,
    }
    impl Builder {
        /// <p>The name of the solution.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            self.name = Some(input.into());
            self
        }
        /// <p>The name of the solution.</p>
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// <p>The ARN of the solution.</p>
        pub fn solution_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.solution_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the solution.</p>
        pub fn set_solution_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.solution_arn = input;
            self
        }
        /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The
        /// default is <code>false</code>.</p>
        pub fn perform_hpo(self, input: bool) -> Self {
            self.set_perform_hpo(Some(input))
        }
        /// <p>Whether to perform hyperparameter optimization (HPO) on the chosen recipe. The
        /// default is <code>false</code>.</p>
        pub fn set_perform_hpo(mut self, input: std::option::Option<bool>) -> Self {
            self.perform_hpo = input;
            self
        }
        /// <p>When true, Amazon Personalize performs a search for the best USER_PERSONALIZATION recipe from
        /// the list specified in the solution configuration (<code>recipeArn</code> must not be specified).
        /// When false (the default), Amazon Personalize uses <code>recipeArn</code> for training.</p>
        pub fn perform_auto_ml(self, input: bool) -> Self {
            self.set_perform_auto_ml(Some(input))
        }
        /// <p>When true, Amazon Personalize performs a search for the best USER_PERSONALIZATION recipe from
        /// the list specified in the solution configuration (<code>recipeArn</code> must not be specified).
        /// When false (the default), Amazon Personalize uses <code>recipeArn</code> for training.</p>
        pub fn set_perform_auto_ml(mut self, input: std::option::Option<bool>) -> Self {
            self.perform_auto_ml = input;
            self
        }
        /// <p>The ARN of the recipe used to create the solution.</p>
        pub fn recipe_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.recipe_arn = Some(input.into());
            self
        }
        /// <p>The ARN of the recipe used to create the solution.</p>
        pub fn set_recipe_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.recipe_arn = input;
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group that provides the training data.</p>
        pub fn dataset_group_arn(mut self, input: impl Into<std::string::String>) -> Self {
            self.dataset_group_arn = Some(input.into());
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group that provides the training data.</p>
        pub fn set_dataset_group_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.dataset_group_arn = input;
            self
        }
        /// <p>The event type (for example, 'click' or 'like') that is used for training the model.
        /// If no <code>eventType</code> is provided, Amazon Personalize uses all interactions for training with
        /// equal weight regardless of type.</p>
        pub fn event_type(mut self, input: impl Into<std::string::String>) -> Self {
            self.event_type = Some(input.into());
            self
        }
        /// <p>The event type (for example, 'click' or 'like') that is used for training the model.
        /// If no <code>eventType</code> is provided, Amazon Personalize uses all interactions for training with
        /// equal weight regardless of type.</p>
        pub fn set_event_type(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.event_type = input;
            self
        }
        /// <p>Describes the configuration properties for the solution.</p>
        pub fn solution_config(self, input: crate::model::SolutionConfig) -> Self {
            self.set_solution_config(Some(input))
        }
        /// <p>Describes the configuration properties for the solution.</p>
        pub fn set_solution_config(
            mut self,
            input: std::option::Option<crate::model::SolutionConfig>,
        ) -> Self {
            self.solution_config = input;
            self
        }
        /// <p>When <code>performAutoML</code> is true, specifies the best recipe found.</p>
        pub fn auto_ml_result(self, input: crate::model::AutoMlResult) -> Self {
            self.set_auto_ml_result(Some(input))
        }
        /// <p>When <code>performAutoML</code> is true, specifies the best recipe found.</p>
        pub fn set_auto_ml_result(
            mut self,
            input: std::option::Option<crate::model::AutoMlResult>,
        ) -> Self {
            self.auto_ml_result = input;
            self
        }
        /// <p>The status of the solution.</p>
        /// <p>A solution can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(mut self, input: impl Into<std::string::String>) -> Self {
            self.status = Some(input.into());
            self
        }
        /// <p>The status of the solution.</p>
        /// <p>A solution can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.status = input;
            self
        }
        /// <p>The creation date and time (in Unix time) of the solution.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// <p>The creation date and time (in Unix time) of the solution.</p>
        pub fn set_creation_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// <p>The date and time (in Unix time) that the solution was last updated.</p>
        pub fn set_last_updated_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// <p>Describes the latest version of the solution, including the status and the ARN.</p>
        pub fn latest_solution_version(
            self,
            input: crate::model::SolutionVersionSummary,
        ) -> Self {
            self.set_latest_solution_version(Some(input))
        }
        /// <p>Describes the latest version of the solution, including the status and the ARN.</p>
        pub fn set_latest_solution_version(
            mut self,
            input: std::option::Option<crate::model::SolutionVersionSummary>,
        ) -> Self {
            self.latest_solution_version = input;
            self
        }
        /// Consumes the builder and constructs a [`Solution`](crate::model::Solution)
        pub fn build(self) -> crate::model::Solution {
            crate::model::Solution {
                name: self.name,
                solution_arn: self.solution_arn,
                // Unset booleans fall back to `false` (the documented default).
                perform_hpo: self.perform_hpo.unwrap_or_default(),
                perform_auto_ml: self.perform_auto_ml.unwrap_or_default(),
                recipe_arn: self.recipe_arn,
                dataset_group_arn: self.dataset_group_arn,
                event_type: self.event_type,
                solution_config: self.solution_config,
                auto_ml_result: self.auto_ml_result,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                latest_solution_version: self.latest_solution_version,
            }
        }
    }
}
impl Solution {
    /// Returns a fresh, default-initialized builder for [`Solution`](crate::model::Solution).
    pub fn builder() -> crate::model::solution::Builder {
        Default::default()
    }
}
/// <p>When the solution performs AutoML (<code>performAutoML</code> is true in
/// <a>CreateSolution</a>), specifies the recipe that best optimized the
/// specified metric.</p>
// Marked #[non_exhaustive]: downstream crates must use the
// `auto_ml_result::Builder` instead of a struct literal, so fields can
// be added without a breaking change.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AutoMlResult {
    /// <p>The Amazon Resource Name (ARN) of the best recipe.</p>
    pub best_recipe_arn: std::option::Option<std::string::String>,
}
impl AutoMlResult {
    /// Returns the Amazon Resource Name (ARN) of the best recipe, if one was recorded.
    pub fn best_recipe_arn(&self) -> std::option::Option<&str> {
        self.best_recipe_arn.as_ref().map(|arn| arn.as_str())
    }
}
impl std::fmt::Debug for AutoMlResult {
    /// Renders the struct with the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AutoMlResult")
            .field("best_recipe_arn", &self.best_recipe_arn)
            .finish()
    }
}
/// See [`AutoMlResult`](crate::model::AutoMlResult)
pub mod auto_ml_result {
    /// A builder for [`AutoMlResult`](crate::model::AutoMlResult)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) best_recipe_arn: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the Amazon Resource Name (ARN) of the best recipe.
        pub fn best_recipe_arn(self, input: impl Into<std::string::String>) -> Self {
            // Delegate to the Option-taking setter so both paths share one assignment.
            self.set_best_recipe_arn(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the Amazon Resource Name (ARN) of the best recipe.
        pub fn set_best_recipe_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.best_recipe_arn = input;
            self
        }
        /// Consumes the builder and constructs a [`AutoMlResult`](crate::model::AutoMlResult)
        pub fn build(self) -> crate::model::AutoMlResult {
            let Self { best_recipe_arn } = self;
            crate::model::AutoMlResult { best_recipe_arn }
        }
    }
}
impl AutoMlResult {
    /// Returns a fresh, default-initialized builder for [`AutoMlResult`](crate::model::AutoMlResult).
    pub fn builder() -> crate::model::auto_ml_result::Builder {
        Default::default()
    }
}
/// <p>Describes the schema for a dataset. For more information on schemas, see
/// <a>CreateSchema</a>.</p>
// Marked #[non_exhaustive]: downstream crates must use the
// `dataset_schema::Builder` instead of a struct literal, so fields can
// be added without a breaking change.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DatasetSchema {
    /// <p>The name of the schema.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the schema.</p>
    pub schema_arn: std::option::Option<std::string::String>,
    /// <p>The schema.</p>
    pub schema: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the schema was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the schema was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The domain of a schema that you created for a dataset in a Domain dataset group.</p>
    pub domain: std::option::Option<crate::model::Domain>,
}
impl DatasetSchema {
    /// Returns the name of the schema, if present.
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|v| v.as_str())
    }
    /// Returns the Amazon Resource Name (ARN) of the schema, if present.
    pub fn schema_arn(&self) -> std::option::Option<&str> {
        self.schema_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the schema document, if present.
    pub fn schema(&self) -> std::option::Option<&str> {
        self.schema.as_ref().map(|v| v.as_str())
    }
    /// Returns the date and time (in Unix time) that the schema was created, if known.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Returns the date and time (in Unix time) that the schema was last updated, if known.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// Returns the domain of a schema created for a dataset in a Domain dataset group, if any.
    pub fn domain(&self) -> std::option::Option<&crate::model::Domain> {
        self.domain.as_ref()
    }
}
impl std::fmt::Debug for DatasetSchema {
    /// Renders the struct with the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("DatasetSchema")
            .field("name", &self.name)
            .field("schema_arn", &self.schema_arn)
            .field("schema", &self.schema)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("domain", &self.domain)
            .finish()
    }
}
/// See [`DatasetSchema`](crate::model::DatasetSchema)
pub mod dataset_schema {
    /// A builder for [`DatasetSchema`](crate::model::DatasetSchema)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) schema_arn: std::option::Option<std::string::String>,
        pub(crate) schema: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) domain: std::option::Option<crate::model::Domain>,
    }
    impl Builder {
        // Each convenience setter below delegates to its Option-taking
        // `set_*` twin so the assignment logic lives in one place.

        /// Sets the name of the schema.
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            self.set_name(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the name of the schema.
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// Sets the Amazon Resource Name (ARN) of the schema.
        pub fn schema_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_schema_arn(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the Amazon Resource Name (ARN) of the schema.
        pub fn set_schema_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.schema_arn = input;
            self
        }
        /// Sets the schema document.
        pub fn schema(self, input: impl Into<std::string::String>) -> Self {
            self.set_schema(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the schema document.
        pub fn set_schema(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.schema = input;
            self
        }
        /// Sets the date and time (in Unix time) that the schema was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the creation date and time (in Unix time).
        pub fn set_creation_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// Sets the date and time (in Unix time) that the schema was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the last-updated date and time (in Unix time).
        pub fn set_last_updated_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// Sets the domain of a schema created for a dataset in a Domain dataset group.
        pub fn domain(self, input: crate::model::Domain) -> Self {
            self.set_domain(Some(input))
        }
        /// Sets (or, with `None`, clears) the domain.
        pub fn set_domain(mut self, input: std::option::Option<crate::model::Domain>) -> Self {
            self.domain = input;
            self
        }
        /// Consumes the builder and constructs a [`DatasetSchema`](crate::model::DatasetSchema)
        pub fn build(self) -> crate::model::DatasetSchema {
            // Destructure so any future field added to the builder but not
            // mapped here becomes a compile error instead of silently dropped.
            let Self {
                name,
                schema_arn,
                schema,
                creation_date_time,
                last_updated_date_time,
                domain,
            } = self;
            crate::model::DatasetSchema {
                name,
                schema_arn,
                schema,
                creation_date_time,
                last_updated_date_time,
                domain,
            }
        }
    }
}
impl DatasetSchema {
    /// Returns a fresh, default-initialized builder for [`DatasetSchema`](crate::model::DatasetSchema).
    pub fn builder() -> crate::model::dataset_schema::Builder {
        Default::default()
    }
}
/// <p>Describes a recommendation generator for a Domain dataset group. You create a recommender in a Domain dataset group
/// for a specific domain use case (domain recipe), and specify the recommender in a <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html">GetRecommendations</a> request.</p>
// Marked #[non_exhaustive]: downstream crates must use the
// `recommender::Builder` instead of a struct literal, so fields can be
// added without a breaking change.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Recommender {
    /// <p>The Amazon Resource Name (ARN) of the recommender.</p>
    pub recommender_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the Domain dataset group that contains the recommender.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>The name of the recommender.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the recipe (Domain dataset group use case) that the recommender was created for.
    /// </p>
    pub recipe_arn: std::option::Option<std::string::String>,
    /// <p>The configuration details of the recommender.</p>
    pub recommender_config: std::option::Option<crate::model::RecommenderConfig>,
    /// <p>The date and time (in Unix format) that the recommender was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix format) that the recommender was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The status of the recommender.</p>
    /// <p>A recommender can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>If a recommender fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>Provides a summary of the latest updates to the recommender. </p>
    pub latest_recommender_update: std::option::Option<crate::model::RecommenderUpdateSummary>,
}
impl Recommender {
    /// Returns the Amazon Resource Name (ARN) of the recommender, if present.
    pub fn recommender_arn(&self) -> std::option::Option<&str> {
        self.recommender_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the ARN of the Domain dataset group that contains the recommender, if present.
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the name of the recommender, if present.
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|v| v.as_str())
    }
    /// Returns the ARN of the recipe (Domain dataset group use case) that the
    /// recommender was created for, if present.
    pub fn recipe_arn(&self) -> std::option::Option<&str> {
        self.recipe_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the configuration details of the recommender, if present.
    pub fn recommender_config(&self) -> std::option::Option<&crate::model::RecommenderConfig> {
        self.recommender_config.as_ref()
    }
    /// Returns the date and time (in Unix format) that the recommender was created, if known.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Returns the date and time (in Unix format) that the recommender was last updated, if known.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// Returns the status of the recommender, if present.
    ///
    /// Possible lifecycles: CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE
    /// -or- CREATE FAILED, and DELETE PENDING &gt; DELETE IN_PROGRESS.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|v| v.as_str())
    }
    /// Returns the reason behind a failure, if the recommender failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|v| v.as_str())
    }
    /// Returns a summary of the latest updates to the recommender, if any.
    pub fn latest_recommender_update(
        &self,
    ) -> std::option::Option<&crate::model::RecommenderUpdateSummary> {
        self.latest_recommender_update.as_ref()
    }
}
impl std::fmt::Debug for Recommender {
    /// Renders the struct with the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Recommender")
            .field("recommender_arn", &self.recommender_arn)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("name", &self.name)
            .field("recipe_arn", &self.recipe_arn)
            .field("recommender_config", &self.recommender_config)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("status", &self.status)
            .field("failure_reason", &self.failure_reason)
            .field("latest_recommender_update", &self.latest_recommender_update)
            .finish()
    }
}
/// See [`Recommender`](crate::model::Recommender)
pub mod recommender {
    /// A builder for [`Recommender`](crate::model::Recommender)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommender_arn: std::option::Option<std::string::String>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) recipe_arn: std::option::Option<std::string::String>,
        pub(crate) recommender_config: std::option::Option<crate::model::RecommenderConfig>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) latest_recommender_update:
            std::option::Option<crate::model::RecommenderUpdateSummary>,
    }
    impl Builder {
        // Each convenience setter below delegates to its Option-taking
        // `set_*` twin so the assignment logic lives in one place.

        /// Sets the Amazon Resource Name (ARN) of the recommender.
        pub fn recommender_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_recommender_arn(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the ARN of the recommender.
        pub fn set_recommender_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.recommender_arn = input;
            self
        }
        /// Sets the ARN of the Domain dataset group that contains the recommender.
        pub fn dataset_group_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_dataset_group_arn(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the ARN of the Domain dataset group.
        pub fn set_dataset_group_arn(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.dataset_group_arn = input;
            self
        }
        /// Sets the name of the recommender.
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            self.set_name(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the name of the recommender.
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// Sets the ARN of the recipe (Domain dataset group use case) that the
        /// recommender was created for.
        pub fn recipe_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_recipe_arn(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the recipe ARN.
        pub fn set_recipe_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.recipe_arn = input;
            self
        }
        /// Sets the configuration details of the recommender.
        pub fn recommender_config(self, input: crate::model::RecommenderConfig) -> Self {
            self.set_recommender_config(Some(input))
        }
        /// Sets (or, with `None`, clears) the configuration details.
        pub fn set_recommender_config(
            mut self,
            input: std::option::Option<crate::model::RecommenderConfig>,
        ) -> Self {
            self.recommender_config = input;
            self
        }
        /// Sets the date and time (in Unix format) that the recommender was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the creation date and time.
        pub fn set_creation_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// Sets the date and time (in Unix format) that the recommender was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the last-updated date and time.
        pub fn set_last_updated_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// Sets the status of the recommender.
        ///
        /// Possible lifecycles: CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE
        /// -or- CREATE FAILED, and DELETE PENDING &gt; DELETE IN_PROGRESS.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            self.set_status(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the status of the recommender.
        pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.status = input;
            self
        }
        /// Sets the reason behind a recommender failure.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            self.set_failure_reason(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the failure reason.
        pub fn set_failure_reason(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.failure_reason = input;
            self
        }
        /// Sets a summary of the latest updates to the recommender.
        pub fn latest_recommender_update(
            self,
            input: crate::model::RecommenderUpdateSummary,
        ) -> Self {
            self.set_latest_recommender_update(Some(input))
        }
        /// Sets (or, with `None`, clears) the latest-update summary.
        pub fn set_latest_recommender_update(
            mut self,
            input: std::option::Option<crate::model::RecommenderUpdateSummary>,
        ) -> Self {
            self.latest_recommender_update = input;
            self
        }
        /// Consumes the builder and constructs a [`Recommender`](crate::model::Recommender)
        pub fn build(self) -> crate::model::Recommender {
            // Destructure so any future field added to the builder but not
            // mapped here becomes a compile error instead of silently dropped.
            let Self {
                recommender_arn,
                dataset_group_arn,
                name,
                recipe_arn,
                recommender_config,
                creation_date_time,
                last_updated_date_time,
                status,
                failure_reason,
                latest_recommender_update,
            } = self;
            crate::model::Recommender {
                recommender_arn,
                dataset_group_arn,
                name,
                recipe_arn,
                recommender_config,
                creation_date_time,
                last_updated_date_time,
                status,
                failure_reason,
                latest_recommender_update,
            }
        }
    }
}
impl Recommender {
    /// Returns a fresh, default-initialized builder for [`Recommender`](crate::model::Recommender).
    pub fn builder() -> crate::model::recommender::Builder {
        Default::default()
    }
}
/// <p>Provides a summary of the properties of a recommender update. For a complete listing, call the
/// <a>DescribeRecommender</a> API.</p>
// Marked #[non_exhaustive]: downstream crates must use the
// `recommender_update_summary::Builder` instead of a struct literal, so
// fields can be added without a breaking change.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RecommenderUpdateSummary {
    /// <p>The configuration details of the recommender update.</p>
    pub recommender_config: std::option::Option<crate::model::RecommenderConfig>,
    /// <p>The date and time (in Unix format) that the recommender update was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the recommender update was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The status of the recommender update.</p>
    /// <p>A recommender can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>If a recommender update fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl RecommenderUpdateSummary {
    /// Returns the configuration details of the recommender update, if present.
    pub fn recommender_config(&self) -> std::option::Option<&crate::model::RecommenderConfig> {
        self.recommender_config.as_ref()
    }
    /// Returns the date and time (in Unix format) the update was created, if known.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Returns the date and time (in Unix time) the update was last updated, if known.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// Returns the status of the recommender update, if present.
    ///
    /// Possible lifecycles: CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE
    /// -or- CREATE FAILED, and DELETE PENDING &gt; DELETE IN_PROGRESS.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|v| v.as_str())
    }
    /// Returns the reason behind a failure, if the update failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|v| v.as_str())
    }
}
impl std::fmt::Debug for RecommenderUpdateSummary {
    /// Renders the struct with the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RecommenderUpdateSummary")
            .field("recommender_config", &self.recommender_config)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("status", &self.status)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`RecommenderUpdateSummary`](crate::model::RecommenderUpdateSummary)
pub mod recommender_update_summary {
    /// A builder for [`RecommenderUpdateSummary`](crate::model::RecommenderUpdateSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) recommender_config: std::option::Option<crate::model::RecommenderConfig>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
    }
    impl Builder {
        // Each convenience setter below delegates to its Option-taking
        // `set_*` twin so the assignment logic lives in one place.

        /// Sets the configuration details of the recommender update.
        pub fn recommender_config(self, input: crate::model::RecommenderConfig) -> Self {
            self.set_recommender_config(Some(input))
        }
        /// Sets (or, with `None`, clears) the configuration details.
        pub fn set_recommender_config(
            mut self,
            input: std::option::Option<crate::model::RecommenderConfig>,
        ) -> Self {
            self.recommender_config = input;
            self
        }
        /// Sets the date and time (in Unix format) that the update was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the creation date and time.
        pub fn set_creation_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = input;
            self
        }
        /// Sets the date and time (in Unix time) that the update was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// Sets (or, with `None`, clears) the last-updated date and time.
        pub fn set_last_updated_date_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = input;
            self
        }
        /// Sets the status of the recommender update.
        ///
        /// Possible lifecycles: CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE
        /// -or- CREATE FAILED, and DELETE PENDING &gt; DELETE IN_PROGRESS.
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            self.set_status(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the status of the recommender update.
        pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.status = input;
            self
        }
        /// Sets the reason behind a recommender update failure.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            self.set_failure_reason(Some(input.into()))
        }
        /// Sets (or, with `None`, clears) the failure reason.
        pub fn set_failure_reason(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.failure_reason = input;
            self
        }
        /// Consumes the builder and constructs a [`RecommenderUpdateSummary`](crate::model::RecommenderUpdateSummary)
        pub fn build(self) -> crate::model::RecommenderUpdateSummary {
            // Destructure so any future field added to the builder but not
            // mapped here becomes a compile error instead of silently dropped.
            let Self {
                recommender_config,
                creation_date_time,
                last_updated_date_time,
                status,
                failure_reason,
            } = self;
            crate::model::RecommenderUpdateSummary {
                recommender_config,
                creation_date_time,
                last_updated_date_time,
                status,
                failure_reason,
            }
        }
    }
}
impl RecommenderUpdateSummary {
    /// Returns a fresh, default-initialized builder for [`RecommenderUpdateSummary`](crate::model::RecommenderUpdateSummary).
    pub fn builder() -> crate::model::recommender_update_summary::Builder {
        Default::default()
    }
}
/// <p>Provides information about a recipe. Each recipe provides an algorithm
/// that Amazon Personalize uses in model training when you use the <a>CreateSolution</a>
/// operation. </p>
// Marked #[non_exhaustive]: downstream crates must use the
// `recipe::Builder` instead of a struct literal, so fields can be added
// without a breaking change.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Recipe {
    /// <p>The name of the recipe.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
    pub recipe_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the algorithm that Amazon Personalize uses to train
    /// the model.</p>
    pub algorithm_arn: std::option::Option<std::string::String>,
    /// <p>The ARN of the FeatureTransformation object.</p>
    pub feature_transformation_arn: std::option::Option<std::string::String>,
    /// <p>The status of the recipe.</p>
    pub status: std::option::Option<std::string::String>,
    /// <p>The description of the recipe.</p>
    pub description: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix format) that the recipe was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>One of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>PERSONALIZED_RANKING</p>
    /// </li>
    /// <li>
    /// <p>RELATED_ITEMS</p>
    /// </li>
    /// <li>
    /// <p>USER_PERSONALIZATION</p>
    /// </li>
    /// </ul>
    pub recipe_type: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix format) that the recipe was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl Recipe {
    /// Returns the name of the recipe, if present.
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|v| v.as_str())
    }
    /// Returns the Amazon Resource Name (ARN) of the recipe, if present.
    pub fn recipe_arn(&self) -> std::option::Option<&str> {
        self.recipe_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the ARN of the algorithm that Amazon Personalize uses to train
    /// the model, if present.
    pub fn algorithm_arn(&self) -> std::option::Option<&str> {
        self.algorithm_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the ARN of the FeatureTransformation object, if present.
    pub fn feature_transformation_arn(&self) -> std::option::Option<&str> {
        self.feature_transformation_arn.as_ref().map(|v| v.as_str())
    }
    /// Returns the status of the recipe, if present.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|v| v.as_str())
    }
    /// Returns the description of the recipe, if present.
    pub fn description(&self) -> std::option::Option<&str> {
        self.description.as_ref().map(|v| v.as_str())
    }
    /// Returns the date and time (in Unix format) that the recipe was created, if known.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Returns the recipe type, if present: one of PERSONALIZED_RANKING,
    /// RELATED_ITEMS, or USER_PERSONALIZATION.
    pub fn recipe_type(&self) -> std::option::Option<&str> {
        self.recipe_type.as_ref().map(|v| v.as_str())
    }
    /// Returns the date and time (in Unix format) that the recipe was last updated, if known.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for Recipe {
    /// Renders the struct with the standard `debug_struct` builder chain.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Recipe")
            .field("name", &self.name)
            .field("recipe_arn", &self.recipe_arn)
            .field("algorithm_arn", &self.algorithm_arn)
            .field(
                "feature_transformation_arn",
                &self.feature_transformation_arn,
            )
            .field("status", &self.status)
            .field("description", &self.description)
            .field("creation_date_time", &self.creation_date_time)
            .field("recipe_type", &self.recipe_type)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`Recipe`](crate::model::Recipe)
pub mod recipe {
    /// A builder for [`Recipe`](crate::model::Recipe)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) recipe_arn: std::option::Option<std::string::String>,
        pub(crate) algorithm_arn: std::option::Option<std::string::String>,
        pub(crate) feature_transformation_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) description: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) recipe_type: std::option::Option<std::string::String>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the recipe.</p>
        pub fn name(self, value: impl Into<std::string::String>) -> Self {
            Self { name: Some(value.into()), ..self }
        }
        /// <p>The name of the recipe.</p>
        pub fn set_name(self, value: std::option::Option<std::string::String>) -> Self {
            Self { name: value, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
        pub fn recipe_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { recipe_arn: Some(value.into()), ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the recipe.</p>
        pub fn set_recipe_arn(self, value: std::option::Option<std::string::String>) -> Self {
            Self { recipe_arn: value, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the algorithm that Amazon Personalize uses to train
        /// the model.</p>
        pub fn algorithm_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { algorithm_arn: Some(value.into()), ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the algorithm that Amazon Personalize uses to train
        /// the model.</p>
        pub fn set_algorithm_arn(self, value: std::option::Option<std::string::String>) -> Self {
            Self { algorithm_arn: value, ..self }
        }
        /// <p>The ARN of the FeatureTransformation object.</p>
        pub fn feature_transformation_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { feature_transformation_arn: Some(value.into()), ..self }
        }
        /// <p>The ARN of the FeatureTransformation object.</p>
        pub fn set_feature_transformation_arn(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { feature_transformation_arn: value, ..self }
        }
        /// <p>The status of the recipe.</p>
        pub fn status(self, value: impl Into<std::string::String>) -> Self {
            Self { status: Some(value.into()), ..self }
        }
        /// <p>The status of the recipe.</p>
        pub fn set_status(self, value: std::option::Option<std::string::String>) -> Self {
            Self { status: value, ..self }
        }
        /// <p>The description of the recipe.</p>
        pub fn description(self, value: impl Into<std::string::String>) -> Self {
            Self { description: Some(value.into()), ..self }
        }
        /// <p>The description of the recipe.</p>
        pub fn set_description(self, value: std::option::Option<std::string::String>) -> Self {
            Self { description: value, ..self }
        }
        /// <p>The date and time (in Unix format) that the recipe was created.</p>
        pub fn creation_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { creation_date_time: Some(value), ..self }
        }
        /// <p>The date and time (in Unix format) that the recipe was created.</p>
        pub fn set_creation_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { creation_date_time: value, ..self }
        }
        /// <p>One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>PERSONALIZED_RANKING</p>
        /// </li>
        /// <li>
        /// <p>RELATED_ITEMS</p>
        /// </li>
        /// <li>
        /// <p>USER_PERSONALIZATION</p>
        /// </li>
        /// </ul>
        pub fn recipe_type(self, value: impl Into<std::string::String>) -> Self {
            Self { recipe_type: Some(value.into()), ..self }
        }
        /// <p>One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>PERSONALIZED_RANKING</p>
        /// </li>
        /// <li>
        /// <p>RELATED_ITEMS</p>
        /// </li>
        /// <li>
        /// <p>USER_PERSONALIZATION</p>
        /// </li>
        /// </ul>
        pub fn set_recipe_type(self, value: std::option::Option<std::string::String>) -> Self {
            Self { recipe_type: value, ..self }
        }
        /// <p>The date and time (in Unix format) that the recipe was last updated.</p>
        pub fn last_updated_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { last_updated_date_time: Some(value), ..self }
        }
        /// <p>The date and time (in Unix format) that the recipe was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { last_updated_date_time: value, ..self }
        }
        /// Consumes the builder and constructs a [`Recipe`](crate::model::Recipe)
        pub fn build(self) -> crate::model::Recipe {
            // Destructure once so every field is moved exactly as set.
            let Self {
                name,
                recipe_arn,
                algorithm_arn,
                feature_transformation_arn,
                status,
                description,
                creation_date_time,
                recipe_type,
                last_updated_date_time,
            } = self;
            crate::model::Recipe {
                name,
                recipe_arn,
                algorithm_arn,
                feature_transformation_arn,
                status,
                description,
                creation_date_time,
                recipe_type,
                last_updated_date_time,
            }
        }
    }
}
impl Recipe {
    /// Creates a new builder-style object to manufacture [`Recipe`](crate::model::Recipe)
    pub fn builder() -> crate::model::recipe::Builder {
        // The return type fixes the concrete `Default` implementation.
        Default::default()
    }
}
/// <p>Contains information on a recommendation filter, including its ARN, status, and filter
/// expression.</p>
// Constructed via `Filter::builder()` (see `crate::model::filter::Builder`); every field is
// Option-wrapped.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Filter {
    /// <p>The name of the filter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The ARN of the filter.</p>
    pub filter_arn: std::option::Option<std::string::String>,
    /// <p>The time at which the filter was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the filter was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The ARN of the dataset group to which the filter belongs.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>If the filter failed, the reason for its failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>Specifies the type of item interactions to filter out of recommendation results. The
    /// filter expression must follow specific format rules. For information about filter expression structure and syntax, see
    /// <a>filter-expressions</a>.</p>
    // Treated as sensitive: the manual `impl Debug for Filter` redacts this field.
    pub filter_expression: std::option::Option<std::string::String>,
    /// <p>The status of the filter.</p>
    pub status: std::option::Option<std::string::String>,
}
impl Filter {
    /// <p>The name of the filter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(std::string::String::as_str)
    }
    /// <p>The ARN of the filter.</p>
    pub fn filter_arn(&self) -> std::option::Option<&str> {
        self.filter_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The time at which the filter was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The time at which the filter was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>The ARN of the dataset group to which the filter belongs.</p>
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>If the filter failed, the reason for its failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(std::string::String::as_str)
    }
    /// <p>Specifies the type of item interactions to filter out of recommendation results. The
    /// filter expression must follow specific format rules. For information about filter expression structure and syntax, see
    /// <a>filter-expressions</a>.</p>
    pub fn filter_expression(&self) -> std::option::Option<&str> {
        self.filter_expression.as_ref().map(std::string::String::as_str)
    }
    /// <p>The status of the filter.</p>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(std::string::String::as_str)
    }
}
impl std::fmt::Debug for Filter {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `filter_expression` is sensitive and must never appear in logs.
        f.debug_struct("Filter")
            .field("name", &self.name)
            .field("filter_arn", &self.filter_arn)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("failure_reason", &self.failure_reason)
            .field("filter_expression", &"*** Sensitive Data Redacted ***")
            .field("status", &self.status)
            .finish()
    }
}
/// See [`Filter`](crate::model::Filter)
pub mod filter {
    /// A builder for [`Filter`](crate::model::Filter)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) filter_arn: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) filter_expression: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The name of the filter.</p>
        pub fn name(self, value: impl Into<std::string::String>) -> Self {
            Self { name: Some(value.into()), ..self }
        }
        /// <p>The name of the filter.</p>
        pub fn set_name(self, value: std::option::Option<std::string::String>) -> Self {
            Self { name: value, ..self }
        }
        /// <p>The ARN of the filter.</p>
        pub fn filter_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { filter_arn: Some(value.into()), ..self }
        }
        /// <p>The ARN of the filter.</p>
        pub fn set_filter_arn(self, value: std::option::Option<std::string::String>) -> Self {
            Self { filter_arn: value, ..self }
        }
        /// <p>The time at which the filter was created.</p>
        pub fn creation_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { creation_date_time: Some(value), ..self }
        }
        /// <p>The time at which the filter was created.</p>
        pub fn set_creation_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { creation_date_time: value, ..self }
        }
        /// <p>The time at which the filter was last updated.</p>
        pub fn last_updated_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { last_updated_date_time: Some(value), ..self }
        }
        /// <p>The time at which the filter was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { last_updated_date_time: value, ..self }
        }
        /// <p>The ARN of the dataset group to which the filter belongs.</p>
        pub fn dataset_group_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { dataset_group_arn: Some(value.into()), ..self }
        }
        /// <p>The ARN of the dataset group to which the filter belongs.</p>
        pub fn set_dataset_group_arn(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { dataset_group_arn: value, ..self }
        }
        /// <p>If the filter failed, the reason for its failure.</p>
        pub fn failure_reason(self, value: impl Into<std::string::String>) -> Self {
            Self { failure_reason: Some(value.into()), ..self }
        }
        /// <p>If the filter failed, the reason for its failure.</p>
        pub fn set_failure_reason(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { failure_reason: value, ..self }
        }
        /// <p>Specifies the type of item interactions to filter out of recommendation results. The
        /// filter expression must follow specific format rules. For information about filter expression structure and syntax, see
        /// <a>filter-expressions</a>.</p>
        pub fn filter_expression(self, value: impl Into<std::string::String>) -> Self {
            Self { filter_expression: Some(value.into()), ..self }
        }
        /// <p>Specifies the type of item interactions to filter out of recommendation results. The
        /// filter expression must follow specific format rules. For information about filter expression structure and syntax, see
        /// <a>filter-expressions</a>.</p>
        pub fn set_filter_expression(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { filter_expression: value, ..self }
        }
        /// <p>The status of the filter.</p>
        pub fn status(self, value: impl Into<std::string::String>) -> Self {
            Self { status: Some(value.into()), ..self }
        }
        /// <p>The status of the filter.</p>
        pub fn set_status(self, value: std::option::Option<std::string::String>) -> Self {
            Self { status: value, ..self }
        }
        /// Consumes the builder and constructs a [`Filter`](crate::model::Filter)
        pub fn build(self) -> crate::model::Filter {
            // Destructure once so every field is moved exactly as set.
            let Self {
                name,
                filter_arn,
                creation_date_time,
                last_updated_date_time,
                dataset_group_arn,
                failure_reason,
                filter_expression,
                status,
            } = self;
            crate::model::Filter {
                name,
                filter_arn,
                creation_date_time,
                last_updated_date_time,
                dataset_group_arn,
                failure_reason,
                filter_expression,
                status,
            }
        }
    }
}
impl Filter {
    /// Creates a new builder-style object to manufacture [`Filter`](crate::model::Filter)
    pub fn builder() -> crate::model::filter::Builder {
        // The return type fixes the concrete `Default` implementation.
        Default::default()
    }
}
/// <p>Provides feature transformation information. Feature transformation is the process
/// of modifying raw input data into a form more suitable for model training.</p>
// Constructed via `FeatureTransformation::builder()`; every field is Option-wrapped.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct FeatureTransformation {
    /// <p>The name of the feature transformation.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the FeatureTransformation object.</p>
    pub feature_transformation_arn: std::option::Option<std::string::String>,
    /// <p>Provides the default parameters for feature transformation.</p>
    // String-to-string map; entries are accumulated one pair at a time by the builder's
    // `default_parameters(k, v)` method.
    pub default_parameters:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>The creation date and time (in Unix time) of the feature transformation.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The last update date and time (in Unix time) of the feature transformation.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The status of the feature transformation.</p>
    /// <p>A feature transformation can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
}
impl FeatureTransformation {
    /// <p>The name of the feature transformation.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(std::string::String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the FeatureTransformation object.</p>
    pub fn feature_transformation_arn(&self) -> std::option::Option<&str> {
        self.feature_transformation_arn
            .as_ref()
            .map(std::string::String::as_str)
    }
    /// <p>Provides the default parameters for feature transformation.</p>
    pub fn default_parameters(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.default_parameters.as_ref()
    }
    /// <p>The creation date and time (in Unix time) of the feature transformation.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The last update date and time (in Unix time) of the feature transformation.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>The status of the feature transformation.</p>
    /// <p>A feature transformation can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(std::string::String::as_str)
    }
}
impl std::fmt::Debug for FeatureTransformation {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FeatureTransformation")
            .field("name", &self.name)
            .field(
                "feature_transformation_arn",
                &self.feature_transformation_arn,
            )
            .field("default_parameters", &self.default_parameters)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("status", &self.status)
            .finish()
    }
}
/// See [`FeatureTransformation`](crate::model::FeatureTransformation)
pub mod feature_transformation {
    /// A builder for [`FeatureTransformation`](crate::model::FeatureTransformation)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) feature_transformation_arn: std::option::Option<std::string::String>,
        pub(crate) default_parameters: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) status: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The name of the feature transformation.</p>
        pub fn name(self, value: impl Into<std::string::String>) -> Self {
            Self { name: Some(value.into()), ..self }
        }
        /// <p>The name of the feature transformation.</p>
        pub fn set_name(self, value: std::option::Option<std::string::String>) -> Self {
            Self { name: value, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the FeatureTransformation object.</p>
        pub fn feature_transformation_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { feature_transformation_arn: Some(value.into()), ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the FeatureTransformation object.</p>
        pub fn set_feature_transformation_arn(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { feature_transformation_arn: value, ..self }
        }
        /// Adds a key-value pair to `default_parameters`.
        ///
        /// To override the contents of this collection use [`set_default_parameters`](Self::set_default_parameters).
        ///
        /// <p>Provides the default parameters for feature transformation.</p>
        pub fn default_parameters(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insertion.
            self.default_parameters
                .get_or_insert_with(Default::default)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Provides the default parameters for feature transformation.</p>
        pub fn set_default_parameters(
            self,
            value: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            Self { default_parameters: value, ..self }
        }
        /// <p>The creation date and time (in Unix time) of the feature transformation.</p>
        pub fn creation_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { creation_date_time: Some(value), ..self }
        }
        /// <p>The creation date and time (in Unix time) of the feature transformation.</p>
        pub fn set_creation_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { creation_date_time: value, ..self }
        }
        /// <p>The last update date and time (in Unix time) of the feature transformation.</p>
        pub fn last_updated_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { last_updated_date_time: Some(value), ..self }
        }
        /// <p>The last update date and time (in Unix time) of the feature transformation.</p>
        pub fn set_last_updated_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { last_updated_date_time: value, ..self }
        }
        /// <p>The status of the feature transformation.</p>
        /// <p>A feature transformation can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// </ul>
        pub fn status(self, value: impl Into<std::string::String>) -> Self {
            Self { status: Some(value.into()), ..self }
        }
        /// <p>The status of the feature transformation.</p>
        /// <p>A feature transformation can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, value: std::option::Option<std::string::String>) -> Self {
            Self { status: value, ..self }
        }
        /// Consumes the builder and constructs a [`FeatureTransformation`](crate::model::FeatureTransformation)
        pub fn build(self) -> crate::model::FeatureTransformation {
            // Destructure once so every field is moved exactly as set.
            let Self {
                name,
                feature_transformation_arn,
                default_parameters,
                creation_date_time,
                last_updated_date_time,
                status,
            } = self;
            crate::model::FeatureTransformation {
                name,
                feature_transformation_arn,
                default_parameters,
                creation_date_time,
                last_updated_date_time,
                status,
            }
        }
    }
}
impl FeatureTransformation {
    /// Creates a new builder-style object to manufacture [`FeatureTransformation`](crate::model::FeatureTransformation)
    pub fn builder() -> crate::model::feature_transformation::Builder {
        // The return type fixes the concrete `Default` implementation.
        Default::default()
    }
}
/// <p>Provides information about an event tracker.</p>
// Constructed via `EventTracker::builder()`; every field is Option-wrapped.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct EventTracker {
    /// <p>The name of the event tracker.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The ARN of the event tracker.</p>
    pub event_tracker_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Web Services account that owns the event tracker.</p>
    pub account_id: std::option::Option<std::string::String>,
    /// <p>The ID of the event tracker. Include this ID in requests to the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html">PutEvents</a> API.</p>
    pub tracking_id: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group that receives the event data.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>The status of the event tracker.</p>
    /// <p>An event tracker can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix format) that the event tracker was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl EventTracker {
    /// <p>The name of the event tracker.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(std::string::String::as_str)
    }
    /// <p>The ARN of the event tracker.</p>
    pub fn event_tracker_arn(&self) -> std::option::Option<&str> {
        self.event_tracker_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The Amazon Web Services account that owns the event tracker.</p>
    pub fn account_id(&self) -> std::option::Option<&str> {
        self.account_id.as_ref().map(std::string::String::as_str)
    }
    /// <p>The ID of the event tracker. Include this ID in requests to the
    /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html">PutEvents</a> API.</p>
    pub fn tracking_id(&self) -> std::option::Option<&str> {
        self.tracking_id.as_ref().map(std::string::String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset group that receives the event data.</p>
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The status of the event tracker.</p>
    /// <p>An event tracker can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(std::string::String::as_str)
    }
    /// <p>The date and time (in Unix format) that the event tracker was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for EventTracker {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("EventTracker")
            .field("name", &self.name)
            .field("event_tracker_arn", &self.event_tracker_arn)
            .field("account_id", &self.account_id)
            .field("tracking_id", &self.tracking_id)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`EventTracker`](crate::model::EventTracker)
pub mod event_tracker {
    /// A builder for [`EventTracker`](crate::model::EventTracker)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) event_tracker_arn: std::option::Option<std::string::String>,
        pub(crate) account_id: std::option::Option<std::string::String>,
        pub(crate) tracking_id: std::option::Option<std::string::String>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the event tracker.</p>
        pub fn name(self, value: impl Into<std::string::String>) -> Self {
            Self { name: Some(value.into()), ..self }
        }
        /// <p>The name of the event tracker.</p>
        pub fn set_name(self, value: std::option::Option<std::string::String>) -> Self {
            Self { name: value, ..self }
        }
        /// <p>The ARN of the event tracker.</p>
        pub fn event_tracker_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { event_tracker_arn: Some(value.into()), ..self }
        }
        /// <p>The ARN of the event tracker.</p>
        pub fn set_event_tracker_arn(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { event_tracker_arn: value, ..self }
        }
        /// <p>The Amazon Web Services account that owns the event tracker.</p>
        pub fn account_id(self, value: impl Into<std::string::String>) -> Self {
            Self { account_id: Some(value.into()), ..self }
        }
        /// <p>The Amazon Web Services account that owns the event tracker.</p>
        pub fn set_account_id(self, value: std::option::Option<std::string::String>) -> Self {
            Self { account_id: value, ..self }
        }
        /// <p>The ID of the event tracker. Include this ID in requests to the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html">PutEvents</a> API.</p>
        pub fn tracking_id(self, value: impl Into<std::string::String>) -> Self {
            Self { tracking_id: Some(value.into()), ..self }
        }
        /// <p>The ID of the event tracker. Include this ID in requests to the
        /// <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html">PutEvents</a> API.</p>
        pub fn set_tracking_id(self, value: std::option::Option<std::string::String>) -> Self {
            Self { tracking_id: value, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group that receives the event data.</p>
        pub fn dataset_group_arn(self, value: impl Into<std::string::String>) -> Self {
            Self { dataset_group_arn: Some(value.into()), ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group that receives the event data.</p>
        pub fn set_dataset_group_arn(
            self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            Self { dataset_group_arn: value, ..self }
        }
        /// <p>The status of the event tracker.</p>
        /// <p>An event tracker can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, value: impl Into<std::string::String>) -> Self {
            Self { status: Some(value.into()), ..self }
        }
        /// <p>The status of the event tracker.</p>
        /// <p>An event tracker can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING &gt; DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, value: std::option::Option<std::string::String>) -> Self {
            Self { status: value, ..self }
        }
        /// <p>The date and time (in Unix format) that the event tracker was created.</p>
        pub fn creation_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { creation_date_time: Some(value), ..self }
        }
        /// <p>The date and time (in Unix format) that the event tracker was created.</p>
        pub fn set_creation_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { creation_date_time: value, ..self }
        }
        /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
        pub fn last_updated_date_time(self, value: aws_smithy_types::DateTime) -> Self {
            Self { last_updated_date_time: Some(value), ..self }
        }
        /// <p>The date and time (in Unix time) that the event tracker was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { last_updated_date_time: value, ..self }
        }
        /// Consumes the builder and constructs a [`EventTracker`](crate::model::EventTracker)
        pub fn build(self) -> crate::model::EventTracker {
            // Destructure once so every field is moved exactly as set.
            let Self {
                name,
                event_tracker_arn,
                account_id,
                tracking_id,
                dataset_group_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::EventTracker {
                name,
                event_tracker_arn,
                account_id,
                tracking_id,
                dataset_group_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl EventTracker {
    /// Creates a new builder-style object to manufacture [`EventTracker`](crate::model::EventTracker)
    pub fn builder() -> crate::model::event_tracker::Builder {
        // The return type fixes the concrete `Default` implementation.
        Default::default()
    }
}
/// <p>Describes a job that imports training data from a data source (Amazon S3 bucket) to an
/// Amazon Personalize dataset. For more information, see <a>CreateDatasetImportJob</a>.</p>
/// <p>A dataset import job can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
/// </li>
/// </ul>
// Constructed via `DatasetImportJob::builder()`; every field is Option-wrapped.
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DatasetImportJob {
    /// <p>The name of the import job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The ARN of the dataset import job.</p>
    pub dataset_import_job_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset that receives the imported data.</p>
    pub dataset_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon S3 bucket that contains the training data to import.</p>
    pub data_source: std::option::Option<crate::model::DataSource>,
    /// <p>The ARN of the IAM role that has permissions to read from the Amazon S3 data
    /// source.</p>
    pub role_arn: std::option::Option<std::string::String>,
    /// <p>The status of the dataset import job.</p>
    /// <p>A dataset import job can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The creation date and time (in Unix time) of the dataset import job.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) the dataset was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>If a dataset import job fails, provides the reason why.</p>
    pub failure_reason: std::option::Option<std::string::String>,
}
impl DatasetImportJob {
    /// <p>The name of the import job.</p>
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(std::string::String::as_str)
    }
    /// <p>The ARN of the dataset import job.</p>
    pub fn dataset_import_job_arn(&self) -> std::option::Option<&str> {
        self.dataset_import_job_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset that receives the imported data.</p>
    pub fn dataset_arn(&self) -> std::option::Option<&str> {
        self.dataset_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The Amazon S3 bucket that contains the training data to import.</p>
    pub fn data_source(&self) -> std::option::Option<&crate::model::DataSource> {
        self.data_source.as_ref()
    }
    /// <p>The ARN of the IAM role that has permissions to read from the Amazon S3 data
    /// source.</p>
    pub fn role_arn(&self) -> std::option::Option<&str> {
        self.role_arn.as_ref().map(std::string::String::as_str)
    }
    /// <p>The status of the dataset import job.</p>
    /// <p>A dataset import job can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING &gt; CREATE IN_PROGRESS &gt; ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(std::string::String::as_str)
    }
    /// <p>The creation date and time (in Unix time) of the dataset import job.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) the dataset was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
    /// <p>If a dataset import job fails, provides the reason why.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(std::string::String::as_str)
    }
}
impl std::fmt::Debug for DatasetImportJob {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Hand-rolled equivalent of a derived Debug: every field, in declaration order.
        f.debug_struct("DatasetImportJob")
            .field("job_name", &self.job_name)
            .field("dataset_import_job_arn", &self.dataset_import_job_arn)
            .field("dataset_arn", &self.dataset_arn)
            .field("data_source", &self.data_source)
            .field("role_arn", &self.role_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`DatasetImportJob`](crate::model::DatasetImportJob)
pub mod dataset_import_job {
    /// A builder for [`DatasetImportJob`](crate::model::DatasetImportJob)
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) job_name: Option<String>,
        pub(crate) dataset_import_job_arn: Option<String>,
        pub(crate) dataset_arn: Option<String>,
        pub(crate) data_source: Option<crate::model::DataSource>,
        pub(crate) role_arn: Option<String>,
        pub(crate) status: Option<String>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: Option<String>,
    }
    impl Builder {
        /// <p>The name of the import job.</p>
        pub fn job_name(mut self, input: impl Into<String>) -> Self { self.job_name = Some(input.into()); self }
        /// <p>The name of the import job.</p>
        pub fn set_job_name(mut self, input: Option<String>) -> Self { self.job_name = input; self }
        /// <p>The ARN of the dataset import job.</p>
        pub fn dataset_import_job_arn(mut self, input: impl Into<String>) -> Self { self.dataset_import_job_arn = Some(input.into()); self }
        /// <p>The ARN of the dataset import job.</p>
        pub fn set_dataset_import_job_arn(mut self, input: Option<String>) -> Self { self.dataset_import_job_arn = input; self }
        /// <p>The Amazon Resource Name (ARN) of the dataset that receives the imported data.</p>
        pub fn dataset_arn(mut self, input: impl Into<String>) -> Self { self.dataset_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the dataset that receives the imported data.</p>
        pub fn set_dataset_arn(mut self, input: Option<String>) -> Self { self.dataset_arn = input; self }
        /// <p>The Amazon S3 bucket that contains the training data to import.</p>
        pub fn data_source(mut self, input: crate::model::DataSource) -> Self { self.data_source = Some(input); self }
        /// <p>The Amazon S3 bucket that contains the training data to import.</p>
        pub fn set_data_source(mut self, input: Option<crate::model::DataSource>) -> Self { self.data_source = input; self }
        /// <p>The ARN of the IAM role that has permissions to read from the Amazon S3 data
        /// source.</p>
        pub fn role_arn(mut self, input: impl Into<String>) -> Self { self.role_arn = Some(input.into()); self }
        /// <p>The ARN of the IAM role that has permissions to read from the Amazon S3 data
        /// source.</p>
        pub fn set_role_arn(mut self, input: Option<String>) -> Self { self.role_arn = input; self }
        /// <p>The status of the dataset import job.</p>
        /// <p>A dataset import job can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
        pub fn status(mut self, input: impl Into<String>) -> Self { self.status = Some(input.into()); self }
        /// <p>The status of the dataset import job.</p>
        /// <p>A dataset import job can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
        pub fn set_status(mut self, input: Option<String>) -> Self { self.status = input; self }
        /// <p>The creation date and time (in Unix time) of the dataset import job.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.creation_date_time = Some(input); self }
        /// <p>The creation date and time (in Unix time) of the dataset import job.</p>
        pub fn set_creation_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.creation_date_time = input; self }
        /// <p>The date and time (in Unix time) the dataset was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.last_updated_date_time = Some(input); self }
        /// <p>The date and time (in Unix time) the dataset was last updated.</p>
        pub fn set_last_updated_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.last_updated_date_time = input; self }
        /// <p>If a dataset import job fails, provides the reason why.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self { self.failure_reason = Some(input.into()); self }
        /// <p>If a dataset import job fails, provides the reason why.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self { self.failure_reason = input; self }
        /// Consumes the builder and constructs a [`DatasetImportJob`](crate::model::DatasetImportJob)
        pub fn build(self) -> crate::model::DatasetImportJob {
            crate::model::DatasetImportJob {
                job_name: self.job_name,
                dataset_import_job_arn: self.dataset_import_job_arn,
                dataset_arn: self.dataset_arn,
                data_source: self.data_source,
                role_arn: self.role_arn,
                status: self.status,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
            }
        }
    }
}
impl DatasetImportJob {
    /// Creates a new builder-style object to manufacture [`DatasetImportJob`](crate::model::DatasetImportJob)
    pub fn builder() -> crate::model::dataset_import_job::Builder {
        // `Builder` derives `Default`, so an empty builder is just its default value.
        Default::default()
    }
}
/// <p>Describes the data source that contains the data to upload to a dataset.</p>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct DataSource {
    /// <p>The path to the Amazon S3 bucket where the data that you want to upload to your dataset is
    /// stored. For example: </p>
    /// <p> <code>s3://bucket-name/folder-name/</code> </p>
    pub data_location: Option<String>,
}
impl DataSource {
    /// <p>The path to the Amazon S3 bucket where the data that you want to upload to your dataset is
    /// stored. For example: </p>
    /// <p> <code>s3://bucket-name/folder-name/</code> </p>
    pub fn data_location(&self) -> Option<&str> { self.data_location.as_deref() }
}
impl std::fmt::Debug for DataSource {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single-field struct rendered in derive style.
        f.debug_struct("DataSource")
            .field("data_location", &self.data_location)
            .finish()
    }
}
/// See [`DataSource`](crate::model::DataSource)
pub mod data_source {
    /// A builder for [`DataSource`](crate::model::DataSource)
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) data_location: Option<String>,
    }
    impl Builder {
        /// <p>The path to the Amazon S3 bucket where the data that you want to upload to your dataset is
        /// stored. For example: </p>
        /// <p> <code>s3://bucket-name/folder-name/</code> </p>
        pub fn data_location(mut self, input: impl Into<String>) -> Self { self.data_location = Some(input.into()); self }
        /// <p>The path to the Amazon S3 bucket where the data that you want to upload to your dataset is
        /// stored. For example: </p>
        /// <p> <code>s3://bucket-name/folder-name/</code> </p>
        pub fn set_data_location(mut self, input: Option<String>) -> Self { self.data_location = input; self }
        /// Consumes the builder and constructs a [`DataSource`](crate::model::DataSource)
        pub fn build(self) -> crate::model::DataSource {
            crate::model::DataSource {
                data_location: self.data_location,
            }
        }
    }
}
impl DataSource {
    /// Creates a new builder-style object to manufacture [`DataSource`](crate::model::DataSource)
    pub fn builder() -> crate::model::data_source::Builder {
        // `Builder` derives `Default`, so an empty builder is just its default value.
        Default::default()
    }
}
/// <p>A dataset group is a collection of related datasets (Interactions, User, and Item). You
/// create a dataset group by calling <a>CreateDatasetGroup</a>. You then create a
/// dataset and add it to a dataset group by calling <a>CreateDataset</a>. The dataset
/// group is used to create and train a solution by calling <a>CreateSolution</a>. A
/// dataset group can contain only one of each type of dataset.</p>
/// <p>You can specify an Key Management Service (KMS) key to encrypt the datasets in the group.</p>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct DatasetGroup {
    /// <p>The name of the dataset group.</p>
    pub name: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
    pub dataset_group_arn: Option<String>,
    /// <p>The current status of the dataset group.</p>
    /// <p>A dataset group can be in one of the following states:</p>
    /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> <li> <p>DELETE PENDING</p> </li> </ul>
    pub status: Option<String>,
    /// <p>The ARN of the IAM role that has permissions to create the dataset group.</p>
    pub role_arn: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key used to encrypt the datasets.</p>
    pub kms_key_arn: Option<String>,
    /// <p>The creation date and time (in Unix time) of the dataset group.</p>
    pub creation_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>The last update date and time (in Unix time) of the dataset group.</p>
    pub last_updated_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>If creating a dataset group fails, provides the reason why.</p>
    pub failure_reason: Option<String>,
    /// <p>The domain of a Domain dataset group.</p>
    pub domain: Option<crate::model::Domain>,
}
impl DatasetGroup {
    /// <p>The name of the dataset group.</p>
    pub fn name(&self) -> Option<&str> { self.name.as_deref() }
    /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
    pub fn dataset_group_arn(&self) -> Option<&str> { self.dataset_group_arn.as_deref() }
    /// <p>The current status of the dataset group.</p>
    /// <p>A dataset group can be in one of the following states:</p>
    /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> <li> <p>DELETE PENDING</p> </li> </ul>
    pub fn status(&self) -> Option<&str> { self.status.as_deref() }
    /// <p>The ARN of the IAM role that has permissions to create the dataset group.</p>
    pub fn role_arn(&self) -> Option<&str> { self.role_arn.as_deref() }
    /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key used to encrypt the datasets.</p>
    pub fn kms_key_arn(&self) -> Option<&str> { self.kms_key_arn.as_deref() }
    /// <p>The creation date and time (in Unix time) of the dataset group.</p>
    pub fn creation_date_time(&self) -> Option<&aws_smithy_types::DateTime> { self.creation_date_time.as_ref() }
    /// <p>The last update date and time (in Unix time) of the dataset group.</p>
    pub fn last_updated_date_time(&self) -> Option<&aws_smithy_types::DateTime> { self.last_updated_date_time.as_ref() }
    /// <p>If creating a dataset group fails, provides the reason why.</p>
    pub fn failure_reason(&self) -> Option<&str> { self.failure_reason.as_deref() }
    /// <p>The domain of a Domain dataset group.</p>
    pub fn domain(&self) -> Option<&crate::model::Domain> { self.domain.as_ref() }
}
impl std::fmt::Debug for DatasetGroup {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Hand-rolled equivalent of a derived Debug: every field, in declaration order.
        f.debug_struct("DatasetGroup")
            .field("name", &self.name)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("status", &self.status)
            .field("role_arn", &self.role_arn)
            .field("kms_key_arn", &self.kms_key_arn)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .field("domain", &self.domain)
            .finish()
    }
}
/// See [`DatasetGroup`](crate::model::DatasetGroup)
pub mod dataset_group {
    /// A builder for [`DatasetGroup`](crate::model::DatasetGroup)
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) name: Option<String>,
        pub(crate) dataset_group_arn: Option<String>,
        pub(crate) status: Option<String>,
        pub(crate) role_arn: Option<String>,
        pub(crate) kms_key_arn: Option<String>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: Option<String>,
        pub(crate) domain: Option<crate::model::Domain>,
    }
    impl Builder {
        /// <p>The name of the dataset group.</p>
        pub fn name(mut self, input: impl Into<String>) -> Self { self.name = Some(input.into()); self }
        /// <p>The name of the dataset group.</p>
        pub fn set_name(mut self, input: Option<String>) -> Self { self.name = input; self }
        /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
        pub fn dataset_group_arn(mut self, input: impl Into<String>) -> Self { self.dataset_group_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
        pub fn set_dataset_group_arn(mut self, input: Option<String>) -> Self { self.dataset_group_arn = input; self }
        /// <p>The current status of the dataset group.</p>
        /// <p>A dataset group can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> <li> <p>DELETE PENDING</p> </li> </ul>
        pub fn status(mut self, input: impl Into<String>) -> Self { self.status = Some(input.into()); self }
        /// <p>The current status of the dataset group.</p>
        /// <p>A dataset group can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> <li> <p>DELETE PENDING</p> </li> </ul>
        pub fn set_status(mut self, input: Option<String>) -> Self { self.status = input; self }
        /// <p>The ARN of the IAM role that has permissions to create the dataset group.</p>
        pub fn role_arn(mut self, input: impl Into<String>) -> Self { self.role_arn = Some(input.into()); self }
        /// <p>The ARN of the IAM role that has permissions to create the dataset group.</p>
        pub fn set_role_arn(mut self, input: Option<String>) -> Self { self.role_arn = input; self }
        /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key used to encrypt the datasets.</p>
        pub fn kms_key_arn(mut self, input: impl Into<String>) -> Self { self.kms_key_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key used to encrypt the datasets.</p>
        pub fn set_kms_key_arn(mut self, input: Option<String>) -> Self { self.kms_key_arn = input; self }
        /// <p>The creation date and time (in Unix time) of the dataset group.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.creation_date_time = Some(input); self }
        /// <p>The creation date and time (in Unix time) of the dataset group.</p>
        pub fn set_creation_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.creation_date_time = input; self }
        /// <p>The last update date and time (in Unix time) of the dataset group.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.last_updated_date_time = Some(input); self }
        /// <p>The last update date and time (in Unix time) of the dataset group.</p>
        pub fn set_last_updated_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.last_updated_date_time = input; self }
        /// <p>If creating a dataset group fails, provides the reason why.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self { self.failure_reason = Some(input.into()); self }
        /// <p>If creating a dataset group fails, provides the reason why.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self { self.failure_reason = input; self }
        /// <p>The domain of a Domain dataset group.</p>
        pub fn domain(mut self, input: crate::model::Domain) -> Self { self.domain = Some(input); self }
        /// <p>The domain of a Domain dataset group.</p>
        pub fn set_domain(mut self, input: Option<crate::model::Domain>) -> Self { self.domain = input; self }
        /// Consumes the builder and constructs a [`DatasetGroup`](crate::model::DatasetGroup)
        pub fn build(self) -> crate::model::DatasetGroup {
            crate::model::DatasetGroup {
                name: self.name,
                dataset_group_arn: self.dataset_group_arn,
                status: self.status,
                role_arn: self.role_arn,
                kms_key_arn: self.kms_key_arn,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
                domain: self.domain,
            }
        }
    }
}
impl DatasetGroup {
    /// Creates a new builder-style object to manufacture [`DatasetGroup`](crate::model::DatasetGroup)
    pub fn builder() -> crate::model::dataset_group::Builder {
        // `Builder` derives `Default`, so an empty builder is just its default value.
        Default::default()
    }
}
/// <p>Describes a job that exports a dataset to an Amazon S3 bucket. For more information, see <a>CreateDatasetExportJob</a>.</p>
/// <p>A dataset export job can be in one of the following states:</p>
/// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct DatasetExportJob {
    /// <p>The name of the export job.</p>
    pub job_name: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset export job.</p>
    pub dataset_export_job_arn: Option<String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset to export.</p>
    pub dataset_arn: Option<String>,
    /// <p>The data to export, based on how you imported the data. You can choose to export <code>BULK</code> data that you imported using a dataset import job,
    /// <code>PUT</code> data that you imported incrementally (using the console, PutEvents, PutUsers and PutItems operations), or <code>ALL</code>
    /// for both types. The default value is <code>PUT</code>.
    /// </p>
    pub ingestion_mode: Option<crate::model::IngestionMode>,
    /// <p>The Amazon Resource Name (ARN) of the IAM service role that has permissions to add data to your
    /// output Amazon S3 bucket.</p>
    pub role_arn: Option<String>,
    /// <p>The status of the dataset export job.</p>
    /// <p>A dataset export job can be in one of the following states:</p>
    /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
    pub status: Option<String>,
    /// <p>The path to the Amazon S3 bucket where the job's output is stored. For example:</p>
    /// <p> <code>s3://bucket-name/folder-name/</code> </p>
    pub job_output: Option<crate::model::DatasetExportJobOutput>,
    /// <p>The creation date and time (in Unix time) of the dataset export job.</p>
    pub creation_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) the status of the dataset export job was last updated.</p>
    pub last_updated_date_time: Option<aws_smithy_types::DateTime>,
    /// <p>If a dataset export job fails, provides the reason why.</p>
    pub failure_reason: Option<String>,
}
impl DatasetExportJob {
    /// <p>The name of the export job.</p>
    pub fn job_name(&self) -> Option<&str> { self.job_name.as_deref() }
    /// <p>The Amazon Resource Name (ARN) of the dataset export job.</p>
    pub fn dataset_export_job_arn(&self) -> Option<&str> { self.dataset_export_job_arn.as_deref() }
    /// <p>The Amazon Resource Name (ARN) of the dataset to export.</p>
    pub fn dataset_arn(&self) -> Option<&str> { self.dataset_arn.as_deref() }
    /// <p>The data to export, based on how you imported the data. You can choose to export <code>BULK</code> data that you imported using a dataset import job,
    /// <code>PUT</code> data that you imported incrementally (using the console, PutEvents, PutUsers and PutItems operations), or <code>ALL</code>
    /// for both types. The default value is <code>PUT</code>.
    /// </p>
    pub fn ingestion_mode(&self) -> Option<&crate::model::IngestionMode> { self.ingestion_mode.as_ref() }
    /// <p>The Amazon Resource Name (ARN) of the IAM service role that has permissions to add data to your
    /// output Amazon S3 bucket.</p>
    pub fn role_arn(&self) -> Option<&str> { self.role_arn.as_deref() }
    /// <p>The status of the dataset export job.</p>
    /// <p>A dataset export job can be in one of the following states:</p>
    /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
    pub fn status(&self) -> Option<&str> { self.status.as_deref() }
    /// <p>The path to the Amazon S3 bucket where the job's output is stored. For example:</p>
    /// <p> <code>s3://bucket-name/folder-name/</code> </p>
    pub fn job_output(&self) -> Option<&crate::model::DatasetExportJobOutput> { self.job_output.as_ref() }
    /// <p>The creation date and time (in Unix time) of the dataset export job.</p>
    pub fn creation_date_time(&self) -> Option<&aws_smithy_types::DateTime> { self.creation_date_time.as_ref() }
    /// <p>The date and time (in Unix time) the status of the dataset export job was last updated.</p>
    pub fn last_updated_date_time(&self) -> Option<&aws_smithy_types::DateTime> { self.last_updated_date_time.as_ref() }
    /// <p>If a dataset export job fails, provides the reason why.</p>
    pub fn failure_reason(&self) -> Option<&str> { self.failure_reason.as_deref() }
}
impl std::fmt::Debug for DatasetExportJob {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Hand-rolled equivalent of a derived Debug: every field, in declaration order.
        f.debug_struct("DatasetExportJob")
            .field("job_name", &self.job_name)
            .field("dataset_export_job_arn", &self.dataset_export_job_arn)
            .field("dataset_arn", &self.dataset_arn)
            .field("ingestion_mode", &self.ingestion_mode)
            .field("role_arn", &self.role_arn)
            .field("status", &self.status)
            .field("job_output", &self.job_output)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("failure_reason", &self.failure_reason)
            .finish()
    }
}
/// See [`DatasetExportJob`](crate::model::DatasetExportJob)
pub mod dataset_export_job {
    /// A builder for [`DatasetExportJob`](crate::model::DatasetExportJob)
    #[non_exhaustive]
    #[derive(Default, Clone, PartialEq, Debug)]
    pub struct Builder {
        pub(crate) job_name: Option<String>,
        pub(crate) dataset_export_job_arn: Option<String>,
        pub(crate) dataset_arn: Option<String>,
        pub(crate) ingestion_mode: Option<crate::model::IngestionMode>,
        pub(crate) role_arn: Option<String>,
        pub(crate) status: Option<String>,
        pub(crate) job_output: Option<crate::model::DatasetExportJobOutput>,
        pub(crate) creation_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: Option<aws_smithy_types::DateTime>,
        pub(crate) failure_reason: Option<String>,
    }
    impl Builder {
        /// <p>The name of the export job.</p>
        pub fn job_name(mut self, input: impl Into<String>) -> Self { self.job_name = Some(input.into()); self }
        /// <p>The name of the export job.</p>
        pub fn set_job_name(mut self, input: Option<String>) -> Self { self.job_name = input; self }
        /// <p>The Amazon Resource Name (ARN) of the dataset export job.</p>
        pub fn dataset_export_job_arn(mut self, input: impl Into<String>) -> Self { self.dataset_export_job_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the dataset export job.</p>
        pub fn set_dataset_export_job_arn(mut self, input: Option<String>) -> Self { self.dataset_export_job_arn = input; self }
        /// <p>The Amazon Resource Name (ARN) of the dataset to export.</p>
        pub fn dataset_arn(mut self, input: impl Into<String>) -> Self { self.dataset_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the dataset to export.</p>
        pub fn set_dataset_arn(mut self, input: Option<String>) -> Self { self.dataset_arn = input; self }
        /// <p>The data to export, based on how you imported the data. You can choose to export <code>BULK</code> data that you imported using a dataset import job,
        /// <code>PUT</code> data that you imported incrementally (using the console, PutEvents, PutUsers and PutItems operations), or <code>ALL</code>
        /// for both types. The default value is <code>PUT</code>.
        /// </p>
        pub fn ingestion_mode(mut self, input: crate::model::IngestionMode) -> Self { self.ingestion_mode = Some(input); self }
        /// <p>The data to export, based on how you imported the data. You can choose to export <code>BULK</code> data that you imported using a dataset import job,
        /// <code>PUT</code> data that you imported incrementally (using the console, PutEvents, PutUsers and PutItems operations), or <code>ALL</code>
        /// for both types. The default value is <code>PUT</code>.
        /// </p>
        pub fn set_ingestion_mode(mut self, input: Option<crate::model::IngestionMode>) -> Self { self.ingestion_mode = input; self }
        /// <p>The Amazon Resource Name (ARN) of the IAM service role that has permissions to add data to your
        /// output Amazon S3 bucket.</p>
        pub fn role_arn(mut self, input: impl Into<String>) -> Self { self.role_arn = Some(input.into()); self }
        /// <p>The Amazon Resource Name (ARN) of the IAM service role that has permissions to add data to your
        /// output Amazon S3 bucket.</p>
        pub fn set_role_arn(mut self, input: Option<String>) -> Self { self.role_arn = input; self }
        /// <p>The status of the dataset export job.</p>
        /// <p>A dataset export job can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
        pub fn status(mut self, input: impl Into<String>) -> Self { self.status = Some(input.into()); self }
        /// <p>The status of the dataset export job.</p>
        /// <p>A dataset export job can be in one of the following states:</p>
        /// <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p> </li> </ul>
        pub fn set_status(mut self, input: Option<String>) -> Self { self.status = input; self }
        /// <p>The path to the Amazon S3 bucket where the job's output is stored. For example:</p>
        /// <p> <code>s3://bucket-name/folder-name/</code> </p>
        pub fn job_output(mut self, input: crate::model::DatasetExportJobOutput) -> Self { self.job_output = Some(input); self }
        /// <p>The path to the Amazon S3 bucket where the job's output is stored. For example:</p>
        /// <p> <code>s3://bucket-name/folder-name/</code> </p>
        pub fn set_job_output(mut self, input: Option<crate::model::DatasetExportJobOutput>) -> Self { self.job_output = input; self }
        /// <p>The creation date and time (in Unix time) of the dataset export job.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.creation_date_time = Some(input); self }
        /// <p>The creation date and time (in Unix time) of the dataset export job.</p>
        pub fn set_creation_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.creation_date_time = input; self }
        /// <p>The date and time (in Unix time) the status of the dataset export job was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.last_updated_date_time = Some(input); self }
        /// <p>The date and time (in Unix time) the status of the dataset export job was last updated.</p>
        pub fn set_last_updated_date_time(mut self, input: Option<aws_smithy_types::DateTime>) -> Self { self.last_updated_date_time = input; self }
        /// <p>If a dataset export job fails, provides the reason why.</p>
        pub fn failure_reason(mut self, input: impl Into<String>) -> Self { self.failure_reason = Some(input.into()); self }
        /// <p>If a dataset export job fails, provides the reason why.</p>
        pub fn set_failure_reason(mut self, input: Option<String>) -> Self { self.failure_reason = input; self }
        /// Consumes the builder and constructs a [`DatasetExportJob`](crate::model::DatasetExportJob)
        pub fn build(self) -> crate::model::DatasetExportJob {
            crate::model::DatasetExportJob {
                job_name: self.job_name,
                dataset_export_job_arn: self.dataset_export_job_arn,
                dataset_arn: self.dataset_arn,
                ingestion_mode: self.ingestion_mode,
                role_arn: self.role_arn,
                status: self.status,
                job_output: self.job_output,
                creation_date_time: self.creation_date_time,
                last_updated_date_time: self.last_updated_date_time,
                failure_reason: self.failure_reason,
            }
        }
    }
}
impl DatasetExportJob {
    /// Creates a new builder-style object to manufacture [`DatasetExportJob`](crate::model::DatasetExportJob)
    pub fn builder() -> crate::model::dataset_export_job::Builder {
        // `Builder` derives `Default`, so an empty builder is just its default value.
        Default::default()
    }
}
/// <p>The output configuration parameters of a dataset export job.</p>
#[non_exhaustive]
#[derive(Clone, PartialEq)]
pub struct DatasetExportJobOutput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    pub s3_data_destination: Option<crate::model::S3DataConfig>,
}
impl DatasetExportJobOutput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    pub fn s3_data_destination(&self) -> Option<&crate::model::S3DataConfig> { self.s3_data_destination.as_ref() }
}
impl std::fmt::Debug for DatasetExportJobOutput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single chained debug-struct expression; no intermediate binding needed.
        f.debug_struct("DatasetExportJobOutput")
            .field("s3_data_destination", &self.s3_data_destination)
            .finish()
    }
}
/// See [`DatasetExportJobOutput`](crate::model::DatasetExportJobOutput)
pub mod dataset_export_job_output {
    /// A builder for [`DatasetExportJobOutput`](crate::model::DatasetExportJobOutput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) s3_data_destination: std::option::Option<crate::model::S3DataConfig>,
    }
    impl Builder {
        /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
        pub fn s3_data_destination(self, input: crate::model::S3DataConfig) -> Self {
            // Delegate to the Option-taking setter.
            self.set_s3_data_destination(Some(input))
        }
        /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
        pub fn set_s3_data_destination(
            self,
            input: std::option::Option<crate::model::S3DataConfig>,
        ) -> Self {
            Self {
                s3_data_destination: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`DatasetExportJobOutput`](crate::model::DatasetExportJobOutput)
        pub fn build(self) -> crate::model::DatasetExportJobOutput {
            let Self {
                s3_data_destination,
            } = self;
            crate::model::DatasetExportJobOutput {
                s3_data_destination,
            }
        }
    }
}
impl DatasetExportJobOutput {
    /// Creates a new builder-style object to manufacture [`DatasetExportJobOutput`](crate::model::DatasetExportJobOutput)
    pub fn builder() -> crate::model::dataset_export_job_output::Builder {
        // The return type drives inference; `Default` yields an all-`None` builder.
        Default::default()
    }
}
/// <p>The configuration details of an Amazon S3 input or output bucket.</p>
///
/// Construct instances with [`S3DataConfig::builder`].
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct S3DataConfig {
    /// <p>The file path of the Amazon S3 bucket.</p>
    pub path: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key that Amazon Personalize uses to
    /// encrypt or decrypt the input and output files of a batch inference job.</p>
    pub kms_key_arn: std::option::Option<std::string::String>,
}
impl S3DataConfig {
    /// <p>The file path of the Amazon S3 bucket.</p>
    pub fn path(&self) -> std::option::Option<&str> {
        // Borrowing view of the owned string (equivalent to `Option::as_deref`).
        self.path.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key that Amazon Personalize uses to
    /// encrypt or decrypt the input and output files of a batch inference job.</p>
    pub fn kms_key_arn(&self) -> std::option::Option<&str> {
        self.kms_key_arn.as_ref().map(String::as_str)
    }
}
impl std::fmt::Debug for S3DataConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single chained debug-struct expression; field order matches the declaration.
        f.debug_struct("S3DataConfig")
            .field("path", &self.path)
            .field("kms_key_arn", &self.kms_key_arn)
            .finish()
    }
}
/// See [`S3DataConfig`](crate::model::S3DataConfig)
pub mod s3_data_config {
    /// A builder for [`S3DataConfig`](crate::model::S3DataConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) path: std::option::Option<std::string::String>,
        pub(crate) kms_key_arn: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The file path of the Amazon S3 bucket.</p>
        pub fn path(self, input: impl Into<std::string::String>) -> Self {
            // Delegate to the Option-taking setter.
            self.set_path(Some(input.into()))
        }
        /// <p>The file path of the Amazon S3 bucket.</p>
        pub fn set_path(self, input: std::option::Option<std::string::String>) -> Self {
            Self { path: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key that Amazon Personalize uses to
        /// encrypt or decrypt the input and output files of a batch inference job.</p>
        pub fn kms_key_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_kms_key_arn(Some(input.into()))
        }
        /// <p>The Amazon Resource Name (ARN) of the Key Management Service (KMS) key that Amazon Personalize uses to
        /// encrypt or decrypt the input and output files of a batch inference job.</p>
        pub fn set_kms_key_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                kms_key_arn: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`S3DataConfig`](crate::model::S3DataConfig)
        pub fn build(self) -> crate::model::S3DataConfig {
            // Destructure so every field is moved across by name.
            let Self { path, kms_key_arn } = self;
            crate::model::S3DataConfig { path, kms_key_arn }
        }
    }
}
impl S3DataConfig {
    /// Creates a new builder-style object to manufacture [`S3DataConfig`](crate::model::S3DataConfig)
    pub fn builder() -> crate::model::s3_data_config::Builder {
        // The return type drives inference; `Default` yields an all-`None` builder.
        Default::default()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(
    std::clone::Clone,
    std::cmp::Eq,
    std::cmp::Ord,
    std::cmp::PartialEq,
    std::cmp::PartialOrd,
    std::fmt::Debug,
    std::hash::Hash,
)]
pub enum IngestionMode {
    #[allow(missing_docs)] // documentation missing in model
    /// Maps to the wire string `"ALL"` (see `as_str` / `From<&str>`).
    All,
    #[allow(missing_docs)] // documentation missing in model
    /// Maps to the wire string `"BULK"` (see `as_str` / `From<&str>`).
    Bulk,
    #[allow(missing_docs)] // documentation missing in model
    /// Maps to the wire string `"PUT"` (see `as_str` / `From<&str>`).
    Put,
    /// Unknown contains new variants that have been added since this code was generated.
    Unknown(String),
}
impl std::convert::From<&str> for IngestionMode {
fn from(s: &str) -> Self {
match s {
"ALL" => IngestionMode::All,
"BULK" => IngestionMode::Bulk,
"PUT" => IngestionMode::Put,
other => IngestionMode::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for IngestionMode {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(IngestionMode::from(s))
}
}
impl IngestionMode {
/// Returns the `&str` value of the enum member.
pub fn as_str(&self) -> &str {
match self {
IngestionMode::All => "ALL",
IngestionMode::Bulk => "BULK",
IngestionMode::Put => "PUT",
IngestionMode::Unknown(s) => s.as_ref(),
}
}
/// Returns all the `&str` values of the enum members.
pub fn values() -> &'static [&'static str] {
&["ALL", "BULK", "PUT"]
}
}
impl AsRef<str> for IngestionMode {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>Provides metadata for a dataset.</p>
///
/// Construct instances with [`Dataset::builder`].
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Dataset {
    /// <p>The name of the dataset.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset that you want metadata for.</p>
    pub dataset_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
    pub dataset_group_arn: std::option::Option<std::string::String>,
    /// <p>One of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>Interactions</p>
    /// </li>
    /// <li>
    /// <p>Items</p>
    /// </li>
    /// <li>
    /// <p>Users</p>
    /// </li>
    /// </ul>
    pub dataset_type: std::option::Option<std::string::String>,
    /// <p>The ARN of the associated schema.</p>
    pub schema_arn: std::option::Option<std::string::String>,
    /// <p>The status of the dataset.</p>
    /// <p>A dataset can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>The creation date and time (in Unix time) of the dataset.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>A time stamp that shows when the dataset was updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl Dataset {
    /// <p>The name of the dataset.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        // Borrowing view of the owned string (equivalent to `Option::as_deref`).
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset that you want metadata for.</p>
    pub fn dataset_arn(&self) -> std::option::Option<&str> {
        self.dataset_arn.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
    pub fn dataset_group_arn(&self) -> std::option::Option<&str> {
        self.dataset_group_arn.as_ref().map(String::as_str)
    }
    /// <p>One of the following values:</p>
    /// <ul>
    /// <li>
    /// <p>Interactions</p>
    /// </li>
    /// <li>
    /// <p>Items</p>
    /// </li>
    /// <li>
    /// <p>Users</p>
    /// </li>
    /// </ul>
    pub fn dataset_type(&self) -> std::option::Option<&str> {
        self.dataset_type.as_ref().map(String::as_str)
    }
    /// <p>The ARN of the associated schema.</p>
    pub fn schema_arn(&self) -> std::option::Option<&str> {
        self.schema_arn.as_ref().map(String::as_str)
    }
    /// <p>The status of the dataset.</p>
    /// <p>A dataset can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>The creation date and time (in Unix time) of the dataset.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
    /// <p>A time stamp that shows when the dataset was updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
}
impl std::fmt::Debug for Dataset {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single chained debug-struct expression; field order matches the declaration.
        f.debug_struct("Dataset")
            .field("name", &self.name)
            .field("dataset_arn", &self.dataset_arn)
            .field("dataset_group_arn", &self.dataset_group_arn)
            .field("dataset_type", &self.dataset_type)
            .field("schema_arn", &self.schema_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`Dataset`](crate::model::Dataset)
pub mod dataset {
    /// A builder for [`Dataset`](crate::model::Dataset)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) dataset_arn: std::option::Option<std::string::String>,
        pub(crate) dataset_group_arn: std::option::Option<std::string::String>,
        pub(crate) dataset_type: std::option::Option<std::string::String>,
        pub(crate) schema_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the dataset.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            // Convenience setters delegate to their Option-taking counterparts.
            self.set_name(Some(input.into()))
        }
        /// <p>The name of the dataset.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset that you want metadata for.</p>
        pub fn dataset_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_dataset_arn(Some(input.into()))
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset that you want metadata for.</p>
        pub fn set_dataset_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                dataset_arn: input,
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
        pub fn dataset_group_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_dataset_group_arn(Some(input.into()))
        }
        /// <p>The Amazon Resource Name (ARN) of the dataset group.</p>
        pub fn set_dataset_group_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                dataset_group_arn: input,
                ..self
            }
        }
        /// <p>One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>Interactions</p>
        /// </li>
        /// <li>
        /// <p>Items</p>
        /// </li>
        /// <li>
        /// <p>Users</p>
        /// </li>
        /// </ul>
        pub fn dataset_type(self, input: impl Into<std::string::String>) -> Self {
            self.set_dataset_type(Some(input.into()))
        }
        /// <p>One of the following values:</p>
        /// <ul>
        /// <li>
        /// <p>Interactions</p>
        /// </li>
        /// <li>
        /// <p>Items</p>
        /// </li>
        /// <li>
        /// <p>Users</p>
        /// </li>
        /// </ul>
        pub fn set_dataset_type(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                dataset_type: input,
                ..self
            }
        }
        /// <p>The ARN of the associated schema.</p>
        pub fn schema_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_schema_arn(Some(input.into()))
        }
        /// <p>The ARN of the associated schema.</p>
        pub fn set_schema_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                schema_arn: input,
                ..self
            }
        }
        /// <p>The status of the dataset.</p>
        /// <p>A dataset can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            self.set_status(Some(input.into()))
        }
        /// <p>The status of the dataset.</p>
        /// <p>A dataset can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// <p>The creation date and time (in Unix time) of the dataset.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// <p>The creation date and time (in Unix time) of the dataset.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>A time stamp that shows when the dataset was updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// <p>A time stamp that shows when the dataset was updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`Dataset`](crate::model::Dataset)
        pub fn build(self) -> crate::model::Dataset {
            // Destructure so every field is moved across by name; no validation occurs.
            let Self {
                name,
                dataset_arn,
                dataset_group_arn,
                dataset_type,
                schema_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::Dataset {
                name,
                dataset_arn,
                dataset_group_arn,
                dataset_type,
                schema_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl Dataset {
    /// Creates a new builder-style object to manufacture [`Dataset`](crate::model::Dataset)
    pub fn builder() -> crate::model::dataset::Builder {
        // The return type drives inference; `Default` yields an all-`None` builder.
        Default::default()
    }
}
/// <p>An object that describes the deployment of a solution version.
/// For more information on campaigns, see <a>CreateCampaign</a>.</p>
///
/// Construct instances with [`Campaign::builder`].
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Campaign {
    /// <p>The name of the campaign.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the campaign. </p>
    pub campaign_arn: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of a specific version of the solution.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
    /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second.</p>
    pub min_provisioned_tps: std::option::Option<i32>,
    /// <p>The configuration details of a campaign.</p>
    pub campaign_config: std::option::Option<crate::model::CampaignConfig>,
    /// <p>The status of the campaign.</p>
    /// <p>A campaign can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>If a campaign fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix format) that the campaign was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix format) that the campaign was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>Provides a summary of the properties of a campaign update. For a complete listing, call the
    /// <a>DescribeCampaign</a> API.</p>
    pub latest_campaign_update: std::option::Option<crate::model::CampaignUpdateSummary>,
}
impl Campaign {
    /// <p>The name of the campaign.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        // Borrowing view of the owned string (equivalent to `Option::as_deref`).
        self.name.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of the campaign. </p>
    pub fn campaign_arn(&self) -> std::option::Option<&str> {
        self.campaign_arn.as_ref().map(String::as_str)
    }
    /// <p>The Amazon Resource Name (ARN) of a specific version of the solution.</p>
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        self.solution_version_arn.as_ref().map(String::as_str)
    }
    /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second.</p>
    pub fn min_provisioned_tps(&self) -> std::option::Option<i32> {
        // `Option<i32>` is `Copy`, so the field is returned by value.
        self.min_provisioned_tps
    }
    /// <p>The configuration details of a campaign.</p>
    pub fn campaign_config(&self) -> std::option::Option<&crate::model::CampaignConfig> {
        match &self.campaign_config {
            Some(config) => Some(config),
            None => None,
        }
    }
    /// <p>The status of the campaign.</p>
    /// <p>A campaign can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>If a campaign fails, the reason behind the failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix format) that the campaign was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
    /// <p>The date and time (in Unix format) that the campaign was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
    /// <p>Provides a summary of the properties of a campaign update. For a complete listing, call the
    /// <a>DescribeCampaign</a> API.</p>
    pub fn latest_campaign_update(&self) -> std::option::Option<&crate::model::CampaignUpdateSummary> {
        match &self.latest_campaign_update {
            Some(update) => Some(update),
            None => None,
        }
    }
}
impl std::fmt::Debug for Campaign {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single chained debug-struct expression; field order matches the declaration.
        f.debug_struct("Campaign")
            .field("name", &self.name)
            .field("campaign_arn", &self.campaign_arn)
            .field("solution_version_arn", &self.solution_version_arn)
            .field("min_provisioned_tps", &self.min_provisioned_tps)
            .field("campaign_config", &self.campaign_config)
            .field("status", &self.status)
            .field("failure_reason", &self.failure_reason)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .field("latest_campaign_update", &self.latest_campaign_update)
            .finish()
    }
}
/// See [`Campaign`](crate::model::Campaign)
pub mod campaign {
    /// A builder for [`Campaign`](crate::model::Campaign)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) campaign_arn: std::option::Option<std::string::String>,
        pub(crate) solution_version_arn: std::option::Option<std::string::String>,
        pub(crate) min_provisioned_tps: std::option::Option<i32>,
        pub(crate) campaign_config: std::option::Option<crate::model::CampaignConfig>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) latest_campaign_update: std::option::Option<crate::model::CampaignUpdateSummary>,
    }
    impl Builder {
        /// <p>The name of the campaign.</p>
        pub fn name(self, input: impl Into<std::string::String>) -> Self {
            // Convenience setters delegate to their Option-taking counterparts.
            self.set_name(Some(input.into()))
        }
        /// <p>The name of the campaign.</p>
        pub fn set_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self { name: input, ..self }
        }
        /// <p>The Amazon Resource Name (ARN) of the campaign. </p>
        pub fn campaign_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_campaign_arn(Some(input.into()))
        }
        /// <p>The Amazon Resource Name (ARN) of the campaign. </p>
        pub fn set_campaign_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                campaign_arn: input,
                ..self
            }
        }
        /// <p>The Amazon Resource Name (ARN) of a specific version of the solution.</p>
        pub fn solution_version_arn(self, input: impl Into<std::string::String>) -> Self {
            self.set_solution_version_arn(Some(input.into()))
        }
        /// <p>The Amazon Resource Name (ARN) of a specific version of the solution.</p>
        pub fn set_solution_version_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                solution_version_arn: input,
                ..self
            }
        }
        /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second.</p>
        pub fn min_provisioned_tps(self, input: i32) -> Self {
            self.set_min_provisioned_tps(Some(input))
        }
        /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second.</p>
        pub fn set_min_provisioned_tps(self, input: std::option::Option<i32>) -> Self {
            Self {
                min_provisioned_tps: input,
                ..self
            }
        }
        /// <p>The configuration details of a campaign.</p>
        pub fn campaign_config(self, input: crate::model::CampaignConfig) -> Self {
            self.set_campaign_config(Some(input))
        }
        /// <p>The configuration details of a campaign.</p>
        pub fn set_campaign_config(
            self,
            input: std::option::Option<crate::model::CampaignConfig>,
        ) -> Self {
            Self {
                campaign_config: input,
                ..self
            }
        }
        /// <p>The status of the campaign.</p>
        /// <p>A campaign can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            self.set_status(Some(input.into()))
        }
        /// <p>The status of the campaign.</p>
        /// <p>A campaign can be in one of the following states:</p>
        /// <ul>
        /// <li>
        /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
        /// </li>
        /// <li>
        /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
        /// </li>
        /// </ul>
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// <p>If a campaign fails, the reason behind the failure.</p>
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            self.set_failure_reason(Some(input.into()))
        }
        /// <p>If a campaign fails, the reason behind the failure.</p>
        pub fn set_failure_reason(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix format) that the campaign was created.</p>
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_creation_date_time(Some(input))
        }
        /// <p>The date and time (in Unix format) that the campaign was created.</p>
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// <p>The date and time (in Unix format) that the campaign was last updated.</p>
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_last_updated_date_time(Some(input))
        }
        /// <p>The date and time (in Unix format) that the campaign was last updated.</p>
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// <p>Provides a summary of the properties of a campaign update. For a complete listing, call the
        /// <a>DescribeCampaign</a> API.</p>
        pub fn latest_campaign_update(self, input: crate::model::CampaignUpdateSummary) -> Self {
            self.set_latest_campaign_update(Some(input))
        }
        /// <p>Provides a summary of the properties of a campaign update. For a complete listing, call the
        /// <a>DescribeCampaign</a> API.</p>
        pub fn set_latest_campaign_update(
            self,
            input: std::option::Option<crate::model::CampaignUpdateSummary>,
        ) -> Self {
            Self {
                latest_campaign_update: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`Campaign`](crate::model::Campaign)
        pub fn build(self) -> crate::model::Campaign {
            // Destructure so every field is moved across by name; no validation occurs.
            let Self {
                name,
                campaign_arn,
                solution_version_arn,
                min_provisioned_tps,
                campaign_config,
                status,
                failure_reason,
                creation_date_time,
                last_updated_date_time,
                latest_campaign_update,
            } = self;
            crate::model::Campaign {
                name,
                campaign_arn,
                solution_version_arn,
                min_provisioned_tps,
                campaign_config,
                status,
                failure_reason,
                creation_date_time,
                last_updated_date_time,
                latest_campaign_update,
            }
        }
    }
}
impl Campaign {
    /// Creates a new builder-style object to manufacture [`Campaign`](crate::model::Campaign)
    pub fn builder() -> crate::model::campaign::Builder {
        // The return type drives inference; `Default` yields an all-`None` builder.
        Default::default()
    }
}
/// <p>Provides a summary of the properties of a campaign update. For a complete listing, call the
/// <a>DescribeCampaign</a> API.</p>
///
/// Construct instances with [`CampaignUpdateSummary::builder`].
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CampaignUpdateSummary {
    /// <p>The Amazon Resource Name (ARN) of the deployed solution version.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
    /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second that
    /// Amazon Personalize will support.</p>
    pub min_provisioned_tps: std::option::Option<i32>,
    /// <p>The configuration details of a campaign.</p>
    pub campaign_config: std::option::Option<crate::model::CampaignConfig>,
    /// <p>The status of the campaign update.</p>
    /// <p>A campaign update can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub status: std::option::Option<std::string::String>,
    /// <p>If a campaign update fails, the reason behind the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the campaign update was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the campaign update was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl CampaignUpdateSummary {
    /// <p>The Amazon Resource Name (ARN) of the deployed solution version.</p>
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        // Borrowing view of the owned string (equivalent to `Option::as_deref`).
        self.solution_version_arn.as_ref().map(String::as_str)
    }
    /// <p>Specifies the requested minimum provisioned transactions (recommendations) per second that
    /// Amazon Personalize will support.</p>
    pub fn min_provisioned_tps(&self) -> std::option::Option<i32> {
        // `Option<i32>` is `Copy`, so the field is returned by value.
        self.min_provisioned_tps
    }
    /// <p>The configuration details of a campaign.</p>
    pub fn campaign_config(&self) -> std::option::Option<&crate::model::CampaignConfig> {
        match &self.campaign_config {
            Some(config) => Some(config),
            None => None,
        }
    }
    /// <p>The status of the campaign update.</p>
    /// <p>A campaign update can be in one of the following states:</p>
    /// <ul>
    /// <li>
    /// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
    /// </li>
    /// <li>
    /// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
    /// </li>
    /// </ul>
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(String::as_str)
    }
    /// <p>If a campaign update fails, the reason behind the failure.</p>
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(String::as_str)
    }
    /// <p>The date and time (in Unix time) that the campaign update was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.creation_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
    /// <p>The date and time (in Unix time) that the campaign update was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match &self.last_updated_date_time {
            Some(when) => Some(when),
            None => None,
        }
    }
}
impl std::fmt::Debug for CampaignUpdateSummary {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single chained debug-struct expression; field order matches the declaration.
        f.debug_struct("CampaignUpdateSummary")
            .field("solution_version_arn", &self.solution_version_arn)
            .field("min_provisioned_tps", &self.min_provisioned_tps)
            .field("campaign_config", &self.campaign_config)
            .field("status", &self.status)
            .field("failure_reason", &self.failure_reason)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`CampaignUpdateSummary`](crate::model::CampaignUpdateSummary)
pub mod campaign_update_summary {
    /// A builder for [`CampaignUpdateSummary`](crate::model::CampaignUpdateSummary)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        // Each field mirrors the optional field of the same name on
        // `CampaignUpdateSummary` (defined above); all start as `None` via `Default`.
        pub(crate) solution_version_arn: std::option::Option<std::string::String>,
        pub(crate) min_provisioned_tps: std::option::Option<i32>,
        pub(crate) campaign_config: std::option::Option<crate::model::CampaignConfig>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
impl Builder {
/// <p>The Amazon Resource Name (ARN) of the deployed solution version.</p>
pub fn solution_version_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.solution_version_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the deployed solution version.</p>
pub fn set_solution_version_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.solution_version_arn = input;
self
}
/// <p>Specifies the requested minimum provisioned transactions (recommendations) per second that
/// Amazon Personalize will support.</p>
pub fn min_provisioned_tps(mut self, input: i32) -> Self {
self.min_provisioned_tps = Some(input);
self
}
/// <p>Specifies the requested minimum provisioned transactions (recommendations) per second that
/// Amazon Personalize will support.</p>
pub fn set_min_provisioned_tps(mut self, input: std::option::Option<i32>) -> Self {
self.min_provisioned_tps = input;
self
}
/// <p>The configuration details of a campaign.</p>
pub fn campaign_config(mut self, input: crate::model::CampaignConfig) -> Self {
self.campaign_config = Some(input);
self
}
/// <p>The configuration details of a campaign.</p>
pub fn set_campaign_config(
mut self,
input: std::option::Option<crate::model::CampaignConfig>,
) -> Self {
self.campaign_config = input;
self
}
/// <p>The status of the campaign update.</p>
/// <p>A campaign update can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub fn status(mut self, input: impl Into<std::string::String>) -> Self {
self.status = Some(input.into());
self
}
/// <p>The status of the campaign update.</p>
/// <p>A campaign update can be in one of the following states:</p>
/// <ul>
/// <li>
/// <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED</p>
/// </li>
/// <li>
/// <p>DELETE PENDING > DELETE IN_PROGRESS</p>
/// </li>
/// </ul>
pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
self.status = input;
self
}
/// <p>If a campaign update fails, the reason behind the failure.</p>
pub fn failure_reason(mut self, input: impl Into<std::string::String>) -> Self {
self.failure_reason = Some(input.into());
self
}
/// <p>If a campaign update fails, the reason behind the failure.</p>
pub fn set_failure_reason(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.failure_reason = input;
self
}
/// <p>The date and time (in Unix time) that the campaign update was created.</p>
pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.creation_date_time = Some(input);
self
}
/// <p>The date and time (in Unix time) that the campaign update was created.</p>
pub fn set_creation_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.creation_date_time = input;
self
}
/// <p>The date and time (in Unix time) that the campaign update was last updated.</p>
pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.last_updated_date_time = Some(input);
self
}
/// <p>The date and time (in Unix time) that the campaign update was last updated.</p>
pub fn set_last_updated_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.last_updated_date_time = input;
self
}
/// Consumes the builder and constructs a [`CampaignUpdateSummary`](crate::model::CampaignUpdateSummary)
pub fn build(self) -> crate::model::CampaignUpdateSummary {
crate::model::CampaignUpdateSummary {
solution_version_arn: self.solution_version_arn,
min_provisioned_tps: self.min_provisioned_tps,
campaign_config: self.campaign_config,
status: self.status,
failure_reason: self.failure_reason,
creation_date_time: self.creation_date_time,
last_updated_date_time: self.last_updated_date_time,
}
}
}
}
impl CampaignUpdateSummary {
    /// Returns a fresh, empty builder for
    /// [`CampaignUpdateSummary`](crate::model::CampaignUpdateSummary)
    pub fn builder() -> crate::model::campaign_update_summary::Builder {
        // The return type pins the concrete `Builder`; `Default` fills it with `None`s.
        Default::default()
    }
}
/// <p>Contains information on a batch segment job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchSegmentJob {
    /// <p>The name of the batch segment job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the batch segment job.</p>
    pub batch_segment_job_arn: std::option::Option<std::string::String>,
    /// <p>The ARN of the filter used on the batch segment job.</p>
    pub filter_arn: std::option::Option<std::string::String>,
    /// <p>If the batch segment job failed, the reason for the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the solution version used by the batch segment job to generate batch segments.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
    /// <p>The number of predicted users generated by the batch segment job for each line of input data.</p>
    pub num_results: std::option::Option<i32>,
    /// <p>The Amazon S3 path that leads to the input data used to generate the batch segment job.</p>
    pub job_input: std::option::Option<crate::model::BatchSegmentJobInput>,
    /// <p>The Amazon S3 bucket that contains the output data generated by the batch segment job.</p>
    pub job_output: std::option::Option<crate::model::BatchSegmentJobOutput>,
    /// <p>The ARN of the Amazon Identity and Access Management (IAM) role that requested the batch segment job.</p>
    pub role_arn: std::option::Option<std::string::String>,
    /// <p>The status of the batch segment job. The status is one of the following values:
    /// PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
    pub status: std::option::Option<std::string::String>,
    /// <p>The time at which the batch segment job was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the batch segment job last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl BatchSegmentJob {
    /// The name of the batch segment job, if set.
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(|s| s.as_str())
    }
    /// The Amazon Resource Name (ARN) of the batch segment job, if set.
    pub fn batch_segment_job_arn(&self) -> std::option::Option<&str> {
        self.batch_segment_job_arn.as_ref().map(|s| s.as_str())
    }
    /// The ARN of the filter used on the batch segment job, if set.
    pub fn filter_arn(&self) -> std::option::Option<&str> {
        self.filter_arn.as_ref().map(|s| s.as_str())
    }
    /// The reason for the failure, if the batch segment job failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|s| s.as_str())
    }
    /// The ARN of the solution version used to generate batch segments, if set.
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        self.solution_version_arn.as_ref().map(|s| s.as_str())
    }
    /// The number of predicted users generated for each line of input data, if set.
    pub fn num_results(&self) -> std::option::Option<i32> {
        self.num_results
    }
    /// Borrows the Amazon S3 input configuration of the job, if set.
    pub fn job_input(&self) -> std::option::Option<&crate::model::BatchSegmentJobInput> {
        self.job_input.as_ref()
    }
    /// Borrows the Amazon S3 output configuration of the job, if set.
    pub fn job_output(&self) -> std::option::Option<&crate::model::BatchSegmentJobOutput> {
        self.job_output.as_ref()
    }
    /// The ARN of the IAM role that requested the batch segment job, if set.
    pub fn role_arn(&self) -> std::option::Option<&str> {
        self.role_arn.as_ref().map(|s| s.as_str())
    }
    /// The job status (PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED), if set.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|s| s.as_str())
    }
    /// Borrows the creation timestamp, if set.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Borrows the last-updated timestamp, if set.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for BatchSegmentJob {
    /// Formats every field in declaration order via the `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BatchSegmentJob")
            .field("job_name", &self.job_name)
            .field("batch_segment_job_arn", &self.batch_segment_job_arn)
            .field("filter_arn", &self.filter_arn)
            .field("failure_reason", &self.failure_reason)
            .field("solution_version_arn", &self.solution_version_arn)
            .field("num_results", &self.num_results)
            .field("job_input", &self.job_input)
            .field("job_output", &self.job_output)
            .field("role_arn", &self.role_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// Builder namespace for [`BatchSegmentJob`](crate::model::BatchSegmentJob)
pub mod batch_segment_job {
    /// A builder for [`BatchSegmentJob`](crate::model::BatchSegmentJob)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) job_name: std::option::Option<std::string::String>,
        pub(crate) batch_segment_job_arn: std::option::Option<std::string::String>,
        pub(crate) filter_arn: std::option::Option<std::string::String>,
        pub(crate) failure_reason: std::option::Option<std::string::String>,
        pub(crate) solution_version_arn: std::option::Option<std::string::String>,
        pub(crate) num_results: std::option::Option<i32>,
        pub(crate) job_input: std::option::Option<crate::model::BatchSegmentJobInput>,
        pub(crate) job_output: std::option::Option<crate::model::BatchSegmentJobOutput>,
        pub(crate) role_arn: std::option::Option<std::string::String>,
        pub(crate) status: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// Sets the name of the batch segment job.
        pub fn job_name(self, input: impl Into<std::string::String>) -> Self {
            Self {
                job_name: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the name of the batch segment job.
        pub fn set_job_name(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                job_name: input,
                ..self
            }
        }
        /// Sets the Amazon Resource Name (ARN) of the batch segment job.
        pub fn batch_segment_job_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                batch_segment_job_arn: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the ARN of the batch segment job.
        pub fn set_batch_segment_job_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                batch_segment_job_arn: input,
                ..self
            }
        }
        /// Sets the ARN of the filter used on the batch segment job.
        pub fn filter_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                filter_arn: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the filter ARN.
        pub fn set_filter_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                filter_arn: input,
                ..self
            }
        }
        /// Sets the reason for the failure, if the batch segment job failed.
        pub fn failure_reason(self, input: impl Into<std::string::String>) -> Self {
            Self {
                failure_reason: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the failure reason.
        pub fn set_failure_reason(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                failure_reason: input,
                ..self
            }
        }
        /// Sets the ARN of the solution version used to generate batch segments.
        pub fn solution_version_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                solution_version_arn: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the solution version ARN.
        pub fn set_solution_version_arn(
            self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            Self {
                solution_version_arn: input,
                ..self
            }
        }
        /// Sets the number of predicted users generated for each line of input data.
        pub fn num_results(self, input: i32) -> Self {
            Self {
                num_results: Some(input),
                ..self
            }
        }
        /// Sets or clears the number of predicted users per input line.
        pub fn set_num_results(self, input: std::option::Option<i32>) -> Self {
            Self {
                num_results: input,
                ..self
            }
        }
        /// Sets the Amazon S3 input configuration of the batch segment job.
        pub fn job_input(self, input: crate::model::BatchSegmentJobInput) -> Self {
            Self {
                job_input: Some(input),
                ..self
            }
        }
        /// Sets or clears the Amazon S3 input configuration.
        pub fn set_job_input(
            self,
            input: std::option::Option<crate::model::BatchSegmentJobInput>,
        ) -> Self {
            Self {
                job_input: input,
                ..self
            }
        }
        /// Sets the Amazon S3 output configuration of the batch segment job.
        pub fn job_output(self, input: crate::model::BatchSegmentJobOutput) -> Self {
            Self {
                job_output: Some(input),
                ..self
            }
        }
        /// Sets or clears the Amazon S3 output configuration.
        pub fn set_job_output(
            self,
            input: std::option::Option<crate::model::BatchSegmentJobOutput>,
        ) -> Self {
            Self {
                job_output: input,
                ..self
            }
        }
        /// Sets the ARN of the IAM role that requested the batch segment job.
        pub fn role_arn(self, input: impl Into<std::string::String>) -> Self {
            Self {
                role_arn: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the IAM role ARN.
        pub fn set_role_arn(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                role_arn: input,
                ..self
            }
        }
        /// Sets the job status (PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED).
        pub fn status(self, input: impl Into<std::string::String>) -> Self {
            Self {
                status: Some(input.into()),
                ..self
            }
        }
        /// Sets or clears the job status.
        pub fn set_status(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                status: input,
                ..self
            }
        }
        /// Sets the time at which the batch segment job was created.
        pub fn creation_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                creation_date_time: Some(input),
                ..self
            }
        }
        /// Sets or clears the creation time.
        pub fn set_creation_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                creation_date_time: input,
                ..self
            }
        }
        /// Sets the time at which the batch segment job was last updated.
        pub fn last_updated_date_time(self, input: aws_smithy_types::DateTime) -> Self {
            Self {
                last_updated_date_time: Some(input),
                ..self
            }
        }
        /// Sets or clears the last-updated time.
        pub fn set_last_updated_date_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self {
                last_updated_date_time: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a
        /// [`BatchSegmentJob`](crate::model::BatchSegmentJob)
        pub fn build(self) -> crate::model::BatchSegmentJob {
            // Destructure once so every field moves straight into the model.
            let Builder {
                job_name,
                batch_segment_job_arn,
                filter_arn,
                failure_reason,
                solution_version_arn,
                num_results,
                job_input,
                job_output,
                role_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::BatchSegmentJob {
                job_name,
                batch_segment_job_arn,
                filter_arn,
                failure_reason,
                solution_version_arn,
                num_results,
                job_input,
                job_output,
                role_arn,
                status,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl BatchSegmentJob {
    /// Returns a fresh, empty builder for
    /// [`BatchSegmentJob`](crate::model::BatchSegmentJob)
    pub fn builder() -> crate::model::batch_segment_job::Builder {
        // The return type pins the concrete `Builder`; `Default` fills it with `None`s.
        Default::default()
    }
}
/// <p>The output configuration parameters of a batch segment job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchSegmentJobOutput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    // `None` when no destination has been configured for the job output.
    pub s3_data_destination: std::option::Option<crate::model::S3DataConfig>,
}
impl BatchSegmentJobOutput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    pub fn s3_data_destination(&self) -> std::option::Option<&crate::model::S3DataConfig> {
        // Borrow the inner config without taking ownership.
        self.s3_data_destination.as_ref()
    }
}
impl std::fmt::Debug for BatchSegmentJobOutput {
    /// Formats the single field via the `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BatchSegmentJobOutput")
            .field("s3_data_destination", &self.s3_data_destination)
            .finish()
    }
}
/// Builder namespace for [`BatchSegmentJobOutput`](crate::model::BatchSegmentJobOutput)
pub mod batch_segment_job_output {
    /// A builder for [`BatchSegmentJobOutput`](crate::model::BatchSegmentJobOutput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) s3_data_destination: std::option::Option<crate::model::S3DataConfig>,
    }
    impl Builder {
        /// Sets the configuration details of the Amazon S3 output bucket.
        pub fn s3_data_destination(self, input: crate::model::S3DataConfig) -> Self {
            Self {
                s3_data_destination: Some(input),
            }
        }
        /// Sets or clears the Amazon S3 output bucket configuration.
        pub fn set_s3_data_destination(
            self,
            input: std::option::Option<crate::model::S3DataConfig>,
        ) -> Self {
            Self {
                s3_data_destination: input,
            }
        }
        /// Consumes the builder and constructs a
        /// [`BatchSegmentJobOutput`](crate::model::BatchSegmentJobOutput)
        pub fn build(self) -> crate::model::BatchSegmentJobOutput {
            crate::model::BatchSegmentJobOutput {
                s3_data_destination: self.s3_data_destination,
            }
        }
    }
}
impl BatchSegmentJobOutput {
    /// Returns a fresh, empty builder for
    /// [`BatchSegmentJobOutput`](crate::model::BatchSegmentJobOutput)
    pub fn builder() -> crate::model::batch_segment_job_output::Builder {
        // The return type pins the concrete `Builder`; `Default` fills it with `None`.
        Default::default()
    }
}
/// <p>The input configuration of a batch segment job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchSegmentJobInput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    // `None` when no source has been configured for the job input.
    pub s3_data_source: std::option::Option<crate::model::S3DataConfig>,
}
impl BatchSegmentJobInput {
    /// <p>The configuration details of an Amazon S3 input or output bucket.</p>
    pub fn s3_data_source(&self) -> std::option::Option<&crate::model::S3DataConfig> {
        // Borrow the inner config without taking ownership.
        self.s3_data_source.as_ref()
    }
}
impl std::fmt::Debug for BatchSegmentJobInput {
    /// Formats the single field via the `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BatchSegmentJobInput")
            .field("s3_data_source", &self.s3_data_source)
            .finish()
    }
}
/// Builder namespace for [`BatchSegmentJobInput`](crate::model::BatchSegmentJobInput)
pub mod batch_segment_job_input {
    /// A builder for [`BatchSegmentJobInput`](crate::model::BatchSegmentJobInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) s3_data_source: std::option::Option<crate::model::S3DataConfig>,
    }
    impl Builder {
        /// Sets the configuration details of the Amazon S3 input bucket.
        pub fn s3_data_source(self, input: crate::model::S3DataConfig) -> Self {
            Self {
                s3_data_source: Some(input),
            }
        }
        /// Sets or clears the Amazon S3 input bucket configuration.
        pub fn set_s3_data_source(
            self,
            input: std::option::Option<crate::model::S3DataConfig>,
        ) -> Self {
            Self {
                s3_data_source: input,
            }
        }
        /// Consumes the builder and constructs a
        /// [`BatchSegmentJobInput`](crate::model::BatchSegmentJobInput)
        pub fn build(self) -> crate::model::BatchSegmentJobInput {
            crate::model::BatchSegmentJobInput {
                s3_data_source: self.s3_data_source,
            }
        }
    }
}
impl BatchSegmentJobInput {
    /// Returns a fresh, empty builder for
    /// [`BatchSegmentJobInput`](crate::model::BatchSegmentJobInput)
    pub fn builder() -> crate::model::batch_segment_job_input::Builder {
        // The return type pins the concrete `Builder`; `Default` fills it with `None`.
        Default::default()
    }
}
/// <p>Contains information on a batch inference job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchInferenceJob {
    /// <p>The name of the batch inference job.</p>
    pub job_name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
    pub batch_inference_job_arn: std::option::Option<std::string::String>,
    /// <p>The ARN of the filter used on the batch inference job.</p>
    pub filter_arn: std::option::Option<std::string::String>,
    /// <p>If the batch inference job failed, the reason for the failure.</p>
    pub failure_reason: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the solution version from which the batch inference job
    /// was created.</p>
    pub solution_version_arn: std::option::Option<std::string::String>,
    /// <p>The number of recommendations generated by the batch inference job. This number includes
    /// the error messages generated for failed input records.</p>
    pub num_results: std::option::Option<i32>,
    /// <p>The Amazon S3 path that leads to the input data used to generate the batch inference
    /// job.</p>
    pub job_input: std::option::Option<crate::model::BatchInferenceJobInput>,
    /// <p>The Amazon S3 bucket that contains the output data generated by the batch inference job.</p>
    pub job_output: std::option::Option<crate::model::BatchInferenceJobOutput>,
    /// <p>A string to string map of the configuration details of a batch inference job.</p>
    pub batch_inference_job_config: std::option::Option<crate::model::BatchInferenceJobConfig>,
    /// <p>The ARN of the Amazon Identity and Access Management (IAM) role that requested the batch inference job.</p>
    pub role_arn: std::option::Option<std::string::String>,
    /// <p>The status of the batch inference job. The status is one of the following values:
    /// PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED.</p>
    pub status: std::option::Option<std::string::String>,
    /// <p>The time at which the batch inference job was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The time at which the batch inference job was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl BatchInferenceJob {
    /// The name of the batch inference job, if set.
    pub fn job_name(&self) -> std::option::Option<&str> {
        self.job_name.as_ref().map(|s| s.as_str())
    }
    /// The Amazon Resource Name (ARN) of the batch inference job, if set.
    pub fn batch_inference_job_arn(&self) -> std::option::Option<&str> {
        self.batch_inference_job_arn.as_ref().map(|s| s.as_str())
    }
    /// The ARN of the filter used on the batch inference job, if set.
    pub fn filter_arn(&self) -> std::option::Option<&str> {
        self.filter_arn.as_ref().map(|s| s.as_str())
    }
    /// The reason for the failure, if the batch inference job failed.
    pub fn failure_reason(&self) -> std::option::Option<&str> {
        self.failure_reason.as_ref().map(|s| s.as_str())
    }
    /// The ARN of the solution version the job was created from, if set.
    pub fn solution_version_arn(&self) -> std::option::Option<&str> {
        self.solution_version_arn.as_ref().map(|s| s.as_str())
    }
    /// The number of recommendations generated (including error messages for
    /// failed input records), if set.
    pub fn num_results(&self) -> std::option::Option<i32> {
        self.num_results
    }
    /// Borrows the Amazon S3 input configuration of the job, if set.
    pub fn job_input(&self) -> std::option::Option<&crate::model::BatchInferenceJobInput> {
        self.job_input.as_ref()
    }
    /// Borrows the Amazon S3 output configuration of the job, if set.
    pub fn job_output(&self) -> std::option::Option<&crate::model::BatchInferenceJobOutput> {
        self.job_output.as_ref()
    }
    /// Borrows the configuration details of the batch inference job, if set.
    pub fn batch_inference_job_config(
        &self,
    ) -> std::option::Option<&crate::model::BatchInferenceJobConfig> {
        self.batch_inference_job_config.as_ref()
    }
    /// The ARN of the IAM role that requested the batch inference job, if set.
    pub fn role_arn(&self) -> std::option::Option<&str> {
        self.role_arn.as_ref().map(|s| s.as_str())
    }
    /// The job status (PENDING, IN PROGRESS, ACTIVE, or CREATE FAILED), if set.
    pub fn status(&self) -> std::option::Option<&str> {
        self.status.as_ref().map(|s| s.as_str())
    }
    /// Borrows the creation timestamp, if set.
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// Borrows the last-updated timestamp, if set.
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for BatchInferenceJob {
    /// Formats every field in declaration order via the `debug_struct` builder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BatchInferenceJob")
            .field("job_name", &self.job_name)
            .field("batch_inference_job_arn", &self.batch_inference_job_arn)
            .field("filter_arn", &self.filter_arn)
            .field("failure_reason", &self.failure_reason)
            .field("solution_version_arn", &self.solution_version_arn)
            .field("num_results", &self.num_results)
            .field("job_input", &self.job_input)
            .field("job_output", &self.job_output)
            .field("batch_inference_job_config", &self.batch_inference_job_config)
            .field("role_arn", &self.role_arn)
            .field("status", &self.status)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`BatchInferenceJob`](crate::model::BatchInferenceJob)
pub mod batch_inference_job {
/// A builder for [`BatchInferenceJob`](crate::model::BatchInferenceJob)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) job_name: std::option::Option<std::string::String>,
pub(crate) batch_inference_job_arn: std::option::Option<std::string::String>,
pub(crate) filter_arn: std::option::Option<std::string::String>,
pub(crate) failure_reason: std::option::Option<std::string::String>,
pub(crate) solution_version_arn: std::option::Option<std::string::String>,
pub(crate) num_results: std::option::Option<i32>,
pub(crate) job_input: std::option::Option<crate::model::BatchInferenceJobInput>,
pub(crate) job_output: std::option::Option<crate::model::BatchInferenceJobOutput>,
pub(crate) batch_inference_job_config:
std::option::Option<crate::model::BatchInferenceJobConfig>,
pub(crate) role_arn: std::option::Option<std::string::String>,
pub(crate) status: std::option::Option<std::string::String>,
pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl Builder {
/// <p>The name of the batch inference job.</p>
pub fn job_name(mut self, input: impl Into<std::string::String>) -> Self {
self.job_name = Some(input.into());
self
}
/// <p>The name of the batch inference job.</p>
pub fn set_job_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.job_name = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
pub fn batch_inference_job_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.batch_inference_job_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
pub fn set_batch_inference_job_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.batch_inference_job_arn = input;
self
}
/// <p>The ARN of the filter used on the batch inference job.</p>
pub fn filter_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.filter_arn = Some(input.into());
self
}
/// <p>The ARN of the filter used on the batch inference job.</p>
pub fn set_filter_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.filter_arn = input;
self
}
/// <p>If the batch inference job failed, the reason for the failure.</p>
pub fn failure_reason(mut self, input: impl Into<std::string::String>) -> Self {
self.failure_reason = Some(input.into());
self
}
/// <p>If the batch inference job failed, the reason for the failure.</p>
pub fn set_failure_reason(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.failure_reason = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the solution version from which the batch inference job
/// was created.</p>
pub fn solution_version_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.solution_version_arn = Some(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the solution version from which the batch inference job
/// was created.</p>
pub fn set_solution_version_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.solution_version_arn = input;
self
}
/// <p>The number of recommendations generated by the batch inference job. This number includes
/// the error messages generated for failed input records.</p>
pub fn num_results(mut self, input: i32) -> Self {
self.num_results = Some(input);
self
}
/// <p>The number of recommendations generated by the batch inference job. This number includes
/// the error messages generated for failed input records.</p>
pub fn set_num_results(mut self, input: std::option::Option<i32>) -> Self {
self.num_results = input;
self
}
/// <p>The Amazon S3 path that leads to the input data used to generate the batch inference
/// job.</p>
pub fn job_input(mut self, input: crate::model::BatchInferenceJobInput) -> Self {
self.job_input = Some(input);
self
}
/// <p>The Amazon S3 path that leads to the input data used to generate the batch inference
/// job.</p>
pub fn set_job_input(
mut self,
input: std::option::Option<crate::model::BatchInferenceJobInput>,
) -> Self {
self.job_input = input;
self
}
/// <p>The Amazon S3 bucket that contains the output data generated by the batch inference job.</p>
pub fn job_output(mut self, input: crate::model::BatchInferenceJobOutput) -> Self {
self.job_output = Some(input);
self
}
/// <p>The Amazon S3 bucket that contains the output data generated by the batch inference job.</p>
pub fn set_job_output(
mut self,
input: std::option::Option<crate::model::BatchInferenceJobOutput>,
) -> Self {
self.job_output = input;
self
}
/// <p>A string to string map of the configuration details of a batch inference job.</p>
pub fn batch_inference_job_config(
mut self,
input: crate::model::BatchInferenceJobConfig,
) -> Self {
self.batch_inference_job_config = Some(input);
self
}
/// <p>A string to string map of the configuration details of a batch inference job.</p>
pub fn set_batch_inference_job_config(
mut self,
input: std::option::Option<crate::model::BatchInferenceJobConfig>,
) -> Self {
self.batch_inference_job_config = input;
self
}
/// <p>The ARN of the Amazon Identity and Access Management (IAM) role that requested the batch inference job.</p>
pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.role_arn = Some(input.into());
self
}
/// <p>The ARN of the Amazon Identity and Access Management (IAM) role that requested the batch inference job.</p>
pub fn set_role_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.role_arn = input;
self
}
/// <p>The status of the batch inference job. The status is one of the following values:</p>
/// <ul>
/// <li>
/// <p>PENDING</p>
/// </li>
/// <li>
/// <p>IN PROGRESS</p>
/// </li>
/// <li>
/// <p>ACTIVE</p>
/// </li>
/// <li>
/// <p>CREATE FAILED</p>
/// </li>
/// </ul>
pub fn status(mut self, input: impl Into<std::string::String>) -> Self {
self.status = Some(input.into());
self
}
/// <p>The status of the batch inference job. The status is one of the following values:</p>
/// <ul>
/// <li>
/// <p>PENDING</p>
/// </li>
/// <li>
/// <p>IN PROGRESS</p>
/// </li>
/// <li>
/// <p>ACTIVE</p>
/// </li>
/// <li>
/// <p>CREATE FAILED</p>
/// </li>
/// </ul>
pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
self.status = input;
self
}
/// <p>The time at which the batch inference job was created.</p>
pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.creation_date_time = Some(input);
self
}
/// <p>The time at which the batch inference job was created.</p>
pub fn set_creation_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.creation_date_time = input;
self
}
/// <p>The time at which the batch inference job was last updated.</p>
pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
self.last_updated_date_time = Some(input);
self
}
/// <p>The time at which the batch inference job was last updated.</p>
pub fn set_last_updated_date_time(
mut self,
input: std::option::Option<aws_smithy_types::DateTime>,
) -> Self {
self.last_updated_date_time = input;
self
}
/// Consumes the builder and constructs a [`BatchInferenceJob`](crate::model::BatchInferenceJob)
pub fn build(self) -> crate::model::BatchInferenceJob {
crate::model::BatchInferenceJob {
job_name: self.job_name,
batch_inference_job_arn: self.batch_inference_job_arn,
filter_arn: self.filter_arn,
failure_reason: self.failure_reason,
solution_version_arn: self.solution_version_arn,
num_results: self.num_results,
job_input: self.job_input,
job_output: self.job_output,
batch_inference_job_config: self.batch_inference_job_config,
role_arn: self.role_arn,
status: self.status,
creation_date_time: self.creation_date_time,
last_updated_date_time: self.last_updated_date_time,
}
}
}
}
impl BatchInferenceJob {
    /// Creates a new builder-style object to manufacture [`BatchInferenceJob`](crate::model::BatchInferenceJob)
    pub fn builder() -> crate::model::batch_inference_job::Builder {
        // The concrete Builder type is fixed by the return type.
        Default::default()
    }
}
/// <p>The configuration details of a batch inference job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchInferenceJobConfig {
    /// <p>A string to string map specifying the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items.
    /// See <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>.</p>
    // Optional map; construct values via `crate::model::batch_inference_job_config::Builder`.
    pub item_exploration_config:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
}
impl BatchInferenceJobConfig {
    /// <p>A string to string map specifying the exploration configuration hyperparameters, including <code>explorationWeight</code> and
    /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
    /// recommending items.
    /// See <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>.</p>
    pub fn item_exploration_config(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        // Borrowing accessor: returns a reference into the stored map, if any.
        self.item_exploration_config.as_ref()
    }
}
impl std::fmt::Debug for BatchInferenceJobConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("BatchInferenceJobConfig")
            .field("item_exploration_config", &self.item_exploration_config)
            .finish()
    }
}
/// See [`BatchInferenceJobConfig`](crate::model::BatchInferenceJobConfig)
pub mod batch_inference_job_config {
    /// A builder for [`BatchInferenceJobConfig`](crate::model::BatchInferenceJobConfig)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) item_exploration_config: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
    }
    impl Builder {
        /// Adds a key-value pair to `item_exploration_config`.
        ///
        /// To override the contents of this collection use [`set_item_exploration_config`](Self::set_item_exploration_config).
        ///
        /// <p>A string to string map specifying the exploration configuration hyperparameters, including <code>explorationWeight</code> and
        /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
        /// recommending items.
        /// See <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>.</p>
        pub fn item_exploration_config(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insertion, then add the pair.
            self.item_exploration_config
                .get_or_insert_with(Default::default)
                .insert(k.into(), v.into());
            self
        }
        /// <p>A string to string map specifying the exploration configuration hyperparameters, including <code>explorationWeight</code> and
        /// <code>explorationItemAgeCutOff</code>, you want to use to configure the amount of item exploration Amazon Personalize uses when
        /// recommending items.
        /// See <a href="https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-new-item-USER_PERSONALIZATION.html">User-Personalization</a>.</p>
        pub fn set_item_exploration_config(
            mut self,
            value: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.item_exploration_config = value;
            self
        }
        /// Consumes the builder and constructs a [`BatchInferenceJobConfig`](crate::model::BatchInferenceJobConfig)
        pub fn build(self) -> crate::model::BatchInferenceJobConfig {
            let Self {
                item_exploration_config,
            } = self;
            crate::model::BatchInferenceJobConfig {
                item_exploration_config,
            }
        }
    }
}
impl BatchInferenceJobConfig {
    /// Creates a new builder-style object to manufacture [`BatchInferenceJobConfig`](crate::model::BatchInferenceJobConfig)
    pub fn builder() -> crate::model::batch_inference_job_config::Builder {
        Default::default()
    }
}
/// <p>The output configuration parameters of a batch inference job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchInferenceJobOutput {
    /// <p>Information on the Amazon S3 bucket in which the batch inference job's output is stored.</p>
    // Optional; build via `crate::model::batch_inference_job_output::Builder`.
    pub s3_data_destination: std::option::Option<crate::model::S3DataConfig>,
}
impl BatchInferenceJobOutput {
    /// <p>Information on the Amazon S3 bucket in which the batch inference job's output is stored.</p>
    pub fn s3_data_destination(&self) -> std::option::Option<&crate::model::S3DataConfig> {
        // Borrowing accessor over the optional destination config.
        self.s3_data_destination.as_ref()
    }
}
impl std::fmt::Debug for BatchInferenceJobOutput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("BatchInferenceJobOutput")
            .field("s3_data_destination", &self.s3_data_destination)
            .finish()
    }
}
/// See [`BatchInferenceJobOutput`](crate::model::BatchInferenceJobOutput)
pub mod batch_inference_job_output {
    /// A builder for [`BatchInferenceJobOutput`](crate::model::BatchInferenceJobOutput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) s3_data_destination: std::option::Option<crate::model::S3DataConfig>,
    }
    impl Builder {
        /// <p>Information on the Amazon S3 bucket in which the batch inference job's output is stored.</p>
        pub fn s3_data_destination(mut self, input: crate::model::S3DataConfig) -> Self {
            let wrapped = Some(input);
            self.s3_data_destination = wrapped;
            self
        }
        /// <p>Information on the Amazon S3 bucket in which the batch inference job's output is stored.</p>
        pub fn set_s3_data_destination(
            mut self,
            value: std::option::Option<crate::model::S3DataConfig>,
        ) -> Self {
            self.s3_data_destination = value;
            self
        }
        /// Consumes the builder and constructs a [`BatchInferenceJobOutput`](crate::model::BatchInferenceJobOutput)
        pub fn build(self) -> crate::model::BatchInferenceJobOutput {
            let Self { s3_data_destination } = self;
            crate::model::BatchInferenceJobOutput { s3_data_destination }
        }
    }
}
impl BatchInferenceJobOutput {
    /// Creates a new builder-style object to manufacture [`BatchInferenceJobOutput`](crate::model::BatchInferenceJobOutput)
    pub fn builder() -> crate::model::batch_inference_job_output::Builder {
        Default::default()
    }
}
/// <p>The input configuration of a batch inference job.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct BatchInferenceJobInput {
    /// <p>The URI of the Amazon S3 location that contains your input data. The Amazon S3 bucket must be in the
    /// same region as the API endpoint you are calling.</p>
    // Optional; build via `crate::model::batch_inference_job_input::Builder`.
    pub s3_data_source: std::option::Option<crate::model::S3DataConfig>,
}
impl BatchInferenceJobInput {
    /// <p>The URI of the Amazon S3 location that contains your input data. The Amazon S3 bucket must be in the
    /// same region as the API endpoint you are calling.</p>
    pub fn s3_data_source(&self) -> std::option::Option<&crate::model::S3DataConfig> {
        // Borrowing accessor over the optional source config.
        self.s3_data_source.as_ref()
    }
}
impl std::fmt::Debug for BatchInferenceJobInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("BatchInferenceJobInput")
            .field("s3_data_source", &self.s3_data_source)
            .finish()
    }
}
/// See [`BatchInferenceJobInput`](crate::model::BatchInferenceJobInput)
pub mod batch_inference_job_input {
    /// A builder for [`BatchInferenceJobInput`](crate::model::BatchInferenceJobInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) s3_data_source: std::option::Option<crate::model::S3DataConfig>,
    }
    impl Builder {
        /// <p>The URI of the Amazon S3 location that contains your input data. The Amazon S3 bucket must be in the
        /// same region as the API endpoint you are calling.</p>
        pub fn s3_data_source(mut self, input: crate::model::S3DataConfig) -> Self {
            let wrapped = Some(input);
            self.s3_data_source = wrapped;
            self
        }
        /// <p>The URI of the Amazon S3 location that contains your input data. The Amazon S3 bucket must be in the
        /// same region as the API endpoint you are calling.</p>
        pub fn set_s3_data_source(
            mut self,
            value: std::option::Option<crate::model::S3DataConfig>,
        ) -> Self {
            self.s3_data_source = value;
            self
        }
        /// Consumes the builder and constructs a [`BatchInferenceJobInput`](crate::model::BatchInferenceJobInput)
        pub fn build(self) -> crate::model::BatchInferenceJobInput {
            let Self { s3_data_source } = self;
            crate::model::BatchInferenceJobInput { s3_data_source }
        }
    }
}
impl BatchInferenceJobInput {
    /// Creates a new builder-style object to manufacture [`BatchInferenceJobInput`](crate::model::BatchInferenceJobInput)
    pub fn builder() -> crate::model::batch_inference_job_input::Builder {
        Default::default()
    }
}
/// <p>Describes a custom algorithm.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Algorithm {
    // NOTE(review): every field is optional — presumably this mirrors a service
    // response where any attribute may be omitted; confirm against the wire model.
    /// <p>The name of the algorithm.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the algorithm.</p>
    pub algorithm_arn: std::option::Option<std::string::String>,
    /// <p>The URI of the Docker container for the algorithm image.</p>
    pub algorithm_image: std::option::Option<crate::model::AlgorithmImage>,
    /// <p>Specifies the default hyperparameters.</p>
    pub default_hyper_parameters:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>Specifies the default hyperparameters, their ranges, and whether they
    /// are tunable. A tunable hyperparameter can
    /// have its value determined during hyperparameter optimization (HPO).</p>
    pub default_hyper_parameter_ranges:
        std::option::Option<crate::model::DefaultHyperParameterRanges>,
    /// <p>Specifies the default maximum number of training jobs and parallel training jobs.</p>
    pub default_resource_config:
        std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
    /// <p>The training input mode.</p>
    pub training_input_mode: std::option::Option<std::string::String>,
    /// <p>The Amazon Resource Name (ARN) of the role.</p>
    pub role_arn: std::option::Option<std::string::String>,
    /// <p>The date and time (in Unix time) that the algorithm was created.</p>
    pub creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time (in Unix time) that the algorithm was last updated.</p>
    pub last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
}
impl Algorithm {
    // Read-only accessors: each borrows from `self` (strings via `as_deref`,
    // structured fields via `as_ref`); callers clone if they need owned values.
    /// <p>The name of the algorithm.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the algorithm.</p>
    pub fn algorithm_arn(&self) -> std::option::Option<&str> {
        self.algorithm_arn.as_deref()
    }
    /// <p>The URI of the Docker container for the algorithm image.</p>
    pub fn algorithm_image(&self) -> std::option::Option<&crate::model::AlgorithmImage> {
        self.algorithm_image.as_ref()
    }
    /// <p>Specifies the default hyperparameters.</p>
    pub fn default_hyper_parameters(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.default_hyper_parameters.as_ref()
    }
    /// <p>Specifies the default hyperparameters, their ranges, and whether they
    /// are tunable. A tunable hyperparameter can
    /// have its value determined during hyperparameter optimization (HPO).</p>
    pub fn default_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&crate::model::DefaultHyperParameterRanges> {
        self.default_hyper_parameter_ranges.as_ref()
    }
    /// <p>Specifies the default maximum number of training jobs and parallel training jobs.</p>
    pub fn default_resource_config(
        &self,
    ) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
    {
        self.default_resource_config.as_ref()
    }
    /// <p>The training input mode.</p>
    pub fn training_input_mode(&self) -> std::option::Option<&str> {
        self.training_input_mode.as_deref()
    }
    /// <p>The Amazon Resource Name (ARN) of the role.</p>
    pub fn role_arn(&self) -> std::option::Option<&str> {
        self.role_arn.as_deref()
    }
    /// <p>The date and time (in Unix time) that the algorithm was created.</p>
    pub fn creation_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.creation_date_time.as_ref()
    }
    /// <p>The date and time (in Unix time) that the algorithm was last updated.</p>
    pub fn last_updated_date_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        self.last_updated_date_time.as_ref()
    }
}
impl std::fmt::Debug for Algorithm {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("Algorithm")
            .field("name", &self.name)
            .field("algorithm_arn", &self.algorithm_arn)
            .field("algorithm_image", &self.algorithm_image)
            .field("default_hyper_parameters", &self.default_hyper_parameters)
            .field(
                "default_hyper_parameter_ranges",
                &self.default_hyper_parameter_ranges,
            )
            .field("default_resource_config", &self.default_resource_config)
            .field("training_input_mode", &self.training_input_mode)
            .field("role_arn", &self.role_arn)
            .field("creation_date_time", &self.creation_date_time)
            .field("last_updated_date_time", &self.last_updated_date_time)
            .finish()
    }
}
/// See [`Algorithm`](crate::model::Algorithm)
pub mod algorithm {
    /// A builder for [`Algorithm`](crate::model::Algorithm)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) algorithm_arn: std::option::Option<std::string::String>,
        pub(crate) algorithm_image: std::option::Option<crate::model::AlgorithmImage>,
        pub(crate) default_hyper_parameters: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) default_hyper_parameter_ranges:
            std::option::Option<crate::model::DefaultHyperParameterRanges>,
        pub(crate) default_resource_config: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) training_input_mode: std::option::Option<std::string::String>,
        pub(crate) role_arn: std::option::Option<std::string::String>,
        pub(crate) creation_date_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) last_updated_date_time: std::option::Option<aws_smithy_types::DateTime>,
    }
    impl Builder {
        /// <p>The name of the algorithm.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            let name: std::string::String = input.into();
            self.name = Some(name);
            self
        }
        /// <p>The name of the algorithm.</p>
        pub fn set_name(mut self, value: std::option::Option<std::string::String>) -> Self {
            self.name = value;
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the algorithm.</p>
        pub fn algorithm_arn(mut self, input: impl Into<std::string::String>) -> Self {
            let arn: std::string::String = input.into();
            self.algorithm_arn = Some(arn);
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the algorithm.</p>
        pub fn set_algorithm_arn(
            mut self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            self.algorithm_arn = value;
            self
        }
        /// <p>The URI of the Docker container for the algorithm image.</p>
        pub fn algorithm_image(mut self, input: crate::model::AlgorithmImage) -> Self {
            let wrapped = Some(input);
            self.algorithm_image = wrapped;
            self
        }
        /// <p>The URI of the Docker container for the algorithm image.</p>
        pub fn set_algorithm_image(
            mut self,
            value: std::option::Option<crate::model::AlgorithmImage>,
        ) -> Self {
            self.algorithm_image = value;
            self
        }
        /// Adds a key-value pair to `default_hyper_parameters`.
        ///
        /// To override the contents of this collection use [`set_default_hyper_parameters`](Self::set_default_hyper_parameters).
        ///
        /// <p>Specifies the default hyperparameters.</p>
        pub fn default_hyper_parameters(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insertion, then add the pair.
            self.default_hyper_parameters
                .get_or_insert_with(Default::default)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Specifies the default hyperparameters.</p>
        pub fn set_default_hyper_parameters(
            mut self,
            value: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.default_hyper_parameters = value;
            self
        }
        /// <p>Specifies the default hyperparameters, their ranges, and whether they
        /// are tunable. A tunable hyperparameter can
        /// have its value determined during hyperparameter optimization (HPO).</p>
        pub fn default_hyper_parameter_ranges(
            mut self,
            input: crate::model::DefaultHyperParameterRanges,
        ) -> Self {
            let wrapped = Some(input);
            self.default_hyper_parameter_ranges = wrapped;
            self
        }
        /// <p>Specifies the default hyperparameters, their ranges, and whether they
        /// are tunable. A tunable hyperparameter can
        /// have its value determined during hyperparameter optimization (HPO).</p>
        pub fn set_default_hyper_parameter_ranges(
            mut self,
            value: std::option::Option<crate::model::DefaultHyperParameterRanges>,
        ) -> Self {
            self.default_hyper_parameter_ranges = value;
            self
        }
        /// Adds a key-value pair to `default_resource_config`.
        ///
        /// To override the contents of this collection use [`set_default_resource_config`](Self::set_default_resource_config).
        ///
        /// <p>Specifies the default maximum number of training jobs and parallel training jobs.</p>
        pub fn default_resource_config(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            // Lazily create the map on first insertion, then add the pair.
            self.default_resource_config
                .get_or_insert_with(Default::default)
                .insert(k.into(), v.into());
            self
        }
        /// <p>Specifies the default maximum number of training jobs and parallel training jobs.</p>
        pub fn set_default_resource_config(
            mut self,
            value: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.default_resource_config = value;
            self
        }
        /// <p>The training input mode.</p>
        pub fn training_input_mode(mut self, input: impl Into<std::string::String>) -> Self {
            let mode: std::string::String = input.into();
            self.training_input_mode = Some(mode);
            self
        }
        /// <p>The training input mode.</p>
        pub fn set_training_input_mode(
            mut self,
            value: std::option::Option<std::string::String>,
        ) -> Self {
            self.training_input_mode = value;
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the role.</p>
        pub fn role_arn(mut self, input: impl Into<std::string::String>) -> Self {
            let arn: std::string::String = input.into();
            self.role_arn = Some(arn);
            self
        }
        /// <p>The Amazon Resource Name (ARN) of the role.</p>
        pub fn set_role_arn(mut self, value: std::option::Option<std::string::String>) -> Self {
            self.role_arn = value;
            self
        }
        /// <p>The date and time (in Unix time) that the algorithm was created.</p>
        pub fn creation_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            let wrapped = Some(input);
            self.creation_date_time = wrapped;
            self
        }
        /// <p>The date and time (in Unix time) that the algorithm was created.</p>
        pub fn set_creation_date_time(
            mut self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.creation_date_time = value;
            self
        }
        /// <p>The date and time (in Unix time) that the algorithm was last updated.</p>
        pub fn last_updated_date_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            let wrapped = Some(input);
            self.last_updated_date_time = wrapped;
            self
        }
        /// <p>The date and time (in Unix time) that the algorithm was last updated.</p>
        pub fn set_last_updated_date_time(
            mut self,
            value: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.last_updated_date_time = value;
            self
        }
        /// Consumes the builder and constructs a [`Algorithm`](crate::model::Algorithm)
        pub fn build(self) -> crate::model::Algorithm {
            // Destructure the builder and move every field into the model type.
            let Self {
                name,
                algorithm_arn,
                algorithm_image,
                default_hyper_parameters,
                default_hyper_parameter_ranges,
                default_resource_config,
                training_input_mode,
                role_arn,
                creation_date_time,
                last_updated_date_time,
            } = self;
            crate::model::Algorithm {
                name,
                algorithm_arn,
                algorithm_image,
                default_hyper_parameters,
                default_hyper_parameter_ranges,
                default_resource_config,
                training_input_mode,
                role_arn,
                creation_date_time,
                last_updated_date_time,
            }
        }
    }
}
impl Algorithm {
    /// Creates a new builder-style object to manufacture [`Algorithm`](crate::model::Algorithm)
    pub fn builder() -> crate::model::algorithm::Builder {
        Default::default()
    }
}
/// <p>Specifies the hyperparameters and their default ranges.
/// Hyperparameters can be categorical, continuous, or integer-valued.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DefaultHyperParameterRanges {
    // Each range list is optional; build via
    // `crate::model::default_hyper_parameter_ranges::Builder`.
    /// <p>The integer-valued hyperparameters and their default ranges.</p>
    pub integer_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::DefaultIntegerHyperParameterRange>>,
    /// <p>The continuous hyperparameters and their default ranges.</p>
    pub continuous_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::DefaultContinuousHyperParameterRange>>,
    /// <p>The categorical hyperparameters and their default ranges.</p>
    pub categorical_hyper_parameter_ranges:
        std::option::Option<std::vec::Vec<crate::model::DefaultCategoricalHyperParameterRange>>,
}
impl DefaultHyperParameterRanges {
    // Read-only accessors: each yields a borrowed slice view of the stored Vec.
    /// <p>The integer-valued hyperparameters and their default ranges.</p>
    pub fn integer_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::DefaultIntegerHyperParameterRange]> {
        self.integer_hyper_parameter_ranges.as_deref()
    }
    /// <p>The continuous hyperparameters and their default ranges.</p>
    pub fn continuous_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::DefaultContinuousHyperParameterRange]> {
        self.continuous_hyper_parameter_ranges.as_deref()
    }
    /// <p>The categorical hyperparameters and their default ranges.</p>
    pub fn categorical_hyper_parameter_ranges(
        &self,
    ) -> std::option::Option<&[crate::model::DefaultCategoricalHyperParameterRange]> {
        self.categorical_hyper_parameter_ranges.as_deref()
    }
}
impl std::fmt::Debug for DefaultHyperParameterRanges {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("DefaultHyperParameterRanges")
            .field(
                "integer_hyper_parameter_ranges",
                &self.integer_hyper_parameter_ranges,
            )
            .field(
                "continuous_hyper_parameter_ranges",
                &self.continuous_hyper_parameter_ranges,
            )
            .field(
                "categorical_hyper_parameter_ranges",
                &self.categorical_hyper_parameter_ranges,
            )
            .finish()
    }
}
/// See [`DefaultHyperParameterRanges`](crate::model::DefaultHyperParameterRanges)
pub mod default_hyper_parameter_ranges {
    /// A builder for [`DefaultHyperParameterRanges`](crate::model::DefaultHyperParameterRanges)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) integer_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::DefaultIntegerHyperParameterRange>>,
        pub(crate) continuous_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::DefaultContinuousHyperParameterRange>>,
        pub(crate) categorical_hyper_parameter_ranges:
            std::option::Option<std::vec::Vec<crate::model::DefaultCategoricalHyperParameterRange>>,
    }
    impl Builder {
        /// Appends an item to `integer_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_integer_hyper_parameter_ranges`](Self::set_integer_hyper_parameter_ranges).
        ///
        /// <p>The integer-valued hyperparameters and their default ranges.</p>
        pub fn integer_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::DefaultIntegerHyperParameterRange>,
        ) -> Self {
            // Lazily create the vector on first append.
            self.integer_hyper_parameter_ranges
                .get_or_insert_with(Default::default)
                .push(input.into());
            self
        }
        /// <p>The integer-valued hyperparameters and their default ranges.</p>
        pub fn set_integer_hyper_parameter_ranges(
            mut self,
            value: std::option::Option<
                std::vec::Vec<crate::model::DefaultIntegerHyperParameterRange>,
            >,
        ) -> Self {
            self.integer_hyper_parameter_ranges = value;
            self
        }
        /// Appends an item to `continuous_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_continuous_hyper_parameter_ranges`](Self::set_continuous_hyper_parameter_ranges).
        ///
        /// <p>The continuous hyperparameters and their default ranges.</p>
        pub fn continuous_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::DefaultContinuousHyperParameterRange>,
        ) -> Self {
            // Lazily create the vector on first append.
            self.continuous_hyper_parameter_ranges
                .get_or_insert_with(Default::default)
                .push(input.into());
            self
        }
        /// <p>The continuous hyperparameters and their default ranges.</p>
        pub fn set_continuous_hyper_parameter_ranges(
            mut self,
            value: std::option::Option<
                std::vec::Vec<crate::model::DefaultContinuousHyperParameterRange>,
            >,
        ) -> Self {
            self.continuous_hyper_parameter_ranges = value;
            self
        }
        /// Appends an item to `categorical_hyper_parameter_ranges`.
        ///
        /// To override the contents of this collection use [`set_categorical_hyper_parameter_ranges`](Self::set_categorical_hyper_parameter_ranges).
        ///
        /// <p>The categorical hyperparameters and their default ranges.</p>
        pub fn categorical_hyper_parameter_ranges(
            mut self,
            input: impl Into<crate::model::DefaultCategoricalHyperParameterRange>,
        ) -> Self {
            // Lazily create the vector on first append.
            self.categorical_hyper_parameter_ranges
                .get_or_insert_with(Default::default)
                .push(input.into());
            self
        }
        /// <p>The categorical hyperparameters and their default ranges.</p>
        pub fn set_categorical_hyper_parameter_ranges(
            mut self,
            value: std::option::Option<
                std::vec::Vec<crate::model::DefaultCategoricalHyperParameterRange>,
            >,
        ) -> Self {
            self.categorical_hyper_parameter_ranges = value;
            self
        }
        /// Consumes the builder and constructs a [`DefaultHyperParameterRanges`](crate::model::DefaultHyperParameterRanges)
        pub fn build(self) -> crate::model::DefaultHyperParameterRanges {
            let Self {
                integer_hyper_parameter_ranges,
                continuous_hyper_parameter_ranges,
                categorical_hyper_parameter_ranges,
            } = self;
            crate::model::DefaultHyperParameterRanges {
                integer_hyper_parameter_ranges,
                continuous_hyper_parameter_ranges,
                categorical_hyper_parameter_ranges,
            }
        }
    }
}
impl DefaultHyperParameterRanges {
    /// Creates a new builder-style object to manufacture [`DefaultHyperParameterRanges`](crate::model::DefaultHyperParameterRanges)
    pub fn builder() -> crate::model::default_hyper_parameter_ranges::Builder {
        Default::default()
    }
}
/// <p>Provides the name and default range of a categorical hyperparameter
/// and whether the hyperparameter is tunable. A tunable hyperparameter can
/// have its value determined during hyperparameter optimization (HPO).</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DefaultCategoricalHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>A list of the categories for the hyperparameter.</p>
    pub values: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>Whether the hyperparameter is tunable.</p>
    // Plain bool (not Option): the builder defaults it to `false` when unset.
    pub is_tunable: bool,
}
impl DefaultCategoricalHyperParameterRange {
    // Read-only accessors; string/list fields are borrowed, the bool is copied.
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_deref()
    }
    /// <p>A list of the categories for the hyperparameter.</p>
    pub fn values(&self) -> std::option::Option<&[std::string::String]> {
        self.values.as_deref()
    }
    /// <p>Whether the hyperparameter is tunable.</p>
    pub fn is_tunable(&self) -> bool {
        self.is_tunable
    }
}
impl std::fmt::Debug for DefaultCategoricalHyperParameterRange {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder rather than binding a mutable formatter.
        f.debug_struct("DefaultCategoricalHyperParameterRange")
            .field("name", &self.name)
            .field("values", &self.values)
            .field("is_tunable", &self.is_tunable)
            .finish()
    }
}
/// See [`DefaultCategoricalHyperParameterRange`](crate::model::DefaultCategoricalHyperParameterRange)
pub mod default_categorical_hyper_parameter_range {
    /// A builder for [`DefaultCategoricalHyperParameterRange`](crate::model::DefaultCategoricalHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) values: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) is_tunable: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            let name: std::string::String = input.into();
            self.name = Some(name);
            self
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(mut self, value: std::option::Option<std::string::String>) -> Self {
            self.name = value;
            self
        }
        /// Appends an item to `values`.
        ///
        /// To override the contents of this collection use [`set_values`](Self::set_values).
        ///
        /// <p>A list of the categories for the hyperparameter.</p>
        pub fn values(mut self, input: impl Into<std::string::String>) -> Self {
            // Lazily create the vector on first append.
            self.values
                .get_or_insert_with(Default::default)
                .push(input.into());
            self
        }
        /// <p>A list of the categories for the hyperparameter.</p>
        pub fn set_values(
            mut self,
            value: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            self.values = value;
            self
        }
        /// <p>Whether the hyperparameter is tunable.</p>
        pub fn is_tunable(mut self, input: bool) -> Self {
            let wrapped = Some(input);
            self.is_tunable = wrapped;
            self
        }
        /// <p>Whether the hyperparameter is tunable.</p>
        pub fn set_is_tunable(mut self, value: std::option::Option<bool>) -> Self {
            self.is_tunable = value;
            self
        }
        /// Consumes the builder and constructs a [`DefaultCategoricalHyperParameterRange`](crate::model::DefaultCategoricalHyperParameterRange)
        pub fn build(self) -> crate::model::DefaultCategoricalHyperParameterRange {
            let Self {
                name,
                values,
                is_tunable,
            } = self;
            crate::model::DefaultCategoricalHyperParameterRange {
                name,
                values,
                // An unset flag defaults to `false`.
                is_tunable: is_tunable.unwrap_or(false),
            }
        }
    }
}
impl DefaultCategoricalHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`DefaultCategoricalHyperParameterRange`](crate::model::DefaultCategoricalHyperParameterRange)
    pub fn builder() -> crate::model::default_categorical_hyper_parameter_range::Builder {
        // The declared return type selects the builder's `Default` impl.
        Default::default()
    }
}
/// <p>Provides the name and default range of a continuous hyperparameter
/// and whether the hyperparameter is tunable. A tunable hyperparameter can
/// have its value determined during hyperparameter optimization (HPO).</p>
// NOTE(review): appears to be code-generated. Fields are public, but
// `#[non_exhaustive]` means external crates must construct instances via the
// builder (`DefaultContinuousHyperParameterRange::builder()`).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DefaultContinuousHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub min_value: f64,
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub max_value: f64,
    /// <p>Whether the hyperparameter is tunable.</p>
    pub is_tunable: bool,
}
impl DefaultContinuousHyperParameterRange {
    /// <p>Whether the hyperparameter is tunable.</p>
    pub fn is_tunable(&self) -> bool {
        self.is_tunable
    }
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub fn min_value(&self) -> f64 {
        self.min_value
    }
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub fn max_value(&self) -> f64 {
        self.max_value
    }
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        // Equivalent to `as_deref()`: borrow the inner `String` as `&str`.
        self.name.as_ref().map(|name| name.as_str())
    }
}
impl std::fmt::Debug for DefaultContinuousHyperParameterRange {
    /// Formats the range for diagnostics, listing every field by name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder directly; output is identical to
        // the variable-based form.
        f.debug_struct("DefaultContinuousHyperParameterRange")
            .field("name", &self.name)
            .field("min_value", &self.min_value)
            .field("max_value", &self.max_value)
            .field("is_tunable", &self.is_tunable)
            .finish()
    }
}
/// See [`DefaultContinuousHyperParameterRange`](crate::model::DefaultContinuousHyperParameterRange)
pub mod default_continuous_hyper_parameter_range {
    /// A builder for [`DefaultContinuousHyperParameterRange`](crate::model::DefaultContinuousHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) min_value: std::option::Option<f64>,
        pub(crate) max_value: std::option::Option<f64>,
        pub(crate) is_tunable: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            self.name = std::option::Option::Some(input.into());
            self
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn min_value(mut self, input: f64) -> Self {
            self.min_value = std::option::Option::Some(input);
            self
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn set_min_value(mut self, input: std::option::Option<f64>) -> Self {
            self.min_value = input;
            self
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn max_value(mut self, input: f64) -> Self {
            self.max_value = std::option::Option::Some(input);
            self
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn set_max_value(mut self, input: std::option::Option<f64>) -> Self {
            self.max_value = input;
            self
        }
        /// <p>Whether the hyperparameter is tunable.</p>
        pub fn is_tunable(mut self, input: bool) -> Self {
            self.is_tunable = std::option::Option::Some(input);
            self
        }
        /// <p>Whether the hyperparameter is tunable.</p>
        pub fn set_is_tunable(mut self, input: std::option::Option<bool>) -> Self {
            self.is_tunable = input;
            self
        }
        /// Consumes the builder and constructs a [`DefaultContinuousHyperParameterRange`](crate::model::DefaultContinuousHyperParameterRange)
        pub fn build(self) -> crate::model::DefaultContinuousHyperParameterRange {
            // Unset numeric/bool fields fall back to their zero defaults,
            // matching `unwrap_or_default()`.
            crate::model::DefaultContinuousHyperParameterRange {
                name: self.name,
                min_value: self.min_value.unwrap_or(0.0),
                max_value: self.max_value.unwrap_or(0.0),
                is_tunable: self.is_tunable.unwrap_or(false),
            }
        }
    }
}
impl DefaultContinuousHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`DefaultContinuousHyperParameterRange`](crate::model::DefaultContinuousHyperParameterRange)
    pub fn builder() -> crate::model::default_continuous_hyper_parameter_range::Builder {
        // The declared return type selects the builder's `Default` impl.
        Default::default()
    }
}
/// <p>Provides the name and default range of an integer-valued hyperparameter
/// and whether the hyperparameter is tunable. A tunable hyperparameter can
/// have its value determined during hyperparameter optimization (HPO).</p>
// NOTE(review): appears to be code-generated. Fields are public, but
// `#[non_exhaustive]` means external crates must construct instances via the
// builder (`DefaultIntegerHyperParameterRange::builder()`).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DefaultIntegerHyperParameterRange {
    /// <p>The name of the hyperparameter.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub min_value: i32,
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub max_value: i32,
    /// <p>Indicates whether the hyperparameter is tunable.</p>
    pub is_tunable: bool,
}
impl DefaultIntegerHyperParameterRange {
    /// <p>Indicates whether the hyperparameter is tunable.</p>
    pub fn is_tunable(&self) -> bool {
        self.is_tunable
    }
    /// <p>The minimum allowable value for the hyperparameter.</p>
    pub fn min_value(&self) -> i32 {
        self.min_value
    }
    /// <p>The maximum allowable value for the hyperparameter.</p>
    pub fn max_value(&self) -> i32 {
        self.max_value
    }
    /// <p>The name of the hyperparameter.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        // Equivalent to `as_deref()`: borrow the inner `String` as `&str`.
        self.name.as_ref().map(|name| name.as_str())
    }
}
impl std::fmt::Debug for DefaultIntegerHyperParameterRange {
    /// Formats the range for diagnostics, listing every field by name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder directly; output is identical to
        // the variable-based form.
        f.debug_struct("DefaultIntegerHyperParameterRange")
            .field("name", &self.name)
            .field("min_value", &self.min_value)
            .field("max_value", &self.max_value)
            .field("is_tunable", &self.is_tunable)
            .finish()
    }
}
/// See [`DefaultIntegerHyperParameterRange`](crate::model::DefaultIntegerHyperParameterRange)
pub mod default_integer_hyper_parameter_range {
    /// A builder for [`DefaultIntegerHyperParameterRange`](crate::model::DefaultIntegerHyperParameterRange)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) min_value: std::option::Option<i32>,
        pub(crate) max_value: std::option::Option<i32>,
        pub(crate) is_tunable: std::option::Option<bool>,
    }
    impl Builder {
        /// <p>The name of the hyperparameter.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            self.name = std::option::Option::Some(input.into());
            self
        }
        /// <p>The name of the hyperparameter.</p>
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn min_value(mut self, input: i32) -> Self {
            self.min_value = std::option::Option::Some(input);
            self
        }
        /// <p>The minimum allowable value for the hyperparameter.</p>
        pub fn set_min_value(mut self, input: std::option::Option<i32>) -> Self {
            self.min_value = input;
            self
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn max_value(mut self, input: i32) -> Self {
            self.max_value = std::option::Option::Some(input);
            self
        }
        /// <p>The maximum allowable value for the hyperparameter.</p>
        pub fn set_max_value(mut self, input: std::option::Option<i32>) -> Self {
            self.max_value = input;
            self
        }
        /// <p>Indicates whether the hyperparameter is tunable.</p>
        pub fn is_tunable(mut self, input: bool) -> Self {
            self.is_tunable = std::option::Option::Some(input);
            self
        }
        /// <p>Indicates whether the hyperparameter is tunable.</p>
        pub fn set_is_tunable(mut self, input: std::option::Option<bool>) -> Self {
            self.is_tunable = input;
            self
        }
        /// Consumes the builder and constructs a [`DefaultIntegerHyperParameterRange`](crate::model::DefaultIntegerHyperParameterRange)
        pub fn build(self) -> crate::model::DefaultIntegerHyperParameterRange {
            // Unset numeric/bool fields fall back to their zero defaults,
            // matching `unwrap_or_default()`.
            crate::model::DefaultIntegerHyperParameterRange {
                name: self.name,
                min_value: self.min_value.unwrap_or(0),
                max_value: self.max_value.unwrap_or(0),
                is_tunable: self.is_tunable.unwrap_or(false),
            }
        }
    }
}
impl DefaultIntegerHyperParameterRange {
    /// Creates a new builder-style object to manufacture [`DefaultIntegerHyperParameterRange`](crate::model::DefaultIntegerHyperParameterRange)
    pub fn builder() -> crate::model::default_integer_hyper_parameter_range::Builder {
        // The declared return type selects the builder's `Default` impl.
        Default::default()
    }
}
/// <p>Describes an algorithm image.</p>
// NOTE(review): appears to be code-generated. Fields are public, but
// `#[non_exhaustive]` means external crates must construct instances via the
// builder (`AlgorithmImage::builder()`).
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AlgorithmImage {
    /// <p>The name of the algorithm image.</p>
    pub name: std::option::Option<std::string::String>,
    /// <p>The URI of the Docker container for the algorithm image.</p>
    pub docker_uri: std::option::Option<std::string::String>,
}
impl AlgorithmImage {
    /// <p>The URI of the Docker container for the algorithm image.</p>
    pub fn docker_uri(&self) -> std::option::Option<&str> {
        // Equivalent to `as_deref()`: borrow the inner `String` as `&str`.
        self.docker_uri.as_ref().map(|uri| uri.as_str())
    }
    /// <p>The name of the algorithm image.</p>
    pub fn name(&self) -> std::option::Option<&str> {
        self.name.as_ref().map(|name| name.as_str())
    }
}
impl std::fmt::Debug for AlgorithmImage {
    /// Formats the image descriptor for diagnostics, listing both fields.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chain the debug-struct builder directly; output is identical to
        // the variable-based form.
        f.debug_struct("AlgorithmImage")
            .field("name", &self.name)
            .field("docker_uri", &self.docker_uri)
            .finish()
    }
}
/// See [`AlgorithmImage`](crate::model::AlgorithmImage)
pub mod algorithm_image {
    /// A builder for [`AlgorithmImage`](crate::model::AlgorithmImage)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) name: std::option::Option<std::string::String>,
        pub(crate) docker_uri: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The name of the algorithm image.</p>
        pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
            self.name = std::option::Option::Some(input.into());
            self
        }
        /// <p>The name of the algorithm image.</p>
        pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.name = input;
            self
        }
        /// <p>The URI of the Docker container for the algorithm image.</p>
        pub fn docker_uri(mut self, input: impl Into<std::string::String>) -> Self {
            self.docker_uri = std::option::Option::Some(input.into());
            self
        }
        /// <p>The URI of the Docker container for the algorithm image.</p>
        pub fn set_docker_uri(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.docker_uri = input;
            self
        }
        /// Consumes the builder and constructs a [`AlgorithmImage`](crate::model::AlgorithmImage)
        pub fn build(self) -> crate::model::AlgorithmImage {
            // Both fields are optional; no defaults are synthesized.
            crate::model::AlgorithmImage {
                name: self.name,
                docker_uri: self.docker_uri,
            }
        }
    }
}
impl AlgorithmImage {
    /// Creates a new builder-style object to manufacture [`AlgorithmImage`](crate::model::AlgorithmImage)
    pub fn builder() -> crate::model::algorithm_image::Builder {
        // The declared return type selects the builder's `Default` impl.
        Default::default()
    }
}
use crate::ast::{self, kw, HeapType};
use crate::parser::{Parse, Parser, Result};
use std::mem;
/// An expression, or a list of instructions, in the WebAssembly text format.
///
/// This expression type will parse s-expression-folded instructions into a flat
/// list of instructions for emission later on. The implicit `end` instruction
/// at the end of an expression is not included in the `instrs` field.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct Expression<'a> {
    // Boxed slice rather than `Vec`: the list is immutable once parsed, and
    // `Box<[_]>` drops the unused-capacity word.
    pub instrs: Box<[Instruction<'a>]>,
}
impl<'a> Parse<'a> for Expression<'a> {
    /// Parses a full expression by delegating to the heap-based
    /// `ExpressionParser` state machine (avoids deep call-stack recursion).
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let state = ExpressionParser::default();
        state.parse(parser)
    }
}
/// Helper struct used to parse an `Expression` with helper methods and such.
///
/// The primary purpose of this is to avoid defining expression parsing as a
/// call-thread-stack recursive function. Since we're parsing user input that
/// runs the risk of blowing the call stack, so we want to be sure to use a heap
/// stack structure wherever possible.
#[derive(Default)]
struct ExpressionParser<'a> {
    /// The flat list of instructions that we've parsed so far, and will
    /// eventually become the final `Expression`.
    instrs: Vec<Instruction<'a>>,
    /// Descriptor of all our nested s-expr blocks. This only happens when
    /// instructions themselves are nested.
    ///
    /// An entry is pushed when a `(` opens a construct and popped at the
    /// matching `)`; see `ExpressionParser::parse`.
    stack: Vec<Level<'a>>,
}
/// Outcome of `ExpressionParser::paren`: which parenthesis token, if any, was
/// consumed from the input.
enum Paren {
    /// No parenthesis was consumed; the next token should be parsed as a
    /// plain instruction.
    None,
    /// A `(` was consumed, opening a nested s-expression.
    Left,
    /// A `)` was consumed, closing the innermost open s-expression.
    Right,
}
/// A "kind" of nested block that we can be parsing inside of.
///
/// One of these is pushed per `(`-opened construct and popped at the matching
/// `)` in `ExpressionParser::parse`.
enum Level<'a> {
    /// This is a normal `block` or `loop` or similar, where the instruction
    /// payload here is pushed when the block is exited.
    EndWith(Instruction<'a>),
    /// This is a pretty special variant which means that we're parsing an `if`
    /// statement, and the state of the `if` parsing is tracked internally in
    /// the payload.
    If(If<'a>),
    /// This means we're either parsing inside of `(then ...)` or `(else ...)`
    /// which don't correspond to terminating instructions, we're just in a
    /// nested block.
    IfArm,
    /// Similar to `If` but for `Try` statements, which has simpler parsing
    /// state to track.
    Try(Try<'a>),
    /// Similar to `IfArm` but for `(do ...)` and `(catch ...)` blocks.
    TryArm,
}
/// Possible states of "what should be parsed next?" in an `if` expression.
enum If<'a> {
    /// Only the `if` has been parsed, next thing to parse is the clause, if
    /// any, of the `if` instruction.
    ///
    /// The payload is the original `If` instruction, held back until it is
    /// emitted by `handle_if_lparen`.
    Clause(Instruction<'a>),
    /// Next thing to parse is the `then` block
    Then(Instruction<'a>),
    /// Next thing to parse is the `else` block
    Else,
    /// This `if` statement has finished parsing and if anything remains it's a
    /// syntax error.
    End,
}
/// Possible state of "what should be parsed next?" in a `try` expression.
enum Try<'a> {
    /// Next thing to parse is the `do` block.
    ///
    /// The payload is the original `Try` instruction, held back until it is
    /// emitted by `handle_try_lparen`.
    Do(Instruction<'a>),
    /// Next thing to parse is `catch`/`catch_all`, or `unwind`.
    CatchOrUnwind,
    /// Next thing to parse is a `catch` block or `catch_all`.
    Catch,
    /// This `try` statement has finished parsing and if anything remains it's a
    /// syntax error.
    End,
}
impl<'a> ExpressionParser<'a> {
    /// Top-level parse loop: consumes the whole input (plus any unclosed
    /// nesting, to produce good errors) and yields the flattened
    /// instruction list.
    fn parse(mut self, parser: Parser<'a>) -> Result<Expression<'a>> {
        // Here we parse instructions in a loop, and we do not recursively
        // invoke this parse function to avoid blowing the stack on
        // deeply-recursive parses.
        //
        // Our loop generally only finishes once there's no more input left in
        // the `parser`. If there's some unclosed delimiters though (on our
        // `stack`), then we also keep parsing to generate error messages if
        // there's no input left.
        while !parser.is_empty() || !self.stack.is_empty() {
            // As a small ease-of-life adjustment here, if we're parsing inside
            // of an `if` block then we require that all sub-components are
            // s-expressions surrounded by `(` and `)`, so verify that here.
            if let Some(Level::If(_)) | Some(Level::Try(_)) = self.stack.last() {
                if !parser.is_empty() && !parser.peek::<ast::LParen>() {
                    return Err(parser.error("expected `(`"));
                }
            }
            match self.paren(parser)? {
                // No parenthesis seen? Then we just parse the next instruction
                // and move on.
                Paren::None => self.instrs.push(parser.parse()?),
                // If we see a left-parenthesis then things are a little
                // special. We handle block-like instructions specially
                // (`block`, `loop`, and `if`), and otherwise all other
                // instructions simply get appended once we reach the end of the
                // s-expression.
                //
                // In all cases here we push something onto the `stack` to get
                // popped when the `)` character is seen.
                Paren::Left => {
                    // First up is handling `if` parsing, which is funky in a
                    // whole bunch of ways. See the method internally for more
                    // information.
                    if self.handle_if_lparen(parser)? {
                        continue;
                    }
                    // Second, we handle `try` parsing, which is simpler than
                    // `if` but more complicated than, e.g., `block`.
                    if self.handle_try_lparen(parser)? {
                        continue;
                    }
                    match parser.parse()? {
                        // If block/loop show up then we just need to be sure to
                        // push an `end` instruction whenever the `)` token is
                        // seen
                        i @ Instruction::Block(_)
                        | i @ Instruction::Loop(_)
                        | i @ Instruction::Let(_) => {
                            self.instrs.push(i);
                            self.stack.push(Level::EndWith(Instruction::End(None)));
                        }
                        // Parsing an `if` instruction is super tricky, so we
                        // push an `If` scope and we let all our scope-based
                        // parsing handle the remaining items.
                        i @ Instruction::If(_) => {
                            self.stack.push(Level::If(If::Clause(i)));
                        }
                        // Parsing a `try` is easier than `if` but we also push
                        // a `Try` scope to handle the required nested blocks.
                        i @ Instruction::Try(_) => {
                            self.stack.push(Level::Try(Try::Do(i)));
                        }
                        // Anything else means that we're parsing a nested form
                        // such as `(i32.add ...)` which means that the
                        // instruction we parsed will be coming at the end.
                        other => self.stack.push(Level::EndWith(other)),
                    }
                }
                // If we registered a `)` token as being seen, then we're
                // guaranteed there's an item in the `stack` stack for us to
                // pop. We peel that off and take a look at what it says to do.
                Paren::Right => match self.stack.pop().unwrap() {
                    Level::EndWith(i) => self.instrs.push(i),
                    Level::IfArm => {}
                    Level::TryArm => {}
                    // If an `if` statement hasn't parsed the clause or `then`
                    // block, then that's an error because there weren't enough
                    // items in the `if` statement. Otherwise we're just careful
                    // to terminate with an `end` instruction.
                    Level::If(If::Clause(_)) => {
                        return Err(parser.error("previous `if` had no clause"));
                    }
                    Level::If(If::Then(_)) => {
                        return Err(parser.error("previous `if` had no `then`"));
                    }
                    Level::If(_) => {
                        self.instrs.push(Instruction::End(None));
                    }
                    // Both `do` and `catch` are required in a `try` statement, so
                    // we will signal those errors here. Otherwise, terminate with
                    // an `end` instruction.
                    Level::Try(Try::Do(_)) => {
                        return Err(parser.error("previous `try` had no `do`"));
                    }
                    Level::Try(Try::CatchOrUnwind) => {
                        return Err(parser.error("previous `try` had no `catch`, `catch_all`, or `unwind`"));
                    }
                    Level::Try(_) => {
                        self.instrs.push(Instruction::End(None));
                    }
                },
            }
        }
        Ok(Expression {
            instrs: self.instrs.into(),
        })
    }
    /// Parses either `(`, `)`, or nothing.
    ///
    /// Note that `)` is only recognized while some construct is still open on
    /// `self.stack`; at the top level a `)` is left unconsumed for the caller.
    fn paren(&self, parser: Parser<'a>) -> Result<Paren> {
        parser.step(|cursor| {
            Ok(match cursor.lparen() {
                Some(rest) => (Paren::Left, rest),
                None if self.stack.is_empty() => (Paren::None, cursor),
                None => match cursor.rparen() {
                    Some(rest) => (Paren::Right, rest),
                    None => (Paren::None, cursor),
                },
            })
        })
    }
    /// Handles all parsing of an `if` statement.
    ///
    /// The syntactical form of an `if` statement looks like:
    ///
    /// ```wat
    /// (if $clause (then $then) (else $else))
    /// ```
    ///
    /// but it turns out we practically see a few things in the wild:
    ///
    /// * inside the `(if ...)` every sub-thing is surrounded by parens
    /// * The `then` and `else` keywords are optional
    /// * The `$then` and `$else` blocks don't need to be surrounded by parens
    ///
    /// That's all attempted to be handled here. The part about all sub-parts
    /// being surrounded by `(` and `)` means that we hook into the `LParen`
    /// parsing above to call this method there unconditionally.
    ///
    /// Returns `true` if the rest of the arm above should be skipped, or
    /// `false` if we should parse the next item as an instruction (because we
    /// didn't handle the lparen here).
    fn handle_if_lparen(&mut self, parser: Parser<'a>) -> Result<bool> {
        // Only execute the code below if there's an `If` listed last.
        let i = match self.stack.last_mut() {
            Some(Level::If(i)) => i,
            _ => return Ok(false),
        };
        // The first thing parsed in an `if` statement is the clause. If the
        // clause starts with `then`, however, then we know to skip the clause
        // and fall through to below.
        if let If::Clause(if_instr) = i {
            // `mem::replace` moves the held `If` instruction out of the state
            // without cloning; the `End(None)` placeholder is never emitted.
            let instr = mem::replace(if_instr, Instruction::End(None));
            *i = If::Then(instr);
            if !parser.peek::<kw::then>() {
                return Ok(false);
            }
        }
        // All `if` statements are required to have a `then`. This is either the
        // second s-expr (with or without a leading `then`) or the first s-expr
        // with a leading `then`. The optionality of `then` isn't strictly what
        // the text spec says but it matches wabt for now.
        //
        // Note that when we see the `then`, that's when we actually add the
        // original `if` instruction to the stream.
        if let If::Then(if_instr) = i {
            let instr = mem::replace(if_instr, Instruction::End(None));
            self.instrs.push(instr);
            *i = If::Else;
            if parser.parse::<Option<kw::then>>()?.is_some() {
                self.stack.push(Level::IfArm);
                return Ok(true);
            }
            return Ok(false);
        }
        // effectively the same as the `then` parsing above
        if let If::Else = i {
            self.instrs.push(Instruction::Else(None));
            if parser.parse::<Option<kw::r#else>>()?.is_some() {
                // An empty `(else)` is dropped entirely rather than emitting
                // a bare `else` opcode.
                if parser.is_empty() {
                    self.instrs.pop();
                }
                self.stack.push(Level::IfArm);
                return Ok(true);
            }
            *i = If::End;
            return Ok(false);
        }
        // If we made it this far then we're at `If::End` which means that there
        // were too many s-expressions inside the `(if)` and we don't want to
        // parse anything else.
        Err(parser.error("too many payloads inside of `(if)`"))
    }
    /// Handles parsing of a `try` statement. A `try` statement is simpler
    /// than an `if` as the syntactic form is:
    ///
    /// ```wat
    /// (try (do $do) (catch $event $catch))
    /// ```
    ///
    /// where the `do` and `catch` keywords are mandatory, even for an empty
    /// $do or $catch.
    ///
    /// Returns `true` if the rest of the arm above should be skipped, or
    /// `false` if we should parse the next item as an instruction (because we
    /// didn't handle the lparen here).
    fn handle_try_lparen(&mut self, parser: Parser<'a>) -> Result<bool> {
        // Only execute the code below if there's a `Try` listed last.
        let i = match self.stack.last_mut() {
            Some(Level::Try(i)) => i,
            _ => return Ok(false),
        };
        // Try statements must start with a `do` block.
        if let Try::Do(try_instr) = i {
            let instr = mem::replace(try_instr, Instruction::End(None));
            self.instrs.push(instr);
            if parser.parse::<Option<kw::r#do>>()?.is_some() {
                // The state is advanced here only if the parse succeeds in
                // order to strictly require the keyword.
                *i = Try::CatchOrUnwind;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            // We return here and continue parsing instead of raising an error
            // immediately because the missing keyword will be caught more
            // generally in the `Paren::Right` case in `parse`.
            return Ok(false);
        }
        // After a try's `do`, there are several possible kinds of handlers.
        if let Try::CatchOrUnwind = i {
            // `catch` may be followed by more `catch`s or `catch_all`.
            if parser.parse::<Option<kw::catch>>()?.is_some() {
                let evt = parser.parse::<ast::Index<'a>>()?;
                self.instrs.push(Instruction::Catch(evt));
                *i = Try::Catch;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            // `catch_all` can only come at the end and has no argument.
            if parser.parse::<Option<kw::catch_all>>()?.is_some() {
                self.instrs.push(Instruction::CatchAll);
                *i = Try::End;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            // `unwind` is similar to `catch_all`.
            if parser.parse::<Option<kw::unwind>>()?.is_some() {
                self.instrs.push(Instruction::Unwind);
                *i = Try::End;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            return Ok(false);
        }
        if let Try::Catch = i {
            if parser.parse::<Option<kw::catch>>()?.is_some() {
                let evt = parser.parse::<ast::Index<'a>>()?;
                self.instrs.push(Instruction::Catch(evt));
                *i = Try::Catch;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            if parser.parse::<Option<kw::catch_all>>()?.is_some() {
                self.instrs.push(Instruction::CatchAll);
                *i = Try::End;
                self.stack.push(Level::TryArm);
                return Ok(true);
            }
            return Err(parser.error("unexpected items after `catch`"));
        }
        Err(parser.error("too many payloads inside of `(try)`"))
    }
}
// Generates the `Instruction` enum plus its text-format parser and its binary
// encoder from a single table. Each row in the invocation has the shape:
//
//     Name(PayloadTy)? : [opcode bytes] : "keyword" ( | "deprecated keyword" )?
//
// and expands into three things:
//   * an `Instruction::Name` variant holding the optional payload,
//   * a `Parse` impl that dispatches on the keyword(s) and parses the payload,
//   * an `Encode` impl that emits the opcode bytes followed by the encoded
//     payload.
// The `@…` helper rules at the bottom handle payload quirks: `MemArg<N>`
// carries a default alignment `N` through to `MemArg::parse`, and
// `0xfd`-prefixed SIMD opcodes encode their sub-opcode as a varuint32.
macro_rules! instructions {
    (pub enum Instruction<'a> {
        $(
            $(#[$doc:meta])*
            $name:ident $(($($arg:tt)*))? : [$($binary:tt)*] : $instr:tt $( | $deprecated:tt )?,
        )*
    }) => (
        /// A listing of all WebAssembly instructions that can be in a module
        /// that this crate currently parses.
        #[derive(Debug)]
        #[allow(missing_docs)]
        pub enum Instruction<'a> {
            $(
                $(#[$doc])*
                $name $(( instructions!(@ty $($arg)*) ))?,
            )*
        }
        #[allow(non_snake_case)]
        impl<'a> Parse<'a> for Instruction<'a> {
            fn parse(parser: Parser<'a>) -> Result<Self> {
                $(
                    fn $name<'a>(_parser: Parser<'a>) -> Result<Instruction<'a>> {
                        Ok(Instruction::$name $((
                            instructions!(@parse _parser $($arg)*)?
                        ))?)
                    }
                )*
                let parse_remainder = parser.step(|c| {
                    let (kw, rest) = match c.keyword() {
                        Some(pair) => pair,
                        None => return Err(c.error("expected an instruction")),
                    };
                    match kw {
                        $($instr $( | $deprecated )?=> Ok(($name as fn(_) -> _, rest)),)*
                        _ => return Err(c.error("unknown operator or unexpected token")),
                    }
                })?;
                parse_remainder(parser)
            }
        }
        impl crate::binary::Encode for Instruction<'_> {
            #[allow(non_snake_case)]
            fn encode(&self, v: &mut Vec<u8>) {
                match self {
                    $(
                        Instruction::$name $((instructions!(@first $($arg)*)))? => {
                            fn encode<'a>($(arg: &instructions!(@ty $($arg)*),)? v: &mut Vec<u8>) {
                                instructions!(@encode v $($binary)*);
                                $(<instructions!(@ty $($arg)*) as crate::binary::Encode>::encode(arg, v);)?
                            }
                            encode($( instructions!(@first $($arg)*), )? v)
                        }
                    )*
                }
            }
        }
    );
    (@ty MemArg<$amt:tt>) => (MemArg<'a>);
    (@ty $other:ty) => ($other);
    (@first $first:ident $($t:tt)*) => ($first);
    (@parse $parser:ident MemArg<$amt:tt>) => (MemArg::parse($parser, $amt));
    (@parse $parser:ident MemArg) => (compile_error!("must specify `MemArg` default"));
    (@parse $parser:ident $other:ty) => ($parser.parse::<$other>());
    // simd opcodes prefixed with `0xfd` get a varuint32 encoding for their payload
    (@encode $dst:ident 0xfd, $simd:tt) => ({
        $dst.push(0xfd);
        <u32 as crate::binary::Encode>::encode(&$simd, $dst);
    });
    (@encode $dst:ident $($bytes:tt)*) => ($dst.extend_from_slice(&[$($bytes)*]););
}
instructions! {
pub enum Instruction<'a> {
Block(BlockType<'a>) : [0x02] : "block",
If(BlockType<'a>) : [0x04] : "if",
Else(Option<ast::Id<'a>>) : [0x05] : "else",
Loop(BlockType<'a>) : [0x03] : "loop",
End(Option<ast::Id<'a>>) : [0x0b] : "end",
Unreachable : [0x00] : "unreachable",
Nop : [0x01] : "nop",
Br(ast::Index<'a>) : [0x0c] : "br",
BrIf(ast::Index<'a>) : [0x0d] : "br_if",
BrTable(BrTableIndices<'a>) : [0x0e] : "br_table",
Return : [0x0f] : "return",
Call(ast::Index<'a>) : [0x10] : "call",
CallIndirect(CallIndirect<'a>) : [0x11] : "call_indirect",
// tail-call proposal
ReturnCall(ast::Index<'a>) : [0x12] : "return_call",
ReturnCallIndirect(CallIndirect<'a>) : [0x13] : "return_call_indirect",
// function-references proposal
CallRef : [0x14] : "call_ref",
ReturnCallRef : [0x15] : "return_call_ref",
FuncBind(FuncBindType<'a>) : [0x16] : "func.bind",
Let(LetType<'a>) : [0x17] : "let",
Drop : [0x1a] : "drop",
Select(SelectTypes<'a>) : [] : "select",
LocalGet(ast::Index<'a>) : [0x20] : "local.get" | "get_local",
LocalSet(ast::Index<'a>) : [0x21] : "local.set" | "set_local",
LocalTee(ast::Index<'a>) : [0x22] : "local.tee" | "tee_local",
GlobalGet(ast::Index<'a>) : [0x23] : "global.get" | "get_global",
GlobalSet(ast::Index<'a>) : [0x24] : "global.set" | "set_global",
TableGet(TableArg<'a>) : [0x25] : "table.get",
TableSet(TableArg<'a>) : [0x26] : "table.set",
I32Load(MemArg<4>) : [0x28] : "i32.load",
I64Load(MemArg<8>) : [0x29] : "i64.load",
F32Load(MemArg<4>) : [0x2a] : "f32.load",
F64Load(MemArg<8>) : [0x2b] : "f64.load",
I32Load8s(MemArg<1>) : [0x2c] : "i32.load8_s",
I32Load8u(MemArg<1>) : [0x2d] : "i32.load8_u",
I32Load16s(MemArg<2>) : [0x2e] : "i32.load16_s",
I32Load16u(MemArg<2>) : [0x2f] : "i32.load16_u",
I64Load8s(MemArg<1>) : [0x30] : "i64.load8_s",
I64Load8u(MemArg<1>) : [0x31] : "i64.load8_u",
I64Load16s(MemArg<2>) : [0x32] : "i64.load16_s",
I64Load16u(MemArg<2>) : [0x33] : "i64.load16_u",
I64Load32s(MemArg<4>) : [0x34] : "i64.load32_s",
I64Load32u(MemArg<4>) : [0x35] : "i64.load32_u",
I32Store(MemArg<4>) : [0x36] : "i32.store",
I64Store(MemArg<8>) : [0x37] : "i64.store",
F32Store(MemArg<4>) : [0x38] : "f32.store",
F64Store(MemArg<8>) : [0x39] : "f64.store",
I32Store8(MemArg<1>) : [0x3a] : "i32.store8",
I32Store16(MemArg<2>) : [0x3b] : "i32.store16",
I64Store8(MemArg<1>) : [0x3c] : "i64.store8",
I64Store16(MemArg<2>) : [0x3d] : "i64.store16",
I64Store32(MemArg<4>) : [0x3e] : "i64.store32",
// Lots of bulk memory proposal here as well
MemorySize(MemoryArg<'a>) : [0x3f] : "memory.size" | "current_memory",
MemoryGrow(MemoryArg<'a>) : [0x40] : "memory.grow" | "grow_memory",
MemoryInit(MemoryInit<'a>) : [0xfc, 0x08] : "memory.init",
MemoryCopy(MemoryCopy<'a>) : [0xfc, 0x0a] : "memory.copy",
MemoryFill(MemoryArg<'a>) : [0xfc, 0x0b] : "memory.fill",
DataDrop(ast::Index<'a>) : [0xfc, 0x09] : "data.drop",
ElemDrop(ast::Index<'a>) : [0xfc, 0x0d] : "elem.drop",
TableInit(TableInit<'a>) : [0xfc, 0x0c] : "table.init",
TableCopy(TableCopy<'a>) : [0xfc, 0x0e] : "table.copy",
TableFill(TableArg<'a>) : [0xfc, 0x11] : "table.fill",
TableSize(TableArg<'a>) : [0xfc, 0x10] : "table.size",
TableGrow(TableArg<'a>) : [0xfc, 0x0f] : "table.grow",
RefNull(HeapType<'a>) : [0xd0] : "ref.null",
RefIsNull : [0xd1] : "ref.is_null",
RefExtern(u32) : [0xff] : "ref.extern", // only used in test harness
RefFunc(ast::Index<'a>) : [0xd2] : "ref.func",
// function-references proposal
RefAsNonNull : [0xd3] : "ref.as_non_null",
BrOnNull(ast::Index<'a>) : [0xd4] : "br_on_null",
// gc proposal: eqref
RefEq : [0xd5] : "ref.eq",
// gc proposal (moz specific, will be removed)
StructNew(ast::Index<'a>) : [0xfb, 0x0] : "struct.new",
// gc proposal: struct
StructNewWithRtt(ast::Index<'a>) : [0xfb, 0x01] : "struct.new_with_rtt",
StructNewDefaultWithRtt(ast::Index<'a>) : [0xfb, 0x02] : "struct.new_default_with_rtt",
StructGet(StructAccess<'a>) : [0xfb, 0x03] : "struct.get",
StructGetS(StructAccess<'a>) : [0xfb, 0x04] : "struct.get_s",
StructGetU(StructAccess<'a>) : [0xfb, 0x05] : "struct.get_u",
StructSet(StructAccess<'a>) : [0xfb, 0x06] : "struct.set",
// gc proposal (moz specific, will be removed)
StructNarrow(StructNarrow<'a>) : [0xfb, 0x07] : "struct.narrow",
// gc proposal: array
ArrayNewWithRtt(ast::Index<'a>) : [0xfb, 0x11] : "array.new_with_rtt",
ArrayNewDefaultWithRtt(ast::Index<'a>) : [0xfb, 0x12] : "array.new_default_with_rtt",
ArrayGet(ast::Index<'a>) : [0xfb, 0x13] : "array.get",
ArrayGetS(ast::Index<'a>) : [0xfb, 0x14] : "array.get_s",
ArrayGetU(ast::Index<'a>) : [0xfb, 0x15] : "array.get_u",
ArraySet(ast::Index<'a>) : [0xfb, 0x16] : "array.set",
ArrayLen(ast::Index<'a>) : [0xfb, 0x17] : "array.len",
// gc proposal, i31
I31New : [0xfb, 0x20] : "i31.new",
I31GetS : [0xfb, 0x21] : "i31.get_s",
I31GetU : [0xfb, 0x22] : "i31.get_u",
// gc proposal, rtt/casting
RTTCanon(HeapType<'a>) : [0xfb, 0x30] : "rtt.canon",
RTTSub(RTTSub<'a>) : [0xfb, 0x31] : "rtt.sub",
RefTest(RefTest<'a>) : [0xfb, 0x40] : "ref.test",
RefCast(RefTest<'a>) : [0xfb, 0x41] : "ref.cast",
BrOnCast(BrOnCast<'a>) : [0xfb, 0x42] : "br_on_cast",
I32Const(i32) : [0x41] : "i32.const",
I64Const(i64) : [0x42] : "i64.const",
F32Const(ast::Float32) : [0x43] : "f32.const",
F64Const(ast::Float64) : [0x44] : "f64.const",
I32Clz : [0x67] : "i32.clz",
I32Ctz : [0x68] : "i32.ctz",
I32Popcnt : [0x69] : "i32.popcnt",
I32Add : [0x6a] : "i32.add",
I32Sub : [0x6b] : "i32.sub",
I32Mul : [0x6c] : "i32.mul",
I32DivS : [0x6d] : "i32.div_s",
I32DivU : [0x6e] : "i32.div_u",
I32RemS : [0x6f] : "i32.rem_s",
I32RemU : [0x70] : "i32.rem_u",
I32And : [0x71] : "i32.and",
I32Or : [0x72] : "i32.or",
I32Xor : [0x73] : "i32.xor",
I32Shl : [0x74] : "i32.shl",
I32ShrS : [0x75] : "i32.shr_s",
I32ShrU : [0x76] : "i32.shr_u",
I32Rotl : [0x77] : "i32.rotl",
I32Rotr : [0x78] : "i32.rotr",
I64Clz : [0x79] : "i64.clz",
I64Ctz : [0x7a] : "i64.ctz",
I64Popcnt : [0x7b] : "i64.popcnt",
I64Add : [0x7c] : "i64.add",
I64Sub : [0x7d] : "i64.sub",
I64Mul : [0x7e] : "i64.mul",
I64DivS : [0x7f] : "i64.div_s",
I64DivU : [0x80] : "i64.div_u",
I64RemS : [0x81] : "i64.rem_s",
I64RemU : [0x82] : "i64.rem_u",
I64And : [0x83] : "i64.and",
I64Or : [0x84] : "i64.or",
I64Xor : [0x85] : "i64.xor",
I64Shl : [0x86] : "i64.shl",
I64ShrS : [0x87] : "i64.shr_s",
I64ShrU : [0x88] : "i64.shr_u",
I64Rotl : [0x89] : "i64.rotl",
I64Rotr : [0x8a] : "i64.rotr",
F32Abs : [0x8b] : "f32.abs",
F32Neg : [0x8c] : "f32.neg",
F32Ceil : [0x8d] : "f32.ceil",
F32Floor : [0x8e] : "f32.floor",
F32Trunc : [0x8f] : "f32.trunc",
F32Nearest : [0x90] : "f32.nearest",
F32Sqrt : [0x91] : "f32.sqrt",
F32Add : [0x92] : "f32.add",
F32Sub : [0x93] : "f32.sub",
F32Mul : [0x94] : "f32.mul",
F32Div : [0x95] : "f32.div",
F32Min : [0x96] : "f32.min",
F32Max : [0x97] : "f32.max",
F32Copysign : [0x98] : "f32.copysign",
F64Abs : [0x99] : "f64.abs",
F64Neg : [0x9a] : "f64.neg",
F64Ceil : [0x9b] : "f64.ceil",
F64Floor : [0x9c] : "f64.floor",
F64Trunc : [0x9d] : "f64.trunc",
F64Nearest : [0x9e] : "f64.nearest",
F64Sqrt : [0x9f] : "f64.sqrt",
F64Add : [0xa0] : "f64.add",
F64Sub : [0xa1] : "f64.sub",
F64Mul : [0xa2] : "f64.mul",
F64Div : [0xa3] : "f64.div",
F64Min : [0xa4] : "f64.min",
F64Max : [0xa5] : "f64.max",
F64Copysign : [0xa6] : "f64.copysign",
I32Eqz : [0x45] : "i32.eqz",
I32Eq : [0x46] : "i32.eq",
I32Ne : [0x47] : "i32.ne",
I32LtS : [0x48] : "i32.lt_s",
I32LtU : [0x49] : "i32.lt_u",
I32GtS : [0x4a] : "i32.gt_s",
I32GtU : [0x4b] : "i32.gt_u",
I32LeS : [0x4c] : "i32.le_s",
I32LeU : [0x4d] : "i32.le_u",
I32GeS : [0x4e] : "i32.ge_s",
I32GeU : [0x4f] : "i32.ge_u",
I64Eqz : [0x50] : "i64.eqz",
I64Eq : [0x51] : "i64.eq",
I64Ne : [0x52] : "i64.ne",
I64LtS : [0x53] : "i64.lt_s",
I64LtU : [0x54] : "i64.lt_u",
I64GtS : [0x55] : "i64.gt_s",
I64GtU : [0x56] : "i64.gt_u",
I64LeS : [0x57] : "i64.le_s",
I64LeU : [0x58] : "i64.le_u",
I64GeS : [0x59] : "i64.ge_s",
I64GeU : [0x5a] : "i64.ge_u",
F32Eq : [0x5b] : "f32.eq",
F32Ne : [0x5c] : "f32.ne",
F32Lt : [0x5d] : "f32.lt",
F32Gt : [0x5e] : "f32.gt",
F32Le : [0x5f] : "f32.le",
F32Ge : [0x60] : "f32.ge",
F64Eq : [0x61] : "f64.eq",
F64Ne : [0x62] : "f64.ne",
F64Lt : [0x63] : "f64.lt",
F64Gt : [0x64] : "f64.gt",
F64Le : [0x65] : "f64.le",
F64Ge : [0x66] : "f64.ge",
I32WrapI64 : [0xa7] : "i32.wrap_i64" | "i32.wrap/i64",
I32TruncF32S : [0xa8] : "i32.trunc_f32_s" | "i32.trunc_s/f32",
I32TruncF32U : [0xa9] : "i32.trunc_f32_u" | "i32.trunc_u/f32",
I32TruncF64S : [0xaa] : "i32.trunc_f64_s" | "i32.trunc_s/f64",
I32TruncF64U : [0xab] : "i32.trunc_f64_u" | "i32.trunc_u/f64",
I64ExtendI32S : [0xac] : "i64.extend_i32_s" | "i64.extend_s/i32",
I64ExtendI32U : [0xad] : "i64.extend_i32_u" | "i64.extend_u/i32",
I64TruncF32S : [0xae] : "i64.trunc_f32_s" | "i64.trunc_s/f32",
I64TruncF32U : [0xaf] : "i64.trunc_f32_u" | "i64.trunc_u/f32",
I64TruncF64S : [0xb0] : "i64.trunc_f64_s" | "i64.trunc_s/f64",
I64TruncF64U : [0xb1] : "i64.trunc_f64_u" | "i64.trunc_u/f64",
F32ConvertI32S : [0xb2] : "f32.convert_i32_s" | "f32.convert_s/i32",
F32ConvertI32U : [0xb3] : "f32.convert_i32_u" | "f32.convert_u/i32",
F32ConvertI64S : [0xb4] : "f32.convert_i64_s" | "f32.convert_s/i64",
F32ConvertI64U : [0xb5] : "f32.convert_i64_u" | "f32.convert_u/i64",
F32DemoteF64 : [0xb6] : "f32.demote_f64" | "f32.demote/f64",
F64ConvertI32S : [0xb7] : "f64.convert_i32_s" | "f64.convert_s/i32",
F64ConvertI32U : [0xb8] : "f64.convert_i32_u" | "f64.convert_u/i32",
F64ConvertI64S : [0xb9] : "f64.convert_i64_s" | "f64.convert_s/i64",
F64ConvertI64U : [0xba] : "f64.convert_i64_u" | "f64.convert_u/i64",
F64PromoteF32 : [0xbb] : "f64.promote_f32" | "f64.promote/f32",
I32ReinterpretF32 : [0xbc] : "i32.reinterpret_f32" | "i32.reinterpret/f32",
I64ReinterpretF64 : [0xbd] : "i64.reinterpret_f64" | "i64.reinterpret/f64",
F32ReinterpretI32 : [0xbe] : "f32.reinterpret_i32" | "f32.reinterpret/i32",
F64ReinterpretI64 : [0xbf] : "f64.reinterpret_i64" | "f64.reinterpret/i64",
// non-trapping float to int
I32TruncSatF32S : [0xfc, 0x00] : "i32.trunc_sat_f32_s" | "i32.trunc_s:sat/f32",
I32TruncSatF32U : [0xfc, 0x01] : "i32.trunc_sat_f32_u" | "i32.trunc_u:sat/f32",
I32TruncSatF64S : [0xfc, 0x02] : "i32.trunc_sat_f64_s" | "i32.trunc_s:sat/f64",
I32TruncSatF64U : [0xfc, 0x03] : "i32.trunc_sat_f64_u" | "i32.trunc_u:sat/f64",
I64TruncSatF32S : [0xfc, 0x04] : "i64.trunc_sat_f32_s" | "i64.trunc_s:sat/f32",
I64TruncSatF32U : [0xfc, 0x05] : "i64.trunc_sat_f32_u" | "i64.trunc_u:sat/f32",
I64TruncSatF64S : [0xfc, 0x06] : "i64.trunc_sat_f64_s" | "i64.trunc_s:sat/f64",
I64TruncSatF64U : [0xfc, 0x07] : "i64.trunc_sat_f64_u" | "i64.trunc_u:sat/f64",
// sign extension proposal
I32Extend8S : [0xc0] : "i32.extend8_s",
I32Extend16S : [0xc1] : "i32.extend16_s",
I64Extend8S : [0xc2] : "i64.extend8_s",
I64Extend16S : [0xc3] : "i64.extend16_s",
I64Extend32S : [0xc4] : "i64.extend32_s",
// atomics proposal
MemoryAtomicNotify(MemArg<4>) : [0xfe, 0x00] : "memory.atomic.notify" | "atomic.notify",
MemoryAtomicWait32(MemArg<4>) : [0xfe, 0x01] : "memory.atomic.wait32" | "i32.atomic.wait",
MemoryAtomicWait64(MemArg<8>) : [0xfe, 0x02] : "memory.atomic.wait64" | "i64.atomic.wait",
AtomicFence : [0xfe, 0x03, 0x00] : "atomic.fence",
I32AtomicLoad(MemArg<4>) : [0xfe, 0x10] : "i32.atomic.load",
I64AtomicLoad(MemArg<8>) : [0xfe, 0x11] : "i64.atomic.load",
I32AtomicLoad8u(MemArg<1>) : [0xfe, 0x12] : "i32.atomic.load8_u",
I32AtomicLoad16u(MemArg<2>) : [0xfe, 0x13] : "i32.atomic.load16_u",
I64AtomicLoad8u(MemArg<1>) : [0xfe, 0x14] : "i64.atomic.load8_u",
I64AtomicLoad16u(MemArg<2>) : [0xfe, 0x15] : "i64.atomic.load16_u",
I64AtomicLoad32u(MemArg<4>) : [0xfe, 0x16] : "i64.atomic.load32_u",
I32AtomicStore(MemArg<4>) : [0xfe, 0x17] : "i32.atomic.store",
I64AtomicStore(MemArg<8>) : [0xfe, 0x18] : "i64.atomic.store",
I32AtomicStore8(MemArg<1>) : [0xfe, 0x19] : "i32.atomic.store8",
I32AtomicStore16(MemArg<2>) : [0xfe, 0x1a] : "i32.atomic.store16",
I64AtomicStore8(MemArg<1>) : [0xfe, 0x1b] : "i64.atomic.store8",
I64AtomicStore16(MemArg<2>) : [0xfe, 0x1c] : "i64.atomic.store16",
I64AtomicStore32(MemArg<4>) : [0xfe, 0x1d] : "i64.atomic.store32",
I32AtomicRmwAdd(MemArg<4>) : [0xfe, 0x1e] : "i32.atomic.rmw.add",
I64AtomicRmwAdd(MemArg<8>) : [0xfe, 0x1f] : "i64.atomic.rmw.add",
I32AtomicRmw8AddU(MemArg<1>) : [0xfe, 0x20] : "i32.atomic.rmw8.add_u",
I32AtomicRmw16AddU(MemArg<2>) : [0xfe, 0x21] : "i32.atomic.rmw16.add_u",
I64AtomicRmw8AddU(MemArg<1>) : [0xfe, 0x22] : "i64.atomic.rmw8.add_u",
I64AtomicRmw16AddU(MemArg<2>) : [0xfe, 0x23] : "i64.atomic.rmw16.add_u",
I64AtomicRmw32AddU(MemArg<4>) : [0xfe, 0x24] : "i64.atomic.rmw32.add_u",
I32AtomicRmwSub(MemArg<4>) : [0xfe, 0x25] : "i32.atomic.rmw.sub",
I64AtomicRmwSub(MemArg<8>) : [0xfe, 0x26] : "i64.atomic.rmw.sub",
I32AtomicRmw8SubU(MemArg<1>) : [0xfe, 0x27] : "i32.atomic.rmw8.sub_u",
I32AtomicRmw16SubU(MemArg<2>) : [0xfe, 0x28] : "i32.atomic.rmw16.sub_u",
I64AtomicRmw8SubU(MemArg<1>) : [0xfe, 0x29] : "i64.atomic.rmw8.sub_u",
I64AtomicRmw16SubU(MemArg<2>) : [0xfe, 0x2a] : "i64.atomic.rmw16.sub_u",
I64AtomicRmw32SubU(MemArg<4>) : [0xfe, 0x2b] : "i64.atomic.rmw32.sub_u",
I32AtomicRmwAnd(MemArg<4>) : [0xfe, 0x2c] : "i32.atomic.rmw.and",
I64AtomicRmwAnd(MemArg<8>) : [0xfe, 0x2d] : "i64.atomic.rmw.and",
I32AtomicRmw8AndU(MemArg<1>) : [0xfe, 0x2e] : "i32.atomic.rmw8.and_u",
I32AtomicRmw16AndU(MemArg<2>) : [0xfe, 0x2f] : "i32.atomic.rmw16.and_u",
I64AtomicRmw8AndU(MemArg<1>) : [0xfe, 0x30] : "i64.atomic.rmw8.and_u",
I64AtomicRmw16AndU(MemArg<2>) : [0xfe, 0x31] : "i64.atomic.rmw16.and_u",
I64AtomicRmw32AndU(MemArg<4>) : [0xfe, 0x32] : "i64.atomic.rmw32.and_u",
I32AtomicRmwOr(MemArg<4>) : [0xfe, 0x33] : "i32.atomic.rmw.or",
I64AtomicRmwOr(MemArg<8>) : [0xfe, 0x34] : "i64.atomic.rmw.or",
I32AtomicRmw8OrU(MemArg<1>) : [0xfe, 0x35] : "i32.atomic.rmw8.or_u",
I32AtomicRmw16OrU(MemArg<2>) : [0xfe, 0x36] : "i32.atomic.rmw16.or_u",
I64AtomicRmw8OrU(MemArg<1>) : [0xfe, 0x37] : "i64.atomic.rmw8.or_u",
I64AtomicRmw16OrU(MemArg<2>) : [0xfe, 0x38] : "i64.atomic.rmw16.or_u",
I64AtomicRmw32OrU(MemArg<4>) : [0xfe, 0x39] : "i64.atomic.rmw32.or_u",
I32AtomicRmwXor(MemArg<4>) : [0xfe, 0x3a] : "i32.atomic.rmw.xor",
I64AtomicRmwXor(MemArg<8>) : [0xfe, 0x3b] : "i64.atomic.rmw.xor",
I32AtomicRmw8XorU(MemArg<1>) : [0xfe, 0x3c] : "i32.atomic.rmw8.xor_u",
I32AtomicRmw16XorU(MemArg<2>) : [0xfe, 0x3d] : "i32.atomic.rmw16.xor_u",
I64AtomicRmw8XorU(MemArg<1>) : [0xfe, 0x3e] : "i64.atomic.rmw8.xor_u",
I64AtomicRmw16XorU(MemArg<2>) : [0xfe, 0x3f] : "i64.atomic.rmw16.xor_u",
I64AtomicRmw32XorU(MemArg<4>) : [0xfe, 0x40] : "i64.atomic.rmw32.xor_u",
I32AtomicRmwXchg(MemArg<4>) : [0xfe, 0x41] : "i32.atomic.rmw.xchg",
I64AtomicRmwXchg(MemArg<8>) : [0xfe, 0x42] : "i64.atomic.rmw.xchg",
I32AtomicRmw8XchgU(MemArg<1>) : [0xfe, 0x43] : "i32.atomic.rmw8.xchg_u",
I32AtomicRmw16XchgU(MemArg<2>) : [0xfe, 0x44] : "i32.atomic.rmw16.xchg_u",
I64AtomicRmw8XchgU(MemArg<1>) : [0xfe, 0x45] : "i64.atomic.rmw8.xchg_u",
I64AtomicRmw16XchgU(MemArg<2>) : [0xfe, 0x46] : "i64.atomic.rmw16.xchg_u",
I64AtomicRmw32XchgU(MemArg<4>) : [0xfe, 0x47] : "i64.atomic.rmw32.xchg_u",
I32AtomicRmwCmpxchg(MemArg<4>) : [0xfe, 0x48] : "i32.atomic.rmw.cmpxchg",
I64AtomicRmwCmpxchg(MemArg<8>) : [0xfe, 0x49] : "i64.atomic.rmw.cmpxchg",
I32AtomicRmw8CmpxchgU(MemArg<1>) : [0xfe, 0x4a] : "i32.atomic.rmw8.cmpxchg_u",
I32AtomicRmw16CmpxchgU(MemArg<2>) : [0xfe, 0x4b] : "i32.atomic.rmw16.cmpxchg_u",
I64AtomicRmw8CmpxchgU(MemArg<1>) : [0xfe, 0x4c] : "i64.atomic.rmw8.cmpxchg_u",
I64AtomicRmw16CmpxchgU(MemArg<2>) : [0xfe, 0x4d] : "i64.atomic.rmw16.cmpxchg_u",
I64AtomicRmw32CmpxchgU(MemArg<4>) : [0xfe, 0x4e] : "i64.atomic.rmw32.cmpxchg_u",
// proposal: simd
V128Load(MemArg<16>) : [0xfd, 0x00] : "v128.load",
V128Load8x8S(MemArg<8>) : [0xfd, 0x01] : "v128.load8x8_s",
V128Load8x8U(MemArg<8>) : [0xfd, 0x02] : "v128.load8x8_u",
V128Load16x4S(MemArg<8>) : [0xfd, 0x03] : "v128.load16x4_s",
V128Load16x4U(MemArg<8>) : [0xfd, 0x04] : "v128.load16x4_u",
V128Load32x2S(MemArg<8>) : [0xfd, 0x05] : "v128.load32x2_s",
V128Load32x2U(MemArg<8>) : [0xfd, 0x06] : "v128.load32x2_u",
V128Load8Splat(MemArg<1>) : [0xfd, 0x07] : "v128.load8_splat",
V128Load16Splat(MemArg<2>) : [0xfd, 0x08] : "v128.load16_splat",
V128Load32Splat(MemArg<4>) : [0xfd, 0x09] : "v128.load32_splat",
V128Load64Splat(MemArg<8>) : [0xfd, 0x0a] : "v128.load64_splat",
V128Store(MemArg<16>) : [0xfd, 0x0b] : "v128.store",
V128Const(V128Const) : [0xfd, 0x0c] : "v128.const",
I8x16Shuffle(I8x16Shuffle) : [0xfd, 0x0d] : "i8x16.shuffle",
I8x16Swizzle : [0xfd, 0x0e] : "i8x16.swizzle",
I8x16Splat : [0xfd, 0x0f] : "i8x16.splat",
I16x8Splat : [0xfd, 0x10] : "i16x8.splat",
I32x4Splat : [0xfd, 0x11] : "i32x4.splat",
I64x2Splat : [0xfd, 0x12] : "i64x2.splat",
F32x4Splat : [0xfd, 0x13] : "f32x4.splat",
F64x2Splat : [0xfd, 0x14] : "f64x2.splat",
I8x16ExtractLaneS(LaneArg) : [0xfd, 0x15] : "i8x16.extract_lane_s",
I8x16ExtractLaneU(LaneArg) : [0xfd, 0x16] : "i8x16.extract_lane_u",
I8x16ReplaceLane(LaneArg) : [0xfd, 0x17] : "i8x16.replace_lane",
I16x8ExtractLaneS(LaneArg) : [0xfd, 0x18] : "i16x8.extract_lane_s",
I16x8ExtractLaneU(LaneArg) : [0xfd, 0x19] : "i16x8.extract_lane_u",
I16x8ReplaceLane(LaneArg) : [0xfd, 0x1a] : "i16x8.replace_lane",
I32x4ExtractLane(LaneArg) : [0xfd, 0x1b] : "i32x4.extract_lane",
I32x4ReplaceLane(LaneArg) : [0xfd, 0x1c] : "i32x4.replace_lane",
I64x2ExtractLane(LaneArg) : [0xfd, 0x1d] : "i64x2.extract_lane",
I64x2ReplaceLane(LaneArg) : [0xfd, 0x1e] : "i64x2.replace_lane",
F32x4ExtractLane(LaneArg) : [0xfd, 0x1f] : "f32x4.extract_lane",
F32x4ReplaceLane(LaneArg) : [0xfd, 0x20] : "f32x4.replace_lane",
F64x2ExtractLane(LaneArg) : [0xfd, 0x21] : "f64x2.extract_lane",
F64x2ReplaceLane(LaneArg) : [0xfd, 0x22] : "f64x2.replace_lane",
I8x16Eq : [0xfd, 0x23] : "i8x16.eq",
I8x16Ne : [0xfd, 0x24] : "i8x16.ne",
I8x16LtS : [0xfd, 0x25] : "i8x16.lt_s",
I8x16LtU : [0xfd, 0x26] : "i8x16.lt_u",
I8x16GtS : [0xfd, 0x27] : "i8x16.gt_s",
I8x16GtU : [0xfd, 0x28] : "i8x16.gt_u",
I8x16LeS : [0xfd, 0x29] : "i8x16.le_s",
I8x16LeU : [0xfd, 0x2a] : "i8x16.le_u",
I8x16GeS : [0xfd, 0x2b] : "i8x16.ge_s",
I8x16GeU : [0xfd, 0x2c] : "i8x16.ge_u",
I16x8Eq : [0xfd, 0x2d] : "i16x8.eq",
I16x8Ne : [0xfd, 0x2e] : "i16x8.ne",
I16x8LtS : [0xfd, 0x2f] : "i16x8.lt_s",
I16x8LtU : [0xfd, 0x30] : "i16x8.lt_u",
I16x8GtS : [0xfd, 0x31] : "i16x8.gt_s",
I16x8GtU : [0xfd, 0x32] : "i16x8.gt_u",
I16x8LeS : [0xfd, 0x33] : "i16x8.le_s",
I16x8LeU : [0xfd, 0x34] : "i16x8.le_u",
I16x8GeS : [0xfd, 0x35] : "i16x8.ge_s",
I16x8GeU : [0xfd, 0x36] : "i16x8.ge_u",
I32x4Eq : [0xfd, 0x37] : "i32x4.eq",
I32x4Ne : [0xfd, 0x38] : "i32x4.ne",
I32x4LtS : [0xfd, 0x39] : "i32x4.lt_s",
I32x4LtU : [0xfd, 0x3a] : "i32x4.lt_u",
I32x4GtS : [0xfd, 0x3b] : "i32x4.gt_s",
I32x4GtU : [0xfd, 0x3c] : "i32x4.gt_u",
I32x4LeS : [0xfd, 0x3d] : "i32x4.le_s",
I32x4LeU : [0xfd, 0x3e] : "i32x4.le_u",
I32x4GeS : [0xfd, 0x3f] : "i32x4.ge_s",
I32x4GeU : [0xfd, 0x40] : "i32x4.ge_u",
F32x4Eq : [0xfd, 0x41] : "f32x4.eq",
F32x4Ne : [0xfd, 0x42] : "f32x4.ne",
F32x4Lt : [0xfd, 0x43] : "f32x4.lt",
F32x4Gt : [0xfd, 0x44] : "f32x4.gt",
F32x4Le : [0xfd, 0x45] : "f32x4.le",
F32x4Ge : [0xfd, 0x46] : "f32x4.ge",
F64x2Eq : [0xfd, 0x47] : "f64x2.eq",
F64x2Ne : [0xfd, 0x48] : "f64x2.ne",
F64x2Lt : [0xfd, 0x49] : "f64x2.lt",
F64x2Gt : [0xfd, 0x4a] : "f64x2.gt",
F64x2Le : [0xfd, 0x4b] : "f64x2.le",
F64x2Ge : [0xfd, 0x4c] : "f64x2.ge",
V128Not : [0xfd, 0x4d] : "v128.not",
V128And : [0xfd, 0x4e] : "v128.and",
V128Andnot : [0xfd, 0x4f] : "v128.andnot",
V128Or : [0xfd, 0x50] : "v128.or",
V128Xor : [0xfd, 0x51] : "v128.xor",
V128Bitselect : [0xfd, 0x52] : "v128.bitselect",
I8x16Abs : [0xfd, 0x60] : "i8x16.abs",
I8x16Neg : [0xfd, 0x61] : "i8x16.neg",
I8x16AnyTrue : [0xfd, 0x62] : "i8x16.any_true",
I8x16AllTrue : [0xfd, 0x63] : "i8x16.all_true",
I8x16Bitmask : [0xfd, 0x64] : "i8x16.bitmask",
I8x16NarrowI16x8S : [0xfd, 0x65] : "i8x16.narrow_i16x8_s",
I8x16NarrowI16x8U : [0xfd, 0x66] : "i8x16.narrow_i16x8_u",
I8x16Shl : [0xfd, 0x6b] : "i8x16.shl",
I8x16ShrS : [0xfd, 0x6c] : "i8x16.shr_s",
I8x16ShrU : [0xfd, 0x6d] : "i8x16.shr_u",
I8x16Add : [0xfd, 0x6e] : "i8x16.add",
I8x16AddSatS : [0xfd, 0x6f] : "i8x16.add_sat_s",
I8x16AddSatU : [0xfd, 0x70] : "i8x16.add_sat_u",
I8x16Sub : [0xfd, 0x71] : "i8x16.sub",
I8x16SubSatS : [0xfd, 0x72] : "i8x16.sub_sat_s",
I8x16SubSatU : [0xfd, 0x73] : "i8x16.sub_sat_u",
I8x16MinS : [0xfd, 0x76] : "i8x16.min_s",
I8x16MinU : [0xfd, 0x77] : "i8x16.min_u",
I8x16MaxS : [0xfd, 0x78] : "i8x16.max_s",
I8x16MaxU : [0xfd, 0x79] : "i8x16.max_u",
I8x16AvgrU : [0xfd, 0x7b] : "i8x16.avgr_u",
I16x8Abs : [0xfd, 0x80] : "i16x8.abs",
I16x8Neg : [0xfd, 0x81] : "i16x8.neg",
I16x8AnyTrue : [0xfd, 0x82] : "i16x8.any_true",
I16x8AllTrue : [0xfd, 0x83] : "i16x8.all_true",
I16x8Bitmask : [0xfd, 0x84] : "i16x8.bitmask",
I16x8NarrowI32x4S : [0xfd, 0x85] : "i16x8.narrow_i32x4_s",
I16x8NarrowI32x4U : [0xfd, 0x86] : "i16x8.narrow_i32x4_u",
I16x8WidenLowI8x16S : [0xfd, 0x87] : "i16x8.widen_low_i8x16_s",
I16x8WidenHighI8x16S : [0xfd, 0x88] : "i16x8.widen_high_i8x16_s",
I16x8WidenLowI8x16U : [0xfd, 0x89] : "i16x8.widen_low_i8x16_u",
I16x8WidenHighI8x16u : [0xfd, 0x8a] : "i16x8.widen_high_i8x16_u",
I16x8Shl : [0xfd, 0x8b] : "i16x8.shl",
I16x8ShrS : [0xfd, 0x8c] : "i16x8.shr_s",
I16x8ShrU : [0xfd, 0x8d] : "i16x8.shr_u",
I16x8Add : [0xfd, 0x8e] : "i16x8.add",
I16x8AddSatS : [0xfd, 0x8f] : "i16x8.add_sat_s",
I16x8AddSatU : [0xfd, 0x90] : "i16x8.add_sat_u",
I16x8Sub : [0xfd, 0x91] : "i16x8.sub",
I16x8SubSatS : [0xfd, 0x92] : "i16x8.sub_sat_s",
I16x8SubSatU : [0xfd, 0x93] : "i16x8.sub_sat_u",
I16x8Mul : [0xfd, 0x95] : "i16x8.mul",
I16x8MinS : [0xfd, 0x96] : "i16x8.min_s",
I16x8MinU : [0xfd, 0x97] : "i16x8.min_u",
I16x8MaxS : [0xfd, 0x98] : "i16x8.max_s",
I16x8MaxU : [0xfd, 0x99] : "i16x8.max_u",
I16x8AvgrU : [0xfd, 0x9b] : "i16x8.avgr_u",
I32x4Abs : [0xfd, 0xa0] : "i32x4.abs",
I32x4Neg : [0xfd, 0xa1] : "i32x4.neg",
I32x4AnyTrue : [0xfd, 0xa2] : "i32x4.any_true",
I32x4AllTrue : [0xfd, 0xa3] : "i32x4.all_true",
I32x4Bitmask : [0xfd, 0xa4] : "i32x4.bitmask",
I32x4WidenLowI16x8S : [0xfd, 0xa7] : "i32x4.widen_low_i16x8_s",
I32x4WidenHighI16x8S : [0xfd, 0xa8] : "i32x4.widen_high_i16x8_s",
I32x4WidenLowI16x8U : [0xfd, 0xa9] : "i32x4.widen_low_i16x8_u",
I32x4WidenHighI16x8u : [0xfd, 0xaa] : "i32x4.widen_high_i16x8_u",
I32x4Shl : [0xfd, 0xab] : "i32x4.shl",
I32x4ShrS : [0xfd, 0xac] : "i32x4.shr_s",
I32x4ShrU : [0xfd, 0xad] : "i32x4.shr_u",
I32x4Add : [0xfd, 0xae] : "i32x4.add",
I32x4Sub : [0xfd, 0xb1] : "i32x4.sub",
I32x4Mul : [0xfd, 0xb5] : "i32x4.mul",
I32x4MinS : [0xfd, 0xb6] : "i32x4.min_s",
I32x4MinU : [0xfd, 0xb7] : "i32x4.min_u",
I32x4MaxS : [0xfd, 0xb8] : "i32x4.max_s",
I32x4MaxU : [0xfd, 0xb9] : "i32x4.max_u",
I32x4DotI16x8S : [0xfd, 0xba] : "i32x4.dot_i16x8_s",
I64x2Neg : [0xfd, 0xc1] : "i64x2.neg",
I64x2Shl : [0xfd, 0xcb] : "i64x2.shl",
I64x2ShrS : [0xfd, 0xcc] : "i64x2.shr_s",
I64x2ShrU : [0xfd, 0xcd] : "i64x2.shr_u",
I64x2Add : [0xfd, 0xce] : "i64x2.add",
I64x2Sub : [0xfd, 0xd1] : "i64x2.sub",
I64x2Mul : [0xfd, 0xd5] : "i64x2.mul",
F32x4Ceil : [0xfd, 0xd8] : "f32x4.ceil",
F32x4Floor : [0xfd, 0xd9] : "f32x4.floor",
F32x4Trunc : [0xfd, 0xda] : "f32x4.trunc",
F32x4Nearest : [0xfd, 0xdb] : "f32x4.nearest",
F64x2Ceil : [0xfd, 0xdc] : "f64x2.ceil",
F64x2Floor : [0xfd, 0xdd] : "f64x2.floor",
F64x2Trunc : [0xfd, 0xde] : "f64x2.trunc",
F64x2Nearest : [0xfd, 0xdf] : "f64x2.nearest",
F32x4Abs : [0xfd, 0xe0] : "f32x4.abs",
F32x4Neg : [0xfd, 0xe1] : "f32x4.neg",
F32x4Sqrt : [0xfd, 0xe3] : "f32x4.sqrt",
F32x4Add : [0xfd, 0xe4] : "f32x4.add",
F32x4Sub : [0xfd, 0xe5] : "f32x4.sub",
F32x4Mul : [0xfd, 0xe6] : "f32x4.mul",
F32x4Div : [0xfd, 0xe7] : "f32x4.div",
F32x4Min : [0xfd, 0xe8] : "f32x4.min",
F32x4Max : [0xfd, 0xe9] : "f32x4.max",
F32x4PMin : [0xfd, 0xea] : "f32x4.pmin",
F32x4PMax : [0xfd, 0xeb] : "f32x4.pmax",
F64x2Abs : [0xfd, 0xec] : "f64x2.abs",
F64x2Neg : [0xfd, 0xed] : "f64x2.neg",
F64x2Sqrt : [0xfd, 0xef] : "f64x2.sqrt",
F64x2Add : [0xfd, 0xf0] : "f64x2.add",
F64x2Sub : [0xfd, 0xf1] : "f64x2.sub",
F64x2Mul : [0xfd, 0xf2] : "f64x2.mul",
F64x2Div : [0xfd, 0xf3] : "f64x2.div",
F64x2Min : [0xfd, 0xf4] : "f64x2.min",
F64x2Max : [0xfd, 0xf5] : "f64x2.max",
F64x2PMin : [0xfd, 0xf6] : "f64x2.pmin",
F64x2PMax : [0xfd, 0xf7] : "f64x2.pmax",
I32x4TruncSatF32x4S : [0xfd, 0xf8] : "i32x4.trunc_sat_f32x4_s",
I32x4TruncSatF32x4U : [0xfd, 0xf9] : "i32x4.trunc_sat_f32x4_u",
F32x4ConvertI32x4S : [0xfd, 0xfa] : "f32x4.convert_i32x4_s",
F32x4ConvertI32x4U : [0xfd, 0xfb] : "f32x4.convert_i32x4_u",
V128Load32Zero(MemArg<4>) : [0xfd, 0xfc] : "v128.load32_zero",
V128Load64Zero(MemArg<8>) : [0xfd, 0xfd] : "v128.load64_zero",
// Exception handling proposal
CatchAll : [0x05] : "catch_all", // Reuses the else opcode.
Try(BlockType<'a>) : [0x06] : "try",
Catch(ast::Index<'a>) : [0x07] : "catch",
Throw(ast::Index<'a>) : [0x08] : "throw",
Rethrow(ast::Index<'a>) : [0x09] : "rethrow",
Unwind : [0x0a] : "unwind",
}
}
/// Extra information associated with block-related instructions.
///
/// Carries an optional label for the block plus the type signature the block
/// is expected to have.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct BlockType<'a> {
    pub label: Option<ast::Id<'a>>,
    pub ty: ast::TypeUse<'a, ast::FunctionType<'a>>,
}
impl<'a> Parse<'a> for BlockType<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // The optional label comes first, followed by the block's type use
        // (parsed without parameter names, then widened for storage).
        let label = parser.parse()?;
        let ty = parser.parse::<ast::TypeUse<'a, ast::FunctionTypeNoNames<'a>>>()?;
        Ok(BlockType {
            label,
            ty: ty.into(),
        })
    }
}
/// Extra information associated with the func.bind instruction.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct FuncBindType<'a> {
    pub ty: ast::TypeUse<'a, ast::FunctionType<'a>>,
}
impl<'a> Parse<'a> for FuncBindType<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // Parse the type use without parameter names, then convert it into
        // the named representation stored on the struct.
        let ty = parser.parse::<ast::TypeUse<'a, ast::FunctionTypeNoNames<'a>>>()?;
        Ok(FuncBindType { ty: ty.into() })
    }
}
/// Extra information associated with the let instruction.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct LetType<'a> {
    pub block: BlockType<'a>,
    pub locals: Vec<ast::Local<'a>>,
}
impl<'a> Parse<'a> for LetType<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // A `let` opens with a standard block type and is followed by any
        // number of local declarations.
        let block = parser.parse()?;
        let locals = ast::Local::parse_remainder(parser)?;
        Ok(LetType { block, locals })
    }
}
/// Extra information associated with the `br_table` instruction.
#[allow(missing_docs)]
#[derive(Debug)]
pub struct BrTableIndices<'a> {
    pub labels: Vec<ast::Index<'a>>,
    pub default: ast::Index<'a>,
}
impl<'a> Parse<'a> for BrTableIndices<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // Greedily collect every index that follows (at least one is
        // required); the final index is the default target and the rest form
        // the jump table.
        let mut indices = Vec::new();
        loop {
            indices.push(parser.parse()?);
            if !parser.peek::<ast::Index>() {
                break;
            }
        }
        let default = indices.pop().unwrap();
        Ok(BrTableIndices {
            labels: indices,
            default,
        })
    }
}
/// Payload for lane-related instructions. Unsigned with no + prefix.
#[derive(Debug)]
pub struct LaneArg {
    /// The lane argument.
    pub lane: u8,
}
impl<'a> Parse<'a> for LaneArg {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let lane = parser.step(|c| {
            if let Some((i, rest)) = c.integer() {
                // Lane indices must be plain unsigned integers: any explicit
                // sign (including `+`) is rejected.
                if i.sign().is_none() {
                    let (src, radix) = i.val();
                    // Lanes fit in a `u8`; anything larger is malformed.
                    let val = u8::from_str_radix(src, radix)
                        .map_err(|_| c.error("malformed lane index"))?;
                    Ok((val, rest))
                } else {
                    Err(c.error("unexpected token"))
                }
            } else {
                Err(c.error("expected a lane index"))
            }
        })?;
        Ok(LaneArg { lane })
    }
}
/// Payload for memory-related instructions indicating offset/alignment of
/// memory accesses.
#[derive(Debug)]
pub struct MemArg<'a> {
    /// The alignment of this access.
    ///
    /// This is not stored as a log, this is the actual alignment (e.g. 1, 2, 4,
    /// 8, etc).
    pub align: u32,
    /// The offset, in bytes of this access.
    pub offset: u32,
    /// The memory index we're accessing
    pub memory: ast::Index<'a>,
}
impl<'a> MemArg<'a> {
    // Parses the `memidx? offset=N? align=N?` immediates of a memory access,
    // falling back to memory 0, offset 0, and `default_align` when omitted.
    fn parse(parser: Parser<'a>, default_align: u32) -> Result<Self> {
        // Parses an optional `name=N` keyword (e.g. `offset=8`, `align=0x10`).
        // Returns `Ok(None)` and consumes nothing when the next token isn't
        // such a keyword; `N` may be decimal or `0x`-prefixed hexadecimal.
        fn parse_field(name: &str, parser: Parser<'_>) -> Result<Option<u32>> {
            parser.step(|c| {
                let (kw, rest) = match c.keyword() {
                    Some(p) => p,
                    None => return Ok((None, c)),
                };
                if !kw.starts_with(name) {
                    return Ok((None, c));
                }
                // Split off the field name and require a literal `=` next.
                let kw = &kw[name.len()..];
                if !kw.starts_with("=") {
                    return Ok((None, c));
                }
                let num = &kw[1..];
                let num = if num.starts_with("0x") {
                    match u32::from_str_radix(&num[2..], 16) {
                        Ok(n) => n,
                        Err(_) => return Err(c.error("i32 constant out of range")),
                    }
                } else {
                    match num.parse() {
                        Ok(n) => n,
                        Err(_) => return Err(c.error("i32 constant out of range")),
                    }
                };
                Ok((Some(num), rest))
            })
        }
        // An omitted memory index refers to memory 0.
        let memory = parser
            .parse::<Option<_>>()?
            .unwrap_or(ast::Index::Num(0, parser.prev_span()));
        let offset = parse_field("offset", parser)?.unwrap_or(0);
        let align = match parse_field("align", parser)? {
            // An explicit alignment must be a power of two; note this is the
            // actual byte alignment, not its log as used in the binary format.
            Some(n) if !n.is_power_of_two() => {
                return Err(parser.error("alignment must be a power of two"))
            }
            n => n.unwrap_or(default_align),
        };
        Ok(MemArg {
            offset,
            align,
            memory,
        })
    }
}
/// Extra data associated with the `call_indirect` instruction.
#[derive(Debug)]
pub struct CallIndirect<'a> {
    /// The table that this call is going to be indexing.
    pub table: ast::Index<'a>,
    /// The type signature that this `call_indirect` instruction is using.
    pub ty: ast::TypeUse<'a, ast::FunctionType<'a>>,
}
impl<'a> Parse<'a> for CallIndirect<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let default_span = parser.prev_span();
        // The official test suite puts the table identifier before the type
        // use while wabt's test suite puts it after; accept either ordering.
        let before: Option<_> = parser.parse()?;
        let ty = parser.parse::<ast::TypeUse<'a, ast::FunctionTypeNoNames<'a>>>()?;
        let table = match before {
            Some(table) => table,
            None => match parser.parse()? {
                Some(table) => table,
                // No table in either position: default to table 0.
                None => ast::Index::Num(0, default_span),
            },
        };
        Ok(CallIndirect {
            table,
            ty: ty.into(),
        })
    }
}
/// Extra data associated with the `table.init` instruction
#[derive(Debug)]
pub struct TableInit<'a> {
    /// The index of the table we're copying into.
    pub table: ast::Index<'a>,
    /// The index of the element segment we're copying into a table.
    pub elem: ast::Index<'a>,
}
impl<'a> Parse<'a> for TableInit<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let span = parser.prev_span();
        // With two indices the first names the table and the second the
        // element segment; with one index only the segment is given and the
        // table defaults to 0.
        let first = parser.parse()?;
        let (table, elem) = if let Some(elem) = parser.parse()? {
            (first, elem)
        } else {
            (ast::Index::Num(0, span), first)
        };
        Ok(TableInit { table, elem })
    }
}
/// Extra data associated with the `table.copy` instruction.
#[derive(Debug)]
pub struct TableCopy<'a> {
    /// The index of the destination table to copy into.
    pub dst: ast::Index<'a>,
    /// The index of the source table to copy from.
    pub src: ast::Index<'a>,
}
impl<'a> Parse<'a> for TableCopy<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // Either both table indices are present (destination first) or
        // neither is, in which case both default to table 0.
        let (dst, src) = match parser.parse()? {
            Some(dst) => (dst, parser.parse()?),
            None => {
                let span = parser.prev_span();
                (ast::Index::Num(0, span), ast::Index::Num(0, span))
            }
        };
        Ok(TableCopy { dst, src })
    }
}
/// Extra data associated with unary table instructions.
#[derive(Debug)]
pub struct TableArg<'a> {
    /// The index of the table argument.
    pub dst: ast::Index<'a>,
}
impl<'a> Parse<'a> for TableArg<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // An omitted index refers to table 0.
        let dst = match parser.parse()? {
            Some(idx) => idx,
            None => ast::Index::Num(0, parser.prev_span()),
        };
        Ok(TableArg { dst })
    }
}
/// Extra data associated with unary memory instructions.
#[derive(Debug)]
pub struct MemoryArg<'a> {
    /// The index of the memory space.
    pub mem: ast::Index<'a>,
}
impl<'a> Parse<'a> for MemoryArg<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // An omitted index refers to memory 0.
        let mem = match parser.parse()? {
            Some(idx) => idx,
            None => ast::Index::Num(0, parser.prev_span()),
        };
        Ok(MemoryArg { mem })
    }
}
/// Extra data associated with the `memory.init` instruction
#[derive(Debug)]
pub struct MemoryInit<'a> {
    /// The index of the data segment we're copying into memory.
    pub data: ast::Index<'a>,
    /// The index of the memory we're copying into.
    pub mem: ast::Index<'a>,
}
impl<'a> Parse<'a> for MemoryInit<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // The data segment index is mandatory; the memory index is optional
        // and defaults to memory 0.
        let data = parser.parse()?;
        let mem = match parser.parse::<Option<_>>()? {
            Some(idx) => idx,
            None => ast::Index::Num(0, parser.prev_span()),
        };
        Ok(MemoryInit { data, mem })
    }
}
/// Extra data associated with the `memory.copy` instruction
#[derive(Debug)]
pub struct MemoryCopy<'a> {
    /// The index of the memory we're copying from.
    pub src: ast::Index<'a>,
    /// The index of the memory we're copying to.
    pub dst: ast::Index<'a>,
}
impl<'a> Parse<'a> for MemoryCopy<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // When indices are written the destination comes first in the text;
        // when omitted both source and destination default to memory 0.
        let (src, dst) = if let Some(dst) = parser.parse()? {
            let src = parser.parse()?;
            (src, dst)
        } else {
            let zero = ast::Index::Num(0, parser.prev_span());
            (zero, zero)
        };
        Ok(MemoryCopy { src, dst })
    }
}
/// Extra data associated with the `struct.get/set` instructions
#[derive(Debug)]
pub struct StructAccess<'a> {
    /// The index of the struct type we're accessing.
    pub r#struct: ast::Index<'a>,
    /// The index of the field of the struct we're accessing
    pub field: ast::Index<'a>,
}
impl<'a> Parse<'a> for StructAccess<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // Both indices are required: the struct type index precedes the
        // field index.
        let r#struct = parser.parse()?;
        let field = parser.parse()?;
        Ok(StructAccess { r#struct, field })
    }
}
/// Extra data associated with the `struct.narrow` instruction
#[derive(Debug)]
pub struct StructNarrow<'a> {
    /// The type of the struct we're casting from
    pub from: ast::ValType<'a>,
    /// The type of the struct we're casting to
    pub to: ast::ValType<'a>,
}
impl<'a> Parse<'a> for StructNarrow<'a> {
    fn parse(parser: Parser<'a>) -> Result<Self> {
        // The source value type precedes the target value type.
        let from = parser.parse()?;
        let to = parser.parse()?;
        Ok(StructNarrow { from, to })
    }
}
/// Different ways to specify a `v128.const` instruction
#[derive(Debug)]
#[rustfmt::skip]
#[allow(missing_docs)]
pub enum V128Const {
    I8x16([i8; 16]),          // 16 lanes of 8-bit integers
    I16x8([i16; 8]),          // 8 lanes of 16-bit integers
    I32x4([i32; 4]),          // 4 lanes of 32-bit integers
    I64x2([i64; 2]),          // 2 lanes of 64-bit integers
    F32x4([ast::Float32; 4]), // 4 lanes of 32-bit floats
    F64x2([ast::Float64; 2]), // 2 lanes of 64-bit floats
}
impl V128Const {
    /// Returns the raw little-endian byte sequence used to represent this
    /// `v128` constant.
    ///
    /// This is typically suitable for encoding as the payload of the
    /// `v128.const` instruction.
    pub fn to_le_bytes(&self) -> [u8; 16] {
        let mut bytes = [0; 16];
        // Lanes are laid out in order, each lane encoded little-endian.
        match self {
            V128Const::I8x16(arr) => {
                for (dst, lane) in bytes.iter_mut().zip(arr.iter()) {
                    *dst = *lane as u8;
                }
            }
            V128Const::I16x8(arr) => {
                for (chunk, lane) in bytes.chunks_exact_mut(2).zip(arr.iter()) {
                    chunk.copy_from_slice(&lane.to_le_bytes());
                }
            }
            V128Const::I32x4(arr) => {
                for (chunk, lane) in bytes.chunks_exact_mut(4).zip(arr.iter()) {
                    chunk.copy_from_slice(&lane.to_le_bytes());
                }
            }
            V128Const::I64x2(arr) => {
                for (chunk, lane) in bytes.chunks_exact_mut(8).zip(arr.iter()) {
                    chunk.copy_from_slice(&lane.to_le_bytes());
                }
            }
            V128Const::F32x4(arr) => {
                // Floats are stored by their raw bit pattern.
                for (chunk, lane) in bytes.chunks_exact_mut(4).zip(arr.iter()) {
                    chunk.copy_from_slice(&lane.bits.to_le_bytes());
                }
            }
            V128Const::F64x2(arr) => {
                for (chunk, lane) in bytes.chunks_exact_mut(8).zip(arr.iter()) {
                    chunk.copy_from_slice(&lane.bits.to_le_bytes());
                }
            }
        }
        bytes
    }
}
impl<'a> Parse<'a> for V128Const {
    // The shape keyword (`i8x16`, `f64x2`, ...) selects the variant and
    // determines how many lane literals follow; all lanes are required.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let mut l = parser.lookahead1();
        if l.peek::<kw::i8x16>() {
            parser.parse::<kw::i8x16>()?;
            Ok(V128Const::I8x16([
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
            ]))
        } else if l.peek::<kw::i16x8>() {
            parser.parse::<kw::i16x8>()?;
            Ok(V128Const::I16x8([
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
            ]))
        } else if l.peek::<kw::i32x4>() {
            parser.parse::<kw::i32x4>()?;
            Ok(V128Const::I32x4([
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
            ]))
        } else if l.peek::<kw::i64x2>() {
            parser.parse::<kw::i64x2>()?;
            Ok(V128Const::I64x2([parser.parse()?, parser.parse()?]))
        } else if l.peek::<kw::f32x4>() {
            parser.parse::<kw::f32x4>()?;
            Ok(V128Const::F32x4([
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
            ]))
        } else if l.peek::<kw::f64x2>() {
            parser.parse::<kw::f64x2>()?;
            Ok(V128Const::F64x2([parser.parse()?, parser.parse()?]))
        } else {
            // None of the recognized shape keywords matched; report the
            // lookahead's accumulated expectations.
            Err(l.error())
        }
    }
}
/// Lanes being shuffled in the `i8x16.shuffle` instruction
#[derive(Debug)]
pub struct I8x16Shuffle {
    #[allow(missing_docs)]
    pub lanes: [u8; 16],
}
impl<'a> Parse<'a> for I8x16Shuffle {
    /// Parses the sixteen lane-index immediates that follow the
    /// `i8x16.shuffle` opcode, in order.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        Ok(I8x16Shuffle {
            lanes: [
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
                parser.parse()?,
            ],
        })
    }
}
/// Payload of the `select` instructions
#[derive(Debug)]
pub struct SelectTypes<'a> {
    #[allow(missing_docs)]
    pub tys: Option<Vec<ast::ValType<'a>>>,
}
impl<'a> Parse<'a> for SelectTypes<'a> {
    /// Parses an optional run of `(result <valtype>*)` annotations after a
    /// `select`; `tys` stays `None` when no annotation is present.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        let mut tys = None;
        // NOTE(review): when several `(result ...)` groups appear, each loop
        // iteration *replaces* `tys`, so only the last group survives. This
        // matches the code as written upstream, but confirm against the wasm
        // text-format grammar if multi-group inputs ever matter.
        while parser.peek2::<kw::result>() {
            let mut list = Vec::new();
            parser.parens(|p| {
                p.parse::<kw::result>()?;
                while !p.is_empty() {
                    list.push(p.parse()?);
                }
                Ok(())
            })?;
            tys = Some(list);
        }
        Ok(SelectTypes { tys })
    }
}
/// Payload of the `br_on_exn` instruction
#[derive(Debug)]
#[allow(missing_docs)]
pub struct BrOnExn<'a> {
    pub label: ast::Index<'a>,
    pub exn: ast::Index<'a>,
}
impl<'a> Parse<'a> for BrOnExn<'a> {
    /// Parses the two immediates in their textual order: the branch label
    /// first, then the exception index.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        Ok(BrOnExn {
            label: parser.parse()?,
            exn: parser.parse()?,
        })
    }
}
/// Payload of the `br_on_cast` instruction
#[derive(Debug)]
#[allow(missing_docs)]
pub struct BrOnCast<'a> {
    pub label: ast::Index<'a>,
    pub val: HeapType<'a>,
    pub rtt: HeapType<'a>,
}
impl<'a> Parse<'a> for BrOnCast<'a> {
    /// Parses the three immediates in their textual order: branch label,
    /// value heap type, then RTT heap type.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        Ok(BrOnCast {
            label: parser.parse()?,
            val: parser.parse()?,
            rtt: parser.parse()?,
        })
    }
}
/// Payload of the `rtt.sub` instruction
#[derive(Debug)]
#[allow(missing_docs)]
pub struct RTTSub<'a> {
    pub depth: u32,
    pub input_rtt: HeapType<'a>,
    pub output_rtt: HeapType<'a>,
}
impl<'a> Parse<'a> for RTTSub<'a> {
    /// Parses the three immediates in their textual order: subtype depth,
    /// input RTT heap type, then output RTT heap type.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        Ok(RTTSub {
            depth: parser.parse()?,
            input_rtt: parser.parse()?,
            output_rtt: parser.parse()?,
        })
    }
}
/// Payload of the `ref.test/cast` instruction
#[derive(Debug)]
#[allow(missing_docs)]
pub struct RefTest<'a> {
    pub val: HeapType<'a>,
    pub rtt: HeapType<'a>,
}
impl<'a> Parse<'a> for RefTest<'a> {
    /// Parses the two immediates in their textual order: the value heap type
    /// first, then the RTT heap type.
    fn parse(parser: Parser<'a>) -> Result<Self> {
        Ok(RefTest {
            val: parser.parse()?,
            rtt: parser.parse()?,
        })
    }
}
| 40.07164 | 108 | 0.523411 |
5605cfe23a5cbae46684369362c1bd86d21345d0 | 327 | use htm::triangle_to_halfspace;
use nalgebra::Vector3;
fn main() {
    // Three points forming a spherical triangle; the first two share x/y and
    // differ only in the sign of z.
    let a = Vector3::new(0.5, 0.5, (0.5f32).sqrt());
    let b = Vector3::new(0.5, 0.5, -(0.5f32).sqrt());
    let c = Vector3::new(0.0, 1.0, 0.0);
    // Convert the triangle to its halfspace representation and report it.
    let hs = triangle_to_halfspace(&[a, b, c]);
    println!("The halfspace is {}", hs);
}
| 29.727273 | 57 | 0.602446 |
d95a74be77696018fffe42c381b30c9885af4e69 | 37,974 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use build;
use build::scope::{CachedBlock, DropKind};
use hair::cx::Cx;
use hair::{LintLevel, BindingMode, PatternKind};
use rustc::hir;
use rustc::hir::Node;
use rustc::hir::def_id::{DefId, LocalDefId};
use rustc::middle::region;
use rustc::mir::*;
use rustc::mir::visit::{MutVisitor, TyContext};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc::util::nodemap::NodeMap;
use rustc_target::spec::PanicStrategy;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use shim;
use std::mem;
use std::u32;
use rustc_target::spec::abi::Abi;
use syntax::ast;
use syntax::attr::{self, UnwindAttr};
use syntax::symbol::keywords;
use syntax_pos::Span;
use transform::MirSource;
use util as mir_util;
use super::lints;
/// Construct the MIR for a given def-id.
///
/// Panics (via `unwrap`) if `def_id` is not local to this crate, since MIR
/// is only built from local HIR bodies.
pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'tcx> {
    let id = tcx.hir.as_local_node_id(def_id).unwrap();
    // Figure out what primary body this item has.
    let (body_id, return_ty_span) = match tcx.hir.get(id) {
        // Tuple-struct/tuple-variant constructors have no HIR body of their
        // own; synthesize a shim for them instead.
        Node::Variant(variant) =>
            return create_constructor_shim(tcx, id, &variant.node.data),
        Node::StructCtor(ctor) =>
            return create_constructor_shim(tcx, id, ctor),
        Node::Expr(hir::Expr { node: hir::ExprKind::Closure(_, decl, body_id, _, _), .. })
        | Node::Item(hir::Item { node: hir::ItemKind::Fn(decl, _, _, body_id), .. })
        | Node::ImplItem(
            hir::ImplItem {
                node: hir::ImplItemKind::Method(hir::MethodSig { decl, .. }, body_id),
                ..
            }
        )
        | Node::TraitItem(
            hir::TraitItem {
                node: hir::TraitItemKind::Method(
                    hir::MethodSig { decl, .. },
                    hir::TraitMethod::Provided(body_id),
                ),
                ..
            }
        ) => {
            // Function-like items: use the span of the declared return type.
            (*body_id, decl.output.span())
        }
        Node::Item(hir::Item { node: hir::ItemKind::Static(ty, _, body_id), .. })
        | Node::Item(hir::Item { node: hir::ItemKind::Const(ty, body_id), .. })
        | Node::ImplItem(hir::ImplItem { node: hir::ImplItemKind::Const(ty, body_id), .. })
        | Node::TraitItem(
            hir::TraitItem { node: hir::TraitItemKind::Const(ty, Some(body_id)), .. }
        ) => {
            // Constant-like items: use the span of the declared type.
            (*body_id, ty.span)
        }
        Node::AnonConst(hir::AnonConst { body, id, .. }) => {
            (*body, tcx.hir.span(*id))
        }
        _ => span_bug!(tcx.hir.span(id), "can't build MIR for {:?}", def_id),
    };
    tcx.infer_ctxt().enter(|infcx| {
        let cx = Cx::new(&infcx, id);
        let mut mir = if cx.tables().tainted_by_errors {
            // Type-checking failed; build a trivial, immediately-unreachable
            // body rather than lowering broken HIR.
            build::construct_error(cx, body_id)
        } else if let hir::BodyOwnerKind::Fn = cx.body_owner_kind {
            // fetch the fully liberated fn signature (that is, all bound
            // types/lifetimes replaced)
            let fn_hir_id = tcx.hir.node_to_hir_id(id);
            let fn_sig = cx.tables().liberated_fn_sigs()[fn_hir_id].clone();
            let ty = tcx.type_of(tcx.hir.local_def_id(id));
            let mut abi = fn_sig.abi;
            // Closures and generators receive an extra, implicit first
            // argument (the environment / generator state).
            let implicit_argument = match ty.sty {
                ty::Closure(..) => {
                    // HACK(eddyb) Avoid having RustCall on closures,
                    // as it adds unnecessary (and wrong) auto-tupling.
                    abi = Abi::Rust;
                    Some(ArgInfo(liberated_closure_env_ty(tcx, id, body_id), None, None, None))
                }
                ty::Generator(..) => {
                    let gen_ty = tcx.body_tables(body_id).node_id_to_type(fn_hir_id);
                    Some(ArgInfo(gen_ty, None, None, None))
                }
                _ => None,
            };
            // FIXME: safety in closures
            let safety = match fn_sig.unsafety {
                hir::Unsafety::Normal => Safety::Safe,
                hir::Unsafety::Unsafe => Safety::FnUnsafe,
            };
            let body = tcx.hir.body(body_id);
            // Pair each declared argument with its type span, its pattern,
            // and (for the first argument) any implicit-self kind.
            let explicit_arguments =
                body.arguments
                    .iter()
                    .enumerate()
                    .map(|(index, arg)| {
                        let owner_id = tcx.hir.body_owner(body_id);
                        let opt_ty_info;
                        let self_arg;
                        if let Some(ref fn_decl) = tcx.hir.fn_decl(owner_id) {
                            let ty_hir_id = fn_decl.inputs[index].hir_id;
                            let ty_span = tcx.hir.span(tcx.hir.hir_to_node_id(ty_hir_id));
                            opt_ty_info = Some(ty_span);
                            self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
                                match fn_decl.implicit_self {
                                    hir::ImplicitSelfKind::Imm => Some(ImplicitSelfKind::Imm),
                                    hir::ImplicitSelfKind::Mut => Some(ImplicitSelfKind::Mut),
                                    hir::ImplicitSelfKind::ImmRef => Some(ImplicitSelfKind::ImmRef),
                                    hir::ImplicitSelfKind::MutRef => Some(ImplicitSelfKind::MutRef),
                                    _ => None,
                                }
                            } else {
                                None
                            };
                        } else {
                            opt_ty_info = None;
                            self_arg = None;
                        }
                        ArgInfo(fn_sig.inputs()[index], opt_ty_info, Some(&*arg.pat), self_arg)
                    });
            let arguments = implicit_argument.into_iter().chain(explicit_arguments);
            let (yield_ty, return_ty) = if body.is_generator {
                let gen_sig = match ty.sty {
                    ty::Generator(gen_def_id, gen_substs, ..) =>
                        gen_substs.sig(gen_def_id, tcx),
                    _ =>
                        span_bug!(tcx.hir.span(id), "generator w/o generator type: {:?}", ty),
                };
                (Some(gen_sig.yield_ty), gen_sig.return_ty)
            } else {
                (None, fn_sig.output())
            };
            build::construct_fn(cx, id, arguments, safety, abi,
                                return_ty, yield_ty, return_ty_span, body)
        } else {
            build::construct_const(cx, body_id, return_ty_span)
        };
        // Convert the Mir to global types.
        let mut globalizer = GlobalizeMir {
            tcx,
            span: mir.span
        };
        globalizer.visit_mir(&mut mir);
        // `GlobalizeMir` has just lifted every type/region/const/substs into
        // the global tcx (or ICE'd), so this transmute only rebrands the
        // no-longer-used inference-context lifetime; see `GlobalizeMir` docs.
        let mir = unsafe {
            mem::transmute::<Mir, Mir<'tcx>>(mir)
        };
        mir_util::dump_mir(tcx, None, "mir_map", &0,
                           MirSource::item(def_id), &mir, |_, _| Ok(()) );
        lints::check(tcx, &mir, def_id);
        mir
    })
}
/// A pass to lift all the types and substitutions in a Mir
/// to the global tcx. Sadly, we don't have a "folder" that
/// can change 'tcx so we have to transmute afterwards.
struct GlobalizeMir<'a, 'gcx: 'a> {
    tcx: TyCtxt<'a, 'gcx, 'gcx>,
    // Span of the MIR being globalized; used for ICE reporting when a value
    // still contains inference types/regions.
    span: Span
}
impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> {
    // Each visitor below attempts to lift the visited value into the global
    // tcx and replaces it in place; anything that still contains inference
    // types/regions at this point is a compiler bug.
    fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
        match self.tcx.lift(ty) {
            Some(lifted) => *ty = lifted,
            None => span_bug!(self.span,
                              "found type `{:?}` with inference types/regions in MIR",
                              ty),
        }
    }
    fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
        match self.tcx.lift(region) {
            Some(lifted) => *region = lifted,
            None => span_bug!(self.span,
                              "found region `{:?}` with inference types/regions in MIR",
                              region),
        }
    }
    fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _: Location) {
        match self.tcx.lift(constant) {
            Some(lifted) => *constant = lifted,
            None => span_bug!(self.span,
                              "found constant `{:?}` with inference types/regions in MIR",
                              constant),
        }
    }
    fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>, _: Location) {
        match self.tcx.lift(substs) {
            Some(lifted) => *substs = lifted,
            None => span_bug!(self.span,
                              "found substs `{:?}` with inference types/regions in MIR",
                              substs),
        }
    }
}
/// Builds the MIR for a tuple-struct or tuple-variant constructor by
/// synthesizing a shim body (such constructors have no HIR body to lower).
/// ICEs on unit or braced variants, which have no constructor function.
fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                     ctor_id: ast::NodeId,
                                     v: &'tcx hir::VariantData)
                                     -> Mir<'tcx>
{
    let span = tcx.hir.span(ctor_id);
    if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
        tcx.infer_ctxt().enter(|infcx| {
            let mut mir = shim::build_adt_ctor(&infcx, ctor_id, fields, span);
            // Convert the Mir to global types.
            let tcx = infcx.tcx.global_tcx();
            let mut globalizer = GlobalizeMir {
                tcx,
                span: mir.span
            };
            globalizer.visit_mir(&mut mir);
            // As in `mir_build`: all types/regions were just lifted into the
            // global tcx, so the transmute only rebrands the lifetime.
            let mir = unsafe {
                mem::transmute::<Mir, Mir<'tcx>>(mir)
            };
            mir_util::dump_mir(tcx, None, "mir_map", &0,
                               MirSource::item(tcx.hir.local_def_id(ctor_id)),
                               &mir, |_, _| Ok(()) );
            mir
        })
    } else {
        span_bug!(span, "attempting to create MIR for non-tuple variant {:?}", v);
    }
}
///////////////////////////////////////////////////////////////////////////
// BuildMir -- walks a crate, looking for fn items and methods to build MIR from
/// Computes the type of a closure's implicit environment argument, with its
/// late-bound regions liberated.
fn liberated_closure_env_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                            closure_expr_id: ast::NodeId,
                                            body_id: hir::BodyId)
                                            -> Ty<'tcx> {
    // Look up the type that was inferred for the closure expression itself.
    let closure_expr_hir_id = tcx.hir.node_to_hir_id(closure_expr_id);
    let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_hir_id);
    let (closure_def_id, closure_substs) =
        if let ty::Closure(def_id, substs) = closure_ty.sty {
            (def_id, substs)
        } else {
            bug!("closure expr does not have closure type: {:?}", closure_ty)
        };
    let env_ty = tcx.closure_env_ty(closure_def_id, closure_substs).unwrap();
    tcx.liberate_late_bound_regions(closure_def_id, &env_ty)
}
#[derive(Debug, PartialEq, Eq)]
/// Describes how the block currently being lowered is used by its
/// surrounding expression; frames are stacked in `BlockContext`.
pub enum BlockFrame {
    /// Evaluation is currently within a statement.
    ///
    /// Examples include:
    /// 1. `EXPR;`
    /// 2. `let _ = EXPR;`
    /// 3. `let x = EXPR;`
    Statement {
        /// If true, then statement discards result from evaluating
        /// the expression (such as examples 1 and 2 above).
        ignores_expr_result: bool
    },
    /// Evaluation is currently within the tail expression of a block.
    ///
    /// Example: `{ STMT_1; STMT_2; EXPR }`
    TailExpr {
        /// If true, then the surrounding context of the block ignores
        /// the result of evaluating the block's tail expression.
        ///
        /// Example: `let _ = { STMT_1; EXPR };`
        tail_result_is_ignored: bool
    },
    /// Generic mark meaning that the block occurred as a subexpression
    /// where the result might be used.
    ///
    /// Examples: `foo(EXPR)`, `match EXPR { ... }`
    SubExpr,
}
impl BlockFrame {
    /// Returns true only for `TailExpr` frames.
    ///
    /// Deliberately matches every variant explicitly (no `_` arm) so adding
    /// a new variant forces this method to be revisited.
    fn is_tail_expr(&self) -> bool {
        match self {
            BlockFrame::TailExpr { .. } => true,
            BlockFrame::Statement { .. } | BlockFrame::SubExpr => false,
        }
    }
    /// Returns true only for `Statement` frames; same exhaustiveness
    /// discipline as `is_tail_expr`.
    fn is_statement(&self) -> bool {
        match self {
            BlockFrame::Statement { .. } => true,
            BlockFrame::TailExpr { .. } | BlockFrame::SubExpr => false,
        }
    }
}
#[derive(Debug)]
/// Stack of `BlockFrame`s recording how the block currently being lowered is
/// used by its surrounding expression context; see `Builder::block_context`.
struct BlockContext(Vec<BlockFrame>);
struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    /// the HAIR lowering context for the body being built
    hir: Cx<'a, 'gcx, 'tcx>,
    /// the control-flow graph of basic blocks under construction
    cfg: CFG<'tcx>,
    /// span of the body being lowered; used for diagnostics
    fn_span: Span,
    /// number of arguments; argument locals occupy the indices immediately
    /// after the return place (see `args_and_body`)
    arg_count: usize,
    /// the current set of scopes, updated as we traverse;
    /// see the `scope` module for more details
    scopes: Vec<scope::Scope<'tcx>>,
    /// the block-context: each time we build the code within an hair::Block,
    /// we push a frame here tracking whether we are building a statement or
    /// if we are pushing the tail expression of the block. This is used to
    /// embed information in generated temps about whether they were created
    /// for a block tail expression or not.
    ///
    /// It would be great if we could fold this into `self.scopes`
    /// somehow; but right now I think that is very tightly tied to
    /// the code generation in ways that we cannot (or should not)
    /// start just throwing new entries onto that vector in order to
    /// distinguish the context of EXPR1 from the context of EXPR2 in
    /// `{ STMTS; EXPR1 } + EXPR2`
    block_context: BlockContext,
    /// The current unsafe block in scope, even if it is hidden by
    /// a PushUnsafeBlock
    unpushed_unsafe: Safety,
    /// The number of `push_unsafe_block` levels in scope
    push_unsafe_count: usize,
    /// the current set of breakables; see the `scope` module for more
    /// details
    breakable_scopes: Vec<scope::BreakableScope<'tcx>>,
    /// the vector of all scopes that we have created thus far;
    /// we track this for debuginfo later
    source_scopes: IndexVec<SourceScope, SourceScopeData>,
    source_scope_local_data: IndexVec<SourceScope, SourceScopeLocalData>,
    /// the source scope new statements are currently attributed to
    source_scope: SourceScope,
    /// the guard-context: each time we build the guard expression for
    /// a match arm, we push onto this stack, and then pop when we
    /// finish building it.
    guard_context: Vec<GuardFrame>,
    /// Maps node ids of variable bindings to the `Local`s created for them.
    /// (A match binding can have two locals; the 2nd is for the arm's guard.)
    var_indices: NodeMap<LocalsForNode>,
    /// all locals declared so far; index 0 is the return place (see `new`)
    local_decls: IndexVec<Local, LocalDecl<'tcx>>,
    /// debuginfo for the captured upvars when building a closure body
    upvar_decls: Vec<UpvarDecl>,
    /// lazily-created temporary of unit type, shared via `get_unit_temp`
    unit_temp: Option<Place<'tcx>>,
    /// cached block with the RESUME terminator; this is created
    /// when first set of cleanups are built.
    cached_resume_block: Option<BasicBlock>,
    /// cached block with the RETURN terminator
    cached_return_block: Option<BasicBlock>,
    /// cached block with the UNREACHABLE terminator
    cached_unreachable_block: Option<BasicBlock>,
}
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
    /// Returns whether `id` is bound by a pattern of any match-arm guard
    /// currently being built.
    fn is_bound_var_in_guard(&self, id: ast::NodeId) -> bool {
        self.guard_context
            .iter()
            .flat_map(|frame| frame.locals.iter())
            .any(|local| local.id == id)
    }
    /// Looks up the `Local` allocated for the binding `id`, selecting the
    /// guard-specific or arm-body copy according to `for_guard`.
    fn var_local_id(&self, id: ast::NodeId, for_guard: ForGuard) -> Local {
        self.var_indices[&id].local_id(for_guard)
    }
}
impl BlockContext {
    fn new() -> Self {
        BlockContext(Vec::new())
    }
    fn push(&mut self, bf: BlockFrame) {
        self.0.push(bf);
    }
    fn pop(&mut self) -> Option<BlockFrame> {
        self.0.pop()
    }
    /// Walks the frame stack from innermost to outermost and returns the
    /// first block-tail frame reached without crossing a statement frame.
    ///
    /// Notably, this skips over `SubExpr` frames: it is meant for
    /// understanding the relationship of a temp (created within some
    /// complicated expression) with its containing expression, and whether
    /// the value of that *containing expression* (not the temp!) is ignored.
    fn currently_in_block_tail(&self) -> Option<BlockTailInfo> {
        for frame in self.0.iter().rev() {
            match frame {
                // Transparent: keep walking outward.
                BlockFrame::SubExpr => {}
                // A statement frame shields everything beyond it.
                BlockFrame::Statement { .. } => return None,
                &BlockFrame::TailExpr { tail_result_is_ignored } => {
                    return Some(BlockTailInfo { tail_result_is_ignored });
                }
            }
        }
        None
    }
    /// Reports whether the *topmost* frame would discard a block tail
    /// result. Unlike `currently_in_block_tail`, this does *not* skip over
    /// `SubExpr` frames: here we want to know whether the block result
    /// itself is discarded.
    fn currently_ignores_tail_results(&self) -> bool {
        match self.0.last() {
            Some(&BlockFrame::TailExpr { tail_result_is_ignored }) => tail_result_is_ignored,
            Some(&BlockFrame::Statement { ignores_expr_result }) => ignores_expr_result,
            // Sub-expression feeds into some computation; no context means
            // we conservatively assume the result is read.
            Some(BlockFrame::SubExpr) | None => false,
        }
    }
}
#[derive(Debug)]
enum LocalsForNode {
    /// In the usual case, a node-id for an identifier maps to at most
    /// one Local declaration.
    One(Local),
    /// The exceptional case is identifiers in a match arm's pattern
    /// that are referenced in a guard of that match arm. For these,
    /// we can have `2+k` Locals, where `k` is the number of candidate
    /// patterns (separated by `|`) in the arm.
    ///
    /// * `for_arm_body` is the Local used in the arm body (which is
    ///   just like the `One` case above),
    ///
    /// * `ref_for_guard` is the Local used in the arm's guard (which
    ///   is a reference to a temp that is an alias of
    ///   `for_arm_body`).
    ///
    /// * `vals_for_guard` is the `k` Locals; at most one of them will
    ///   get initialized by the arm's execution, and after it is
    ///   initialized, `ref_for_guard` will be assigned a reference to
    ///   it.
    ///
    /// The reason we have `k` Locals rather than just 1 is to
    /// accommodate some restrictions imposed by two-phase borrows,
    /// which apply when we have a `ref mut` pattern.
    ForGuard { vals_for_guard: Vec<Local>, ref_for_guard: Local, for_arm_body: Local },
}
#[derive(Debug)]
struct GuardFrameLocal {
    id: ast::NodeId,
}
impl GuardFrameLocal {
    /// The binding mode is accepted but currently unused; it is kept in the
    /// signature for parity with the pattern-binding call sites.
    fn new(id: ast::NodeId, _binding_mode: BindingMode) -> Self {
        GuardFrameLocal { id }
    }
}
#[derive(Debug)]
/// One entry per match-arm guard currently being lowered; stacked in
/// `Builder::guard_context`.
struct GuardFrame {
    /// These are the id's of names that are bound by patterns of the
    /// arm of *this* guard.
    ///
    /// (Frames higher up the stack will have the id's bound in arms
    /// further out, such as in a case like:
    ///
    /// match E1 {
    ///      P1(id1) if (... (match E2 { P2(id2) if ... => B2 })) => B1,
    /// }
    ///
    /// here, when building for FIXME
    locals: Vec<GuardFrameLocal>,
}
/// ForGuard indicates whether we are talking about:
/// 1. the temp for a local binding used solely within guard expressions,
/// 2. the temp that holds reference to (1.), which is actually what the
///    guard expressions see, or
/// 3. the temp for use outside of guard expressions.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ForGuard {
    /// The `usize` identifies for which candidate pattern we want the
    /// local binding. We keep a temp per-candidate to accommodate
    /// two-phase borrows (see `LocalsForNode` documentation).
    ValWithinGuard(usize),
    /// The reference temp that the guard expression itself reads (case 2).
    RefWithinGuard,
    /// The binding used outside of any guard, i.e. in the arm body (case 3).
    OutsideGuard,
}
impl LocalsForNode {
    /// Resolves this binding to the concrete `Local` requested by
    /// `for_guard`; ICEs when a guard-specific local is requested for a
    /// binding that never appears in a guard (the `One` case).
    fn local_id(&self, for_guard: ForGuard) -> Local {
        match (self, for_guard) {
            (&LocalsForNode::One(local_id), ForGuard::OutsideGuard) |
            (&LocalsForNode::ForGuard { ref_for_guard: local_id, .. }, ForGuard::RefWithinGuard) |
            (&LocalsForNode::ForGuard { for_arm_body: local_id, .. }, ForGuard::OutsideGuard) =>
                local_id,
            (&LocalsForNode::ForGuard { ref vals_for_guard, .. },
             ForGuard::ValWithinGuard(pat_idx)) =>
                vals_for_guard[pat_idx],
            (&LocalsForNode::One(_), ForGuard::ValWithinGuard(_)) |
            (&LocalsForNode::One(_), ForGuard::RefWithinGuard) =>
                bug!("anything with one local should never be within a guard."),
        }
    }
}
/// The control-flow graph being assembled: just the growable list of basic
/// blocks, indexed by `BasicBlock`.
struct CFG<'tcx> {
    basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
}
// Index newtype identifying a scope while the MIR is being built.
newtype_index! {
    pub struct ScopeId { .. }
}
///////////////////////////////////////////////////////////////////////////
/// The `BlockAnd` "monad" packages up the new basic block along with a
/// produced value (sometimes just unit, of course). The `unpack!`
/// macro (and methods below) makes working with `BlockAnd` much more
/// convenient.
#[must_use = "if you don't use one of these results, you're leaving a dangling edge"]
struct BlockAnd<T>(BasicBlock, T);
trait BlockAndExtension {
    /// Pairs this block with a produced value.
    fn and<T>(self, v: T) -> BlockAnd<T>;
    /// Pairs this block with `()`.
    fn unit(self) -> BlockAnd<()>;
}
impl BlockAndExtension for BasicBlock {
    fn and<T>(self, v: T) -> BlockAnd<T> {
        BlockAnd(self, v)
    }
    fn unit(self) -> BlockAnd<()> {
        BlockAnd(self, ())
    }
}
/// Update a block pointer and return the value.
/// Use it like `let x = unpack!(block = self.foo(block, foo))`.
macro_rules! unpack {
    // `unpack!(block = expr)`: store the resulting block into `block` and
    // yield the carried value.
    ($x:ident = $c:expr) => {
        {
            let BlockAnd(b, v) = $c;
            $x = b;
            v
        }
    };
    // `unpack!(expr)`: for `BlockAnd<()>` results, yield just the block.
    ($c:expr) => {
        {
            let BlockAnd(b, ()) = $c;
            b
        }
    };
}
/// Decides whether a function should abort (rather than unwind) on panic.
fn should_abort_on_panic<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                         fn_def_id: DefId,
                                         abi: Abi)
                                         -> bool {
    // No abort is needed when unwinding is safe or impossible: Rust ABIs are
    // not callable from C so we can safely unwind through them; a non-unwind
    // panic strategy never unwinds; and without landing pads we cannot add
    // the abort anyway.
    if abi == Abi::Rust
        || abi == Abi::RustCall
        || tcx.sess.panic_strategy() != PanicStrategy::Unwind
        || tcx.sess.no_landing_pads()
    {
        return false;
    }
    // This is a special case: some functions have a C abi but are meant to
    // unwind anyway. Don't stop them.
    let attrs = &tcx.get_attrs(fn_def_id);
    match attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs) {
        Some(UnwindAttr::Aborts) => true,
        // FIXME(rust-lang/rust#48251) -- Had to disable abort-on-panic
        // for backwards compatibility reasons, so the `None` case is
        // also `false`.
        Some(UnwindAttr::Allowed) | None => false,
    }
}
///////////////////////////////////////////////////////////////////////////
/// the main entry point for building MIR for a function
struct ArgInfo<'gcx>(Ty<'gcx>,
                     // span of the argument's type annotation, when known
                     Option<Span>,
                     // the argument's HIR pattern, if it has one
                     Option<&'gcx hir::Pat>,
                     // set when the argument is an implicit `self` receiver
                     Option<ImplicitSelfKind>);
/// Builds the MIR for a function-like body (fn, method, closure, generator).
fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
                                   fn_id: ast::NodeId,
                                   arguments: A,
                                   safety: Safety,
                                   abi: Abi,
                                   return_ty: Ty<'gcx>,
                                   yield_ty: Option<Ty<'gcx>>,
                                   return_ty_span: Span,
                                   body: &'gcx hir::Body)
                                   -> Mir<'tcx>
    where A: Iterator<Item=ArgInfo<'gcx>>
{
    let arguments: Vec<_> = arguments.collect();
    let tcx = hir.tcx();
    let span = tcx.hir.span(fn_id);
    // Gather the upvars of a closure, if any.
    let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| {
        freevars.iter().map(|fv| {
            let var_id = fv.var_id();
            let var_hir_id = tcx.hir.node_to_hir_id(var_id);
            let closure_expr_id = tcx.hir.local_def_id(fn_id);
            let capture = hir.tables().upvar_capture(ty::UpvarId {
                var_path: ty::UpvarPath {hir_id: var_hir_id},
                closure_expr_id: LocalDefId::from_def_id(closure_expr_id),
            });
            let by_ref = match capture {
                ty::UpvarCapture::ByValue => false,
                ty::UpvarCapture::ByRef(..) => true
            };
            let mut decl = UpvarDecl {
                debug_name: keywords::Invalid.name(),
                var_hir_id: ClearCrossCrate::Set(var_hir_id),
                by_ref,
                mutability: Mutability::Not,
            };
            // Recover the captured variable's name and mutability from its
            // binding pattern for debuginfo.
            if let Some(Node::Binding(pat)) = tcx.hir.find(var_id) {
                if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
                    decl.debug_name = ident.name;
                    if let Some(&bm) = hir.tables.pat_binding_modes().get(pat.hir_id) {
                        if bm == ty::BindByValue(hir::MutMutable) {
                            decl.mutability = Mutability::Mut;
                        } else {
                            decl.mutability = Mutability::Not;
                        }
                    } else {
                        tcx.sess.delay_span_bug(pat.span, "missing binding mode");
                    }
                }
            }
            decl
        }).collect()
    });
    let mut builder = Builder::new(hir,
        span,
        arguments.len(),
        safety,
        return_ty,
        return_ty_span,
        upvar_decls);
    let fn_def_id = tcx.hir.local_def_id(fn_id);
    // The call-site scope wraps the whole body; the argument scope wraps the
    // lifetime of the argument bindings inside it.
    let call_site_scope = region::Scope {
        id: body.value.hir_id.local_id,
        data: region::ScopeData::CallSite
    };
    let arg_scope = region::Scope {
        id: body.value.hir_id.local_id,
        data: region::ScopeData::Arguments
    };
    let mut block = START_BLOCK;
    let source_info = builder.source_info(span);
    let call_site_s = (call_site_scope, source_info);
    unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, block, |builder| {
        if should_abort_on_panic(tcx, fn_def_id, abi) {
            builder.schedule_abort();
        }
        let arg_scope_s = (arg_scope, source_info);
        unpack!(block = builder.in_scope(arg_scope_s, LintLevel::Inherited, block, |builder| {
            builder.args_and_body(block, &arguments, arg_scope, &body.value)
        }));
        // Attribute epilogue to function's closing brace
        let fn_end = span.shrink_to_hi();
        let source_info = builder.source_info(fn_end);
        let return_block = builder.return_block();
        builder.cfg.terminate(block, source_info,
                              TerminatorKind::Goto { target: return_block });
        builder.cfg.terminate(return_block, source_info,
                              TerminatorKind::Return);
        // Attribute any unreachable codepaths to the function's closing brace
        if let Some(unreachable_block) = builder.cached_unreachable_block {
            builder.cfg.terminate(unreachable_block, source_info,
                                  TerminatorKind::Unreachable);
        }
        return_block.unit()
    }));
    assert_eq!(block, builder.return_block());
    let mut spread_arg = None;
    if abi == Abi::RustCall {
        // RustCall pseudo-ABI untuples the last argument.
        spread_arg = Some(Local::new(arguments.len()));
    }
    let closure_expr_id = tcx.hir.local_def_id(fn_id);
    info!("fn_id {:?} has attrs {:?}", closure_expr_id,
          tcx.get_attrs(closure_expr_id));
    let mut mir = builder.finish(yield_ty);
    mir.spread_arg = spread_arg;
    mir
}
/// Builds the MIR for a constant-like body (const, static, anon const):
/// a single evaluation of the body expression into the return place.
fn construct_const<'a, 'gcx, 'tcx>(
    hir: Cx<'a, 'gcx, 'tcx>,
    body_id: hir::BodyId,
    ty_span: Span,
) -> Mir<'tcx> {
    let tcx = hir.tcx();
    let ast_expr = &tcx.hir.body(body_id).value;
    let ty = hir.tables().expr_ty_adjusted(ast_expr);
    let owner_id = tcx.hir.body_owner(body_id);
    let span = tcx.hir.span(owner_id);
    // Constants take no arguments and capture no upvars.
    let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, ty_span,vec![]);
    let mut block = START_BLOCK;
    let expr = builder.hir.mirror(ast_expr);
    unpack!(block = builder.into_expr(&Place::Local(RETURN_PLACE), block, expr));
    let source_info = builder.source_info(span);
    builder.cfg.terminate(block, source_info, TerminatorKind::Return);
    // Constants can't `return` so a return block should not be created.
    assert_eq!(builder.cached_return_block, None);
    // Constants may be match expressions in which case an unreachable block may
    // be created, so terminate it properly.
    if let Some(unreachable_block) = builder.cached_unreachable_block {
        builder.cfg.terminate(unreachable_block, source_info,
                              TerminatorKind::Unreachable);
    }
    builder.finish(None)
}
/// Builds a minimal MIR body whose entry block is immediately unreachable;
/// used when type-checking of the body already reported errors.
fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
                                   body_id: hir::BodyId)
                                   -> Mir<'tcx> {
    let tcx = hir.tcx();
    let owner_id = tcx.hir.body_owner(body_id);
    let span = tcx.hir.span(owner_id);
    let err_ty = tcx.types.err;
    let mut builder = Builder::new(hir, span, 0, Safety::Safe, err_ty, span, vec![]);
    let source_info = builder.source_info(span);
    builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
    builder.finish(None)
}
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
    /// Creates a builder with the return place allocated as the first local,
    /// `START_BLOCK` created, and the outermost source scope in place.
    fn new(hir: Cx<'a, 'gcx, 'tcx>,
           span: Span,
           arg_count: usize,
           safety: Safety,
           return_ty: Ty<'tcx>,
           return_span: Span,
           upvar_decls: Vec<UpvarDecl>)
           -> Builder<'a, 'gcx, 'tcx> {
        let lint_level = LintLevel::Explicit(hir.root_lint_level);
        let mut builder = Builder {
            hir,
            cfg: CFG { basic_blocks: IndexVec::new() },
            fn_span: span,
            arg_count,
            scopes: vec![],
            block_context: BlockContext::new(),
            source_scopes: IndexVec::new(),
            source_scope: OUTERMOST_SOURCE_SCOPE,
            source_scope_local_data: IndexVec::new(),
            guard_context: vec![],
            push_unsafe_count: 0,
            unpushed_unsafe: safety,
            breakable_scopes: vec![],
            // The return place is always the single pre-allocated local.
            local_decls: IndexVec::from_elem_n(
                LocalDecl::new_return_place(return_ty, return_span),
                1,
            ),
            upvar_decls,
            var_indices: Default::default(),
            unit_temp: None,
            cached_resume_block: None,
            cached_return_block: None,
            cached_unreachable_block: None,
        };
        // These asserts pin the well-known indices of the entry block and
        // the outermost source scope.
        assert_eq!(builder.cfg.start_new_block(), START_BLOCK);
        assert_eq!(
            builder.new_source_scope(span, lint_level, Some(safety)),
            OUTERMOST_SOURCE_SCOPE);
        builder.source_scopes[OUTERMOST_SOURCE_SCOPE].parent_scope = None;
        builder
    }
    /// Checks that every block got a terminator, then assembles the final
    /// `Mir` from the builder's accumulated state.
    fn finish(self,
              yield_ty: Option<Ty<'tcx>>)
              -> Mir<'tcx> {
        for (index, block) in self.cfg.basic_blocks.iter().enumerate() {
            if block.terminator.is_none() {
                span_bug!(self.fn_span, "no terminator on block {:?}", index);
            }
        }
        Mir::new(self.cfg.basic_blocks,
                 self.source_scopes,
                 ClearCrossCrate::Set(self.source_scope_local_data),
                 IndexVec::new(),
                 yield_ty,
                 self.local_decls,
                 self.arg_count,
                 self.upvar_decls,
                 self.fn_span
        )
    }
    /// Allocates locals for the arguments, binds their patterns, schedules
    /// their drops, and then lowers the body into the return place.
    fn args_and_body(&mut self,
                     mut block: BasicBlock,
                     arguments: &[ArgInfo<'gcx>],
                     argument_scope: region::Scope,
                     ast_body: &'gcx hir::Expr)
                     -> BlockAnd<()>
    {
        // Allocate locals for the function arguments
        for &ArgInfo(ty, _, pattern, _) in arguments.iter() {
            // If this is a simple binding pattern, give the local a name for
            // debuginfo and so that error reporting knows that this is a user
            // variable. For any other pattern the pattern introduces new
            // variables which will be named instead.
            let mut name = None;
            if let Some(pat) = pattern {
                match pat.node {
                    hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, _)
                    | hir::PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, _) => {
                        name = Some(ident.name);
                    }
                    _ => (),
                }
            }
            let source_info = SourceInfo {
                scope: OUTERMOST_SOURCE_SCOPE,
                span: pattern.map_or(self.fn_span, |pat| pat.span)
            };
            self.local_decls.push(LocalDecl {
                mutability: Mutability::Mut,
                ty,
                user_ty: UserTypeProjections::none(),
                source_info,
                visibility_scope: source_info.scope,
                name,
                internal: false,
                is_user_variable: None,
                is_block_tail: None,
            });
        }
        let mut scope = None;
        // Bind the argument patterns
        for (index, arg_info) in arguments.iter().enumerate() {
            // Function arguments always get the first Local indices after the return place
            let local = Local::new(index + 1);
            let place = Place::Local(local);
            let &ArgInfo(ty, opt_ty_info, pattern, ref self_binding) = arg_info;
            if let Some(pattern) = pattern {
                let pattern = self.hir.pattern_from_hir(pattern);
                let span = pattern.span;
                match *pattern.kind {
                    // Don't introduce extra copies for simple bindings
                    PatternKind::Binding { mutability, var, mode: BindingMode::ByValue, .. } => {
                        self.local_decls[local].mutability = mutability;
                        self.local_decls[local].is_user_variable =
                            if let Some(kind) = self_binding {
                                Some(ClearCrossCrate::Set(BindingForm::ImplicitSelf(*kind)))
                            } else {
                                let binding_mode = ty::BindingMode::BindByValue(mutability.into());
                                Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                                    binding_mode,
                                    opt_ty_info,
                                    opt_match_place: Some((Some(place.clone()), span)),
                                    pat_span: span,
                                })))
                            };
                        self.var_indices.insert(var, LocalsForNode::One(local));
                    }
                    _ => {
                        // Complex pattern: lower it as a one-armed,
                        // guard-less match against the argument place.
                        scope = self.declare_bindings(scope, ast_body.span,
                                                      LintLevel::Inherited, &[pattern.clone()],
                                                      matches::ArmHasGuard(false),
                                                      Some((Some(&place), span)));
                        unpack!(block = self.place_into_pattern(block, pattern, &place, false));
                    }
                }
            }
            // Make sure we drop (parts of) the argument even when not matched on.
            self.schedule_drop(
                pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
                argument_scope, &place, ty,
                DropKind::Value { cached_block: CachedBlock::default() },
            );
        }
        // Enter the argument pattern bindings source scope, if it exists.
        if let Some(source_scope) = scope {
            self.source_scope = source_scope;
        }
        let body = self.hir.mirror(ast_body);
        self.into(&Place::Local(RETURN_PLACE), block, body)
    }
    /// Returns the shared unit-typed temporary, creating it on first use.
    fn get_unit_temp(&mut self) -> Place<'tcx> {
        match self.unit_temp {
            Some(ref tmp) => tmp.clone(),
            None => {
                let ty = self.hir.unit_ty();
                let fn_span = self.fn_span;
                let tmp = self.temp(ty, fn_span);
                self.unit_temp = Some(tmp.clone());
                tmp
            }
        }
    }
    /// Returns the cached RETURN block, creating it on first use.
    fn return_block(&mut self) -> BasicBlock {
        match self.cached_return_block {
            Some(rb) => rb,
            None => {
                let rb = self.cfg.start_new_block();
                self.cached_return_block = Some(rb);
                rb
            }
        }
    }
    /// Returns the cached UNREACHABLE block, creating it on first use.
    fn unreachable_block(&mut self) -> BasicBlock {
        match self.cached_unreachable_block {
            Some(ub) => ub,
            None => {
                let ub = self.cfg.start_new_block();
                self.cached_unreachable_block = Some(ub);
                ub
            }
        }
    }
}
///////////////////////////////////////////////////////////////////////////
// Builder methods are broken up into modules, depending on what kind
// of thing is being lowered. Note that they use the `unpack` macro
// above extensively.
mod block;
mod cfg;
mod expr;
mod into;
mod matches;
mod misc;
mod scope;
| 37.412808 | 100 | 0.54245 |
4bfe6634bbe7169644c44acc67ffa23c78d72c7d | 208 | extern crate octoon;
fn main()
{
let mut motion = octoon::animation::open("./M.vmd").unwrap();
motion.clips[0].add_event(|name, value| println!("{:?}:{:?}", name, value));
motion.evaluate(7.5);
} | 26 | 80 | 0.605769 |
8f6d14d202fcf02496013d1c879c974c71b5cc45 | 1,129 | use crate::tools::IterScan;
#[codesnip::entry("bounded")]
pub use self::bounded::Bounded;
#[codesnip::entry("Complex")]
pub use self::complex::Complex;
#[codesnip::entry("discrete_steps")]
pub use self::discrete_steps::{DiscreteSteps, RangeBoundsExt};
#[codesnip::entry("float")]
pub use self::float::{Float, Float32, Float64};
#[codesnip::entry("integer")]
pub use self::integer::{BinaryRepr, ExtendedGcd, IntBase, Saturating, Signed, Unsigned, Wrapping};
pub use self::mint::*;
#[codesnip::entry("QuadDouble")]
pub use self::quad_double::QuadDouble;
#[codesnip::entry("zero_one")]
pub use self::zero_one::{One, Zero};
#[cfg_attr(nightly, codesnip::entry)]
mod bounded;
#[cfg_attr(nightly, codesnip::entry("Complex", include("zero_one", "scanner")))]
mod complex;
#[cfg_attr(nightly, codesnip::entry(include("bounded")))]
mod discrete_steps;
#[cfg_attr(nightly, codesnip::entry(include("zero_one")))]
mod float;
#[cfg_attr(nightly, codesnip::entry(include("zero_one", "bounded")))]
mod integer;
mod mint;
#[cfg_attr(nightly, codesnip::entry("QuadDouble"))]
mod quad_double;
#[cfg_attr(nightly, codesnip::entry)]
mod zero_one;
| 33.205882 | 98 | 0.728964 |
9b92c561db1f8c08e58bdc8d6e84af0c2a322dc5 | 4,595 | //-
// Copyright 2018 Jason Lingle
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(missing_docs, unsafe_code)]
//! Rusty-fork provides a way to "fork" unit tests into separate processes.
//!
//! There are a number of reasons to want to run some tests in isolated
//! processes:
//!
//! - When tests share a process, if any test causes the process to abort,
//! segfault, overflow the stack, etc., the entire test runner process dies. If
//! the test is in a subprocess, only the subprocess dies and the test runner
//! simply fails the test.
//!
//! - Isolating a test to a subprocess makes it possible to add a timeout to
//! the test and forcibly terminate it and produce a normal test failure.
//!
//! - Tests which need to interact with some inherently global property, such
//! as the current working directory, can do so without interfering with other
//! tests.
//!
//! This crate itself provides two things:
//!
//! - The [`rusty_fork_test!`](macro.rusty_fork_test.html) macro, which is a
//! simple way to wrap standard Rust tests to be run in subprocesses with
//! optional timeouts.
//!
//! - The [`fork`](fn.fork.html) function which can be used as a building block
//! to make other types of process isolation strategies.
//!
//! ## Quick Start
//!
//! If you just want to run normal Rust tests in isolated processes, getting
//! started is pretty quick.
//!
//! In `Cargo.toml`, add
//!
//! ```toml
//! [dev-dependencies]
//! rusty-fork = "*"
//! ```
//!
//! Then, you can simply wrap any test(s) to be isolated with the
//! [`rusty_fork_test!`](macro.rusty_fork_test.html) macro.
//!
//! ```rust
//! use rusty_forkfork::rusty_fork_test;
//!
//! rusty_fork_test! {
//! # /* NOREADME
//! #[test]
//! # NOREADME */
//! fn my_test() {
//! assert_eq!(2, 1 + 1);
//! }
//!
//! // more tests...
//! }
//! # // NOREADME
//! # fn main() { my_test(); } // NOREADME
//! ```
//!
//! For more advanced usage, have a look at the [`fork`](fn.fork.html)
//! function.
//!
//! ## How rusty-fork works
//!
//! Unix-style process forking isn't really viable within the standard Rust
//! test environment for a number of reasons.
//!
//! - While true process forking can be done on Windows, it's neither fast nor
//! reliable.
//!
//! - The Rust test environment is multi-threaded, so attempting to do anything
//! non-trivial after a process fork would result in undefined behaviour.
//!
//! Rusty-fork instead works by _spawning_ a fresh instance of the current
//! process, after adjusting the command-line to ensure that only the desired
//! test is entered. Some additional coordination establishes the parent/child
//! branches and (not quite seamlessly) integrates the child's output with the
//! test output capture system.
//!
//! Coordination between the processes is performed via environment variables,
//! since there is otherwise no way to pass parameters to a test.
//!
//! Since it needs to spawn new copies of the test runner executable,
//! rusty-fork does need to know about the meaning of every flag passed by the
//! user. If any unknown flags are encountered, forking will fail. Please do
//! not hesitate to file
//! [issues](https://github.com/AltSysrq/rusty-fork/issues) if rusty-fork fails
//! to recognise any valid flags passed to the test runner.
//!
//! It is possible to inform rusty-fork of new flags without patching by
//! setting environment variables. For example, if a new `--frob-widgets` flag
//! were added to the test runner, you could set `RUSTY_FORK_FLAG_FROB_WIDGETS`
//! to one of the following:
//!
//! - `pass` — Pass the flag (just the flag) to the child process
//! - `pass-arg` — Pass the flag and its following argument to the child process
//! - `drop` — Don't pass the flag to the child process
//! - `drop-arg` — Don't pass the flag to the child process, and ignore whatever
//! argument follows.
//!
//! In general, arguments that affect which tests are run should be dropped,
//! and others should be passed.
//!
//! <!-- ENDREADME -->
#[macro_use]
extern crate quick_error;
#[macro_use]
mod sugar;
#[macro_use]
pub mod fork_test;
mod child_wrapper;
mod cmdline;
mod error;
mod fork;
pub use crate::child_wrapper::{ChildWrapper, ExitStatusWrapper};
pub use crate::error::{Error, Result};
pub use crate::fork::fork;
pub use crate::sugar::RustyForkId;
| 34.810606 | 80 | 0.695539 |
e8816bbed4b240042e03a5c909d24da288fb27b4 | 3,594 | use super::error::*;
use super::PierResult;
use serde::{Deserialize, Serialize};
use snafu::ResultExt;
use std::fs::File;
use std::io::prelude::*;
use std::os::unix::fs::PermissionsExt;
use std::process::{Command, Output, Stdio};
use tempfile;
/// A single user-defined script: the command body plus optional metadata
/// loaded from the pier configuration file.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Script {
    /// Alias the script is invoked by; populated from the config key, so it
    /// is skipped during (de)serialization.
    #[serde(skip)]
    pub alias: String,
    /// The command text to execute (may start with a `#!` shebang line).
    pub command: String,
    /// Optional human-readable description.
    pub description: Option<String>,
    /// Optional reference (e.g. a URL) for where the script came from.
    pub reference: Option<String>,
    /// Optional tags used for filtering/listing.
    pub tags: Option<Vec<String>>,
}
impl Script {
    /// Returns true when the first line of the command starts with `#!`,
    /// i.e. the script declares its own interpreter.
    pub fn has_shebang(&self) -> bool {
        match self.command.lines().nth(0) {
            Some(line) => line.starts_with("#!"),
            None => false,
        }
    }

    /// Returns the command text for display purposes.
    ///
    /// When `display_full` is true the whole command is returned; otherwise
    /// only the first line, truncated to at most `width` characters.
    pub fn display_command(&self, display_full: bool, width: usize) -> &str {
        if display_full {
            return &self.command;
        }
        match self.command.lines().nth(0) {
            Some(line) => match line.char_indices().nth(width) {
                // The line fits within `width` characters: show it whole.
                None => line,
                // Cut at the byte offset of the `width`-th character. The
                // previous implementation sliced by *bytes* (`&line[0..width]`)
                // after counting *characters*, which could panic or truncate
                // incorrectly on multi-byte UTF-8 input.
                Some((cut, _)) => &line[..cut],
            },
            None => &self.command,
        }
    }

    /// Runs the script inline using something like sh -c "<script>" or python -c "<script."...
    ///
    /// The first element of `interpreter` is the binary; the remaining
    /// elements are its command-line arguments. The script body, the alias
    /// and `args` are appended after them.
    pub fn run_with_cli_interpreter(
        &self,
        // `&[String]` accepts the same `&Vec<String>` call sites via deref
        // coercion while also allowing slices.
        interpreter: &[String],
        args: Vec<String>,
    ) -> PierResult<Output> {
        // First item in interpreter is the binary
        let cmd = Command::new(&interpreter[0])
            // The following items after the binary is any commandline args that are necessary.
            .args(&interpreter[1..])
            .arg(&self.command)
            .arg(&self.alias)
            .args(&args)
            .stderr(Stdio::piped())
            .spawn()
            .context(CommandExec)?
            .wait_with_output()
            .context(CommandExec)?;
        Ok(cmd)
    }

    /// First creates a temporary file and then executes the file before removing it.
    pub fn run_with_shebang(&self, args: Vec<String>) -> PierResult<Output> {
        // Creates a temp directory to place our tempfile inside.
        let tmpdir = tempfile::Builder::new()
            .prefix("pier")
            .tempdir()
            .context(ExecutableTempFileCreate)?;
        let exec_file_path = tmpdir.path().join(&self.alias);
        // Creating the file inside a closure is convenient because rust will automatically handle
        // closing the file for us so we can go ahead and execute it after writing to it and setting the file permissions.
        {
            let mut exec_file = File::create(&exec_file_path).context(ExecutableTempFileCreate)?;
            exec_file
                .write(self.command.as_bytes())
                .context(ExecutableTempFileCreate)?;
            let mut permissions = exec_file
                .metadata()
                .context(ExecutableTempFileCreate)?
                .permissions();
            // Set the file permissions to allow read and execute for the current user.
            permissions.set_mode(0o500);
            exec_file
                .set_permissions(permissions)
                .context(ExecutableTempFileCreate)?;
        }
        let cmd = Command::new(exec_file_path)
            .stderr(Stdio::piped())
            .args(&args)
            .spawn()
            .context(CommandExec)?
            .wait_with_output()
            .context(CommandExec)?;
        Ok(cmd)
    }
}
| 32.672727 | 122 | 0.537563 |
d991038e92cc4876847c598a76799eabb4bdf7b9 | 546 | // <json-resp>
use actix_web::{get, web, Responder, Result};
use serde::Serialize;
/// Response payload serialized to JSON by the `index` handler.
#[derive(Serialize)]
struct MyObj {
    name: String,
}
#[get("/a/{name}")]
async fn index(name: web::Path<String>) -> Result<impl Responder> {
    // Echo the path segment back as a JSON object: {"name": "<name>"}.
    Ok(web::Json(MyObj {
        name: name.to_string(),
    }))
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    use actix_web::{App, HttpServer};
    // Serve the `index` handler on localhost:8080.
    let server = HttpServer::new(|| App::new().service(index));
    server.bind("127.0.0.1:8080")?.run().await
}
// </json-resp>
| 19.5 | 67 | 0.569597 |
182464d9eab98bbcb22797979f4c0ea0f288a563 | 376 | mod stats;
use self::stats::Stats;
use crate::jcli_app::rest::Error;
use structopt::StructOpt;
/// `network` subcommands of the REST CLI.
#[derive(StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub enum Network {
    /// Network information
    Stats(Stats),
}
impl Network {
    /// Dispatches the selected subcommand.
    pub fn exec(self) -> Result<(), Error> {
        // Single-variant enum, so this pattern is irrefutable.
        let Network::Stats(stats) = self;
        stats.exec()
    }
}
| 17.904762 | 50 | 0.601064 |
bb17cb48d9cd145db61ce4e592cf900c4c289cb4 | 21,515 | // Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::fmt::{self, Display};
use std::fs::File;
use std::io::{Read, Result as IoResult, Write};
use std::mem;
use std::net;
use std::num::ParseIntError;
use std::os::raw::*;
use std::os::unix::io::{AsRawFd, FromRawFd, RawFd};
use std::str::FromStr;
use libc::EPERM;
use base::Error as SysError;
use base::FileReadWriteVolatile;
use base::{
ioctl_with_mut_ref, ioctl_with_ref, ioctl_with_val, volatile_impl, AsRawDescriptor,
FromRawDescriptor, IoctlNr, RawDescriptor,
};
/// Errors returned by tap-interface creation and configuration.
#[derive(Debug)]
pub enum Error {
    /// Failed to create a socket.
    CreateSocket(SysError),
    /// Couldn't open /dev/net/tun.
    OpenTun(SysError),
    /// Unable to create tap interface.
    CreateTap(SysError),
    /// ioctl failed.
    IoctlError(SysError),
}
/// Convenience alias for fallible tap operations in this module.
pub type Result<T> = std::result::Result<T, Error>;
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
match self {
CreateSocket(e) => write!(f, "failed to create a socket: {}", e),
OpenTun(e) => write!(f, "failed to open /dev/net/tun: {}", e),
CreateTap(e) => write!(f, "failed to create tap interface: {}", e),
IoctlError(e) => write!(f, "ioctl failed: {}", e),
}
}
}
impl Error {
pub fn sys_error(&self) -> SysError {
match *self {
Error::CreateSocket(e) => e,
Error::OpenTun(e) => e,
Error::CreateTap(e) => e,
Error::IoctlError(e) => e,
}
}
}
/// Create a sockaddr_in from an IPv4 address, and expose it as
/// an opaque sockaddr suitable for usage by socket ioctls.
fn create_sockaddr(ip_addr: net::Ipv4Addr) -> net_sys::sockaddr {
    // IPv4 addresses big-endian (network order), but Ipv4Addr will give us
    // a view of those bytes directly so we can avoid any endian trickiness.
    let addr_in = net_sys::sockaddr_in {
        sin_family: net_sys::AF_INET as u16,
        sin_port: 0,
        // Reinterprets the 4 network-order octets as the kernel's in_addr
        // representation without any byte swapping.
        sin_addr: unsafe { mem::transmute(ip_addr.octets()) },
        __pad: [0; 8usize],
    };
    // sockaddr_in is reinterpreted as sockaddr for the ioctl interface;
    // NOTE(review): this relies on the two types having identical size.
    unsafe { mem::transmute(addr_in) }
}
/// Extract the IPv4 address from a sockaddr. Assumes the sockaddr is a sockaddr_in.
fn read_ipv4_addr(addr: &net_sys::sockaddr) -> net::Ipv4Addr {
    debug_assert_eq!(addr.sa_family as u32, net_sys::AF_INET);
    // This is safe because sockaddr and sockaddr_in are the same size, and we've checked that
    // this address is AF_INET.
    let in_addr: net_sys::sockaddr_in = unsafe { mem::transmute(*addr) };
    // NOTE(review): s_addr is in network byte order while Ipv4Addr::from(u32)
    // expects the most-significant octet first in the integer; confirm the
    // resulting octet order on little-endian hosts.
    net::Ipv4Addr::from(in_addr.sin_addr.s_addr)
}
/// Opens an AF_INET datagram socket used purely as a handle for the
/// interface-configuration ioctls (SIOCGIFADDR and friends) below.
fn create_socket() -> Result<net::UdpSocket> {
    // This is safe since we check the return value.
    let sock = unsafe { libc::socket(libc::AF_INET, libc::SOCK_DGRAM, 0) };
    if sock < 0 {
        return Err(Error::CreateSocket(SysError::last()));
    }
    // This is safe; nothing else will use or hold onto the raw sock fd.
    Ok(unsafe { net::UdpSocket::from_raw_fd(sock) })
}
/// Errors produced while parsing a textual MAC address.
#[derive(Debug)]
pub enum MacAddressError {
    /// Invalid number of octets.
    InvalidNumOctets(usize),
    /// Failed to parse octet.
    ParseOctet(ParseIntError),
}
impl Display for MacAddressError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::MacAddressError::*;
match self {
InvalidNumOctets(n) => write!(f, "invalid number of octets: {}", n),
ParseOctet(e) => write!(f, "failed to parse octet: {}", e),
}
}
}
/// An Ethernet mac address. This struct is compatible with the C `struct sockaddr`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct MacAddress {
    // Address family; set to ARPHRD_ETHER by the `FromStr` impl below.
    family: net_sys::sa_family_t,
    // The six address octets.
    addr: [u8; 6usize],
    // Padding so the layout matches C's `struct sockaddr` data area.
    __pad: [u8; 8usize],
}
impl MacAddress {
    /// Returns the six address octets.
    pub fn octets(&self) -> [u8; 6usize] {
        self.addr
    }
}
impl FromStr for MacAddress {
    type Err = MacAddressError;

    /// Parses "aa:bb:cc:dd:ee:ff"-style strings (hex octets separated by ':').
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // A MAC address must consist of exactly six ':'-separated octets.
        let octets: Vec<&str> = s.split(':').collect();
        if octets.len() != 6usize {
            return Err(MacAddressError::InvalidNumOctets(octets.len()));
        }
        let mut addr = [0u8; 6usize];
        for (dst, octet) in addr.iter_mut().zip(octets.iter()) {
            *dst = u8::from_str_radix(octet, 16).map_err(MacAddressError::ParseOctet)?;
        }
        Ok(MacAddress {
            family: net_sys::ARPHRD_ETHER,
            addr,
            __pad: [0; 8usize],
        })
    }
}
impl Display for MacAddress {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Upper-case, zero-padded hex octets joined by ':'.
        for (i, octet) in self.addr.iter().enumerate() {
            if i > 0 {
                f.write_str(":")?;
            }
            write!(f, "{:02X}", octet)?;
        }
        Ok(())
    }
}
/// Handle for a network tap interface.
///
/// For now, this simply wraps the file descriptor for the tap device so methods
/// can run ioctls on the interface. The tap interface fd will be closed when
/// Tap goes out of scope, and the kernel will clean up the interface
/// automatically.
#[derive(Debug)]
pub struct Tap {
    // Open fd on /dev/net/tun bound to this interface.
    tap_file: File,
    // Interface name as reported by TUNGETIFF (fixed 16-byte C string).
    if_name: [c_char; 16usize],
    // IFF_* flags the interface was created with.
    if_flags: ::std::os::raw::c_short,
}
impl Tap {
    /// Wraps an already-open tap fd, querying the kernel (TUNGETIFF) for the
    /// interface name and flags.
    ///
    /// # Safety
    /// The caller must pass a valid, owned tap descriptor; ownership of the
    /// fd is transferred to the returned `Tap`.
    pub unsafe fn from_raw_descriptor(fd: RawDescriptor) -> Result<Tap> {
        let tap_file = File::from_raw_descriptor(fd);
        // Get the interface name since we will need it for some ioctls.
        let mut ifreq: net_sys::ifreq = Default::default();
        let ret = ioctl_with_mut_ref(&tap_file, net_sys::TUNGETIFF(), &mut ifreq);
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(Tap {
            tap_file,
            if_name: ifreq.ifr_ifrn.ifrn_name,
            if_flags: ifreq.ifr_ifru.ifru_flags,
        })
    }
    /// Opens /dev/net/tun and creates/attaches the interface described by
    /// `ifreq` via TUNSETIFF, recording the name and flags on success.
    fn create_tap_with_ifreq(ifreq: &mut net_sys::ifreq) -> Result<Tap> {
        // Open calls are safe because we give a constant nul-terminated
        // string and verify the result.
        let fd = unsafe {
            libc::open(
                b"/dev/net/tun\0".as_ptr() as *const c_char,
                libc::O_RDWR | libc::O_NONBLOCK | libc::O_CLOEXEC,
            )
        };
        if fd < 0 {
            return Err(Error::OpenTun(SysError::last()));
        }
        // We just checked that the fd is valid.
        let tuntap = unsafe { File::from_raw_descriptor(fd) };
        // ioctl is safe since we call it with a valid tap fd and check the return
        // value.
        let ret = unsafe { ioctl_with_mut_ref(&tuntap, net_sys::TUNSETIFF(), ifreq) };
        if ret < 0 {
            let error = SysError::last();
            // In a non-root, test environment, we won't have permission to call this; allow
            if !(cfg!(test) && error.errno() == EPERM) {
                return Err(Error::CreateTap(error));
            }
        }
        // Safe since only the name is accessed, and it's copied out.
        Ok(Tap {
            tap_file: tuntap,
            if_name: unsafe { ifreq.ifr_ifrn.ifrn_name },
            if_flags: unsafe { ifreq.ifr_ifru.ifru_flags },
        })
    }
}
pub trait TapT: FileReadWriteVolatile + Read + Write + AsRawDescriptor + Send + Sized {
    /// Create a new tap interface. Set the `vnet_hdr` flag to true to allow offloading on this tap,
    /// which will add an extra 12 byte virtio net header to incoming frames. Offloading cannot
    /// be used if `vnet_hdr` is false.
    /// Set `multi_vq` to true if the tap has multiple virtqueue pairs.
    fn new(vnet_hdr: bool, multi_vq: bool) -> Result<Self>;
    /// Change the origin tap into multiqueue taps, this means create other taps based on the
    /// origin tap.
    fn into_mq_taps(self, vq_pairs: u16) -> Result<Vec<Self>>;
    /// Get the host-side IP address for the tap interface.
    fn ip_addr(&self) -> Result<net::Ipv4Addr>;
    /// Set the host-side IP address for the tap interface.
    fn set_ip_addr(&self, ip_addr: net::Ipv4Addr) -> Result<()>;
    /// Get the netmask for the tap interface's subnet.
    fn netmask(&self) -> Result<net::Ipv4Addr>;
    /// Set the netmask for the subnet that the tap interface will exist on.
    fn set_netmask(&self, netmask: net::Ipv4Addr) -> Result<()>;
    /// Get the mac address for the tap interface.
    fn mac_address(&self) -> Result<MacAddress>;
    /// Set the mac address for the tap interface.
    fn set_mac_address(&self, mac_addr: MacAddress) -> Result<()>;
    /// Set the offload flags for the tap interface.
    fn set_offload(&self, flags: c_uint) -> Result<()>;
    /// Enable the tap interface.
    fn enable(&self) -> Result<()>;
    /// Set the size of the vnet hdr.
    fn set_vnet_hdr_size(&self, size: c_int) -> Result<()>;
    /// Returns an `ifreq` pre-filled with this interface's name and flags.
    fn get_ifreq(&self) -> net_sys::ifreq;
    /// Get the interface flags
    fn if_flags(&self) -> u32;
}
impl TapT for Tap {
    fn new(vnet_hdr: bool, multi_vq: bool) -> Result<Tap> {
        // Kernel expands "%d" to the next free interface index (vmtap0, ...).
        const TUNTAP_DEV_FORMAT: &[u8; 8usize] = b"vmtap%d\0";
        // This is pretty messy because of the unions used by ifreq. Since we
        // don't call as_mut on the same union field more than once, this block
        // is safe.
        let mut ifreq: net_sys::ifreq = Default::default();
        unsafe {
            let ifrn_name = ifreq.ifr_ifrn.ifrn_name.as_mut();
            let name_slice = &mut ifrn_name[..TUNTAP_DEV_FORMAT.len()];
            for (dst, src) in name_slice.iter_mut().zip(TUNTAP_DEV_FORMAT.iter()) {
                *dst = *src as c_char;
            }
            ifreq.ifr_ifru.ifru_flags = (net_sys::IFF_TAP
                | net_sys::IFF_NO_PI
                | if vnet_hdr { net_sys::IFF_VNET_HDR } else { 0 })
                as c_short;
            if multi_vq {
                ifreq.ifr_ifru.ifru_flags |= net_sys::IFF_MULTI_QUEUE as c_short;
            }
        }
        Tap::create_tap_with_ifreq(&mut ifreq)
    }
    fn into_mq_taps(self, vq_pairs: u16) -> Result<Vec<Tap>> {
        let mut taps: Vec<Tap> = Vec::new();
        if vq_pairs <= 1 {
            taps.push(self);
            return Ok(taps);
        }
        // Add other socket into the origin tap interface
        for _ in 0..vq_pairs - 1 {
            let mut ifreq = self.get_ifreq();
            let tap = Tap::create_tap_with_ifreq(&mut ifreq)?;
            tap.enable()?;
            taps.push(tap);
        }
        // The original tap stays first in the returned list.
        taps.insert(0, self);
        Ok(taps)
    }
    fn ip_addr(&self) -> Result<net::Ipv4Addr> {
        let sock = create_socket()?;
        let mut ifreq = self.get_ifreq();
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret = unsafe {
            ioctl_with_mut_ref(&sock, net_sys::sockios::SIOCGIFADDR as IoctlNr, &mut ifreq)
        };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        // We only access one field of the ifru union, hence this is safe.
        let addr = unsafe { ifreq.ifr_ifru.ifru_addr };
        Ok(read_ipv4_addr(&addr))
    }
    fn set_ip_addr(&self, ip_addr: net::Ipv4Addr) -> Result<()> {
        let sock = create_socket()?;
        let addr = create_sockaddr(ip_addr);
        let mut ifreq = self.get_ifreq();
        ifreq.ifr_ifru.ifru_addr = addr;
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret =
            unsafe { ioctl_with_ref(&sock, net_sys::sockios::SIOCSIFADDR as IoctlNr, &ifreq) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn netmask(&self) -> Result<net::Ipv4Addr> {
        let sock = create_socket()?;
        let mut ifreq = self.get_ifreq();
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret = unsafe {
            ioctl_with_mut_ref(
                &sock,
                net_sys::sockios::SIOCGIFNETMASK as IoctlNr,
                &mut ifreq,
            )
        };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        // We only access one field of the ifru union, hence this is safe.
        let addr = unsafe { ifreq.ifr_ifru.ifru_netmask };
        Ok(read_ipv4_addr(&addr))
    }
    fn set_netmask(&self, netmask: net::Ipv4Addr) -> Result<()> {
        let sock = create_socket()?;
        let addr = create_sockaddr(netmask);
        let mut ifreq = self.get_ifreq();
        ifreq.ifr_ifru.ifru_netmask = addr;
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret =
            unsafe { ioctl_with_ref(&sock, net_sys::sockios::SIOCSIFNETMASK as IoctlNr, &ifreq) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn mac_address(&self) -> Result<MacAddress> {
        let sock = create_socket()?;
        let mut ifreq = self.get_ifreq();
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret = unsafe {
            ioctl_with_mut_ref(
                &sock,
                net_sys::sockios::SIOCGIFHWADDR as IoctlNr,
                &mut ifreq,
            )
        };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        // We only access one field of the ifru union, hence this is safe.
        // This is safe since the MacAddress struct is already sized to match the C sockaddr
        // struct. The address family has also been checked.
        Ok(unsafe { mem::transmute(ifreq.ifr_ifru.ifru_hwaddr) })
    }
    fn set_mac_address(&self, mac_addr: MacAddress) -> Result<()> {
        let sock = create_socket()?;
        let mut ifreq = self.get_ifreq();
        // We only access one field of the ifru union, hence this is safe.
        unsafe {
            // This is safe since the MacAddress struct is already sized to match the C sockaddr
            // struct.
            ifreq.ifr_ifru.ifru_hwaddr = std::mem::transmute(mac_addr);
        }
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret =
            unsafe { ioctl_with_ref(&sock, net_sys::sockios::SIOCSIFHWADDR as IoctlNr, &ifreq) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn set_offload(&self, flags: c_uint) -> Result<()> {
        // ioctl is safe. Called with a valid tap fd, and we check the return.
        let ret =
            unsafe { ioctl_with_val(&self.tap_file, net_sys::TUNSETOFFLOAD(), flags as c_ulong) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn enable(&self) -> Result<()> {
        let sock = create_socket()?;
        let mut ifreq = self.get_ifreq();
        // Bring the interface up and mark it running.
        ifreq.ifr_ifru.ifru_flags =
            (net_sys::net_device_flags_IFF_UP | net_sys::net_device_flags_IFF_RUNNING) as i16;
        // ioctl is safe. Called with a valid sock fd, and we check the return.
        let ret =
            unsafe { ioctl_with_ref(&sock, net_sys::sockios::SIOCSIFFLAGS as IoctlNr, &ifreq) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn set_vnet_hdr_size(&self, size: c_int) -> Result<()> {
        // ioctl is safe. Called with a valid tap fd, and we check the return.
        let ret = unsafe { ioctl_with_ref(&self.tap_file, net_sys::TUNSETVNETHDRSZ(), &size) };
        if ret < 0 {
            return Err(Error::IoctlError(SysError::last()));
        }
        Ok(())
    }
    fn get_ifreq(&self) -> net_sys::ifreq {
        let mut ifreq: net_sys::ifreq = Default::default();
        // This sets the name of the interface, which is the only entry
        // in a single-field union.
        unsafe {
            let ifrn_name = ifreq.ifr_ifrn.ifrn_name.as_mut();
            ifrn_name.clone_from_slice(&self.if_name);
        }
        // This sets the flags with which the interface was created, which is the only entry we set
        // on the second union.
        ifreq.ifr_ifru.ifru_flags = self.if_flags;
        ifreq
    }
    fn if_flags(&self) -> u32 {
        self.if_flags as u32
    }
}
impl Read for Tap {
    // Delegates directly to the underlying tap fd.
    fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
        self.tap_file.read(buf)
    }
}
impl Write for Tap {
    // Delegates directly to the underlying tap fd. `buf` is already a slice
    // reference, so the previous `&buf` re-borrow was unnecessary.
    fn write(&mut self, buf: &[u8]) -> IoResult<usize> {
        self.tap_file.write(buf)
    }
    // The tap fd is unbuffered, so there is nothing to flush.
    fn flush(&mut self) -> IoResult<()> {
        Ok(())
    }
}
impl AsRawFd for Tap {
    // Exposes the tap fd without transferring ownership.
    fn as_raw_fd(&self) -> RawFd {
        self.tap_file.as_raw_descriptor()
    }
}
impl AsRawDescriptor for Tap {
    // Exposes the tap fd without transferring ownership.
    fn as_raw_descriptor(&self) -> RawDescriptor {
        self.tap_file.as_raw_descriptor()
    }
}
// NOTE(review): presumably generates the FileReadWriteVolatile impl for Tap
// on top of the raw fd — confirm against the macro definition in `base`.
volatile_impl!(Tap);
/// Fake `TapT` implementation for tests: backed by a plain temp file and
/// answering all configuration calls with fixed values.
pub mod fakes {
    use super::*;
    use std::fs::remove_file;
    use std::fs::OpenOptions;
    // Backing file shared by all FakeTap instances; removed on drop.
    const TMP_FILE: &str = "/tmp/crosvm_tap_test_file";
    pub struct FakeTap {
        tap_file: File,
    }
    impl TapT for FakeTap {
        fn new(_: bool, _: bool) -> Result<FakeTap> {
            Ok(FakeTap {
                tap_file: OpenOptions::new()
                    .read(true)
                    .append(true)
                    .create(true)
                    .open(TMP_FILE)
                    .unwrap(),
            })
        }
        fn into_mq_taps(self, _vq_pairs: u16) -> Result<Vec<FakeTap>> {
            Ok(Vec::new())
        }
        // The getters below return fixed dummy values; the setters succeed
        // without doing anything.
        fn ip_addr(&self) -> Result<net::Ipv4Addr> {
            Ok(net::Ipv4Addr::new(1, 2, 3, 4))
        }
        fn set_ip_addr(&self, _: net::Ipv4Addr) -> Result<()> {
            Ok(())
        }
        fn netmask(&self) -> Result<net::Ipv4Addr> {
            Ok(net::Ipv4Addr::new(255, 255, 255, 252))
        }
        fn set_netmask(&self, _: net::Ipv4Addr) -> Result<()> {
            Ok(())
        }
        fn mac_address(&self) -> Result<MacAddress> {
            Ok("01:02:03:04:05:06".parse().unwrap())
        }
        fn set_mac_address(&self, _: MacAddress) -> Result<()> {
            Ok(())
        }
        fn set_offload(&self, _: c_uint) -> Result<()> {
            Ok(())
        }
        fn enable(&self) -> Result<()> {
            Ok(())
        }
        fn set_vnet_hdr_size(&self, _: c_int) -> Result<()> {
            Ok(())
        }
        fn get_ifreq(&self) -> net_sys::ifreq {
            let ifreq: net_sys::ifreq = Default::default();
            ifreq
        }
        fn if_flags(&self) -> u32 {
            net_sys::IFF_TAP
        }
    }
    impl Drop for FakeTap {
        fn drop(&mut self) {
            let _ = remove_file(TMP_FILE);
        }
    }
    // Reads and writes are no-ops that report zero bytes transferred.
    impl Read for FakeTap {
        fn read(&mut self, _: &mut [u8]) -> IoResult<usize> {
            Ok(0)
        }
    }
    impl Write for FakeTap {
        fn write(&mut self, _: &[u8]) -> IoResult<usize> {
            Ok(0)
        }
        fn flush(&mut self) -> IoResult<()> {
            Ok(())
        }
    }
    impl AsRawFd for FakeTap {
        fn as_raw_fd(&self) -> RawFd {
            self.tap_file.as_raw_descriptor()
        }
    }
    impl AsRawDescriptor for FakeTap {
        fn as_raw_descriptor(&self) -> RawDescriptor {
            self.tap_file.as_raw_descriptor()
        }
    }
    volatile_impl!(FakeTap);
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parse_mac_address() {
        assert!("01:02:03:04:05:06".parse::<MacAddress>().is_ok());
        assert!("01:06".parse::<MacAddress>().is_err());
        assert!("01:02:03:04:05:06:07:08:09".parse::<MacAddress>().is_err());
        assert!("not a mac address".parse::<MacAddress>().is_err());
    }
    #[test]
    fn tap_create() {
        Tap::new(true, false).unwrap();
    }
    #[test]
    fn tap_configure() {
        let tap = Tap::new(true, false).unwrap();
        let ip_addr: net::Ipv4Addr = "100.115.92.5".parse().unwrap();
        let netmask: net::Ipv4Addr = "255.255.255.252".parse().unwrap();
        let mac_addr: MacAddress = "a2:06:b9:3d:68:4d".parse().unwrap();
        let ret = tap.set_ip_addr(ip_addr);
        assert_ok_or_perm_denied(ret);
        let ret = tap.set_netmask(netmask);
        assert_ok_or_perm_denied(ret);
        let ret = tap.set_mac_address(mac_addr);
        assert_ok_or_perm_denied(ret);
    }
    /// This test will only work if the test is run with root permissions and, unlike other tests
    /// in this file, do not return PermissionDenied. They fail because the TAP FD is not
    /// initialized (as opposed to permission denial). Run this with "cargo test -- --ignored".
    #[test]
    #[ignore]
    fn root_only_tests() {
        // This line will fail to provide an initialized FD if the test is not run as root.
        let tap = Tap::new(true, false).unwrap();
        tap.set_vnet_hdr_size(16).unwrap();
        tap.set_offload(0).unwrap();
    }
    #[test]
    fn tap_enable() {
        let tap = Tap::new(true, false).unwrap();
        let ret = tap.enable();
        assert_ok_or_perm_denied(ret);
    }
    // Helper: configuration ioctls legitimately fail with EPERM when the
    // test environment is not privileged.
    fn assert_ok_or_perm_denied<T>(res: Result<T>) {
        match res {
            // We won't have permission in test environments; allow that
            Ok(_t) => {}
            Err(Error::IoctlError(e)) if e.errno() == EPERM => {}
            Err(e) => panic!("Unexpected Error:\n{}", e),
        }
    }
}
| 30.648148 | 100 | 0.56951 |
f5047b3b4ea387123481f13d44479d945fa87a17 | 3,033 | pub mod bindings;
mod function;
mod quotable;
mod value;
pub use self::function::*;
pub use self::value::*;
use proc_macro2::Span;
use syn::{spanned::Spanned, Ident, Lit, Meta, NestedMeta, Path, PathSegment};
/// Returns the final segment of `path`, panicking on an empty path.
pub fn last_segment_in_path(path: &Path) -> &PathSegment {
    match path.segments.iter().last() {
        Some(segment) => segment,
        None => panic!("expected at least one segment in path"),
    }
}
/// Invokes `callback` with the name and literal value of every attribute
/// argument in `args`, stopping early when the callback returns `false`.
///
/// Panics (via `macro_panic`) when an argument is not a `name = value` pair.
pub fn iter_attribute_args<F>(args: &[NestedMeta], mut callback: F)
where
    F: FnMut(&Ident, &Lit) -> bool,
{
    for arg in args.iter() {
        // Each argument must be a meta item (not a bare literal)...
        // (Also fixes the "name-vaule" typo in the original error message.)
        let meta = match arg {
            NestedMeta::Meta(m) => m,
            _ => macro_panic(arg.span(), "expected a name-value pair for an argument"),
        };
        // ...and specifically a `name = value` pair.
        match meta {
            Meta::NameValue(nvp) => {
                if !callback(&nvp.ident, &nvp.lit) {
                    return;
                }
            }
            _ => macro_panic(meta.span(), "expected name-value pair for an argument"),
        }
    }
}
/// Extracts a `String` from `value`, aborting macro expansion when the
/// literal is not a string.
pub fn get_string_value(name: &str, value: &Lit) -> String {
    match value {
        Lit::Str(s) => s.value(),
        _ => macro_panic(
            value.span(),
            format!(
                "expected a literal string value for the '{}' argument",
                name
            ),
        ),
    }
}
/// Extracts a `bool` from `value`, aborting macro expansion when the
/// literal is not a boolean.
pub fn get_boolean_value(name: &str, value: &Lit) -> bool {
    match value {
        Lit::Bool(b) => b.value,
        _ => macro_panic(
            value.span(),
            format!(
                "expected a literal boolean value for the '{}' argument",
                name
            ),
        ),
    }
}
/// Extracts an `i64` from `value`, aborting macro expansion when the
/// literal is not an integer.
pub fn get_integer_value(name: &str, value: &Lit) -> i64 {
    match value {
        Lit::Int(i) => i.value() as i64,
        _ => macro_panic(
            value.span(),
            format!(
                "expected a literal integer value for the '{}' argument",
                name
            ),
        ),
    }
}
/// Reports `message` as a compiler diagnostic at `span` (nightly-only
/// `Span::unstable()` API), then aborts macro expansion.
#[cfg(feature = "unstable")]
pub fn macro_panic<T>(span: Span, message: T) -> !
where
    T: AsRef<str>,
{
    span.unstable().error(message.as_ref()).emit();
    panic!("aborting due to previous error");
}
/// Stable fallback: panics with `message`, losing the span information.
#[cfg(not(feature = "unstable"))]
pub fn macro_panic<T>(_: Span, message: T) -> !
where
    T: AsRef<str>,
{
    panic!("{}", message.as_ref());
}
#[cfg(test)]
mod tests {
    use std::panic::{catch_unwind, UnwindSafe};
    /// Asserts that `callback` panics, and that the panic message matches
    /// what `macro_panic` produces for the active feature set.
    pub fn should_panic<T>(callback: T, msg: &str)
    where
        T: FnOnce() + UnwindSafe,
    {
        let result = catch_unwind(|| callback());
        assert!(result.is_err(), "the function did not panic");
        if cfg!(feature = "unstable") {
            // The unstable path always panics with a fixed message after
            // emitting the real diagnostic.
            assert_eq!(
                result.unwrap_err().downcast_ref::<String>().unwrap(),
                "aborting due to previous error",
                "the panic message is not the expected one"
            );
        } else {
            assert_eq!(
                result.unwrap_err().downcast_ref::<String>().unwrap(),
                msg,
                "the panic message is not the expected one"
            );
        }
    }
}
| 24.264 | 91 | 0.513683 |
f48b250692cbf6e2ddfcdf987e44b2e9d81b146d | 1,220 | use anyhow::Result;
use crate::{
identities::{
domain::password_resets::PasswordResetTokenData, models::password_resets::PasswordReset,
},
PostgresConn,
};
use super::{PasswordResetError, PasswordResetQueries};
/// `PasswordResetQueries` implementation backed by a Postgres connection.
pub struct PostgresQueries<'a>(pub &'a PostgresConn);
impl From<diesel::result::Error> for PasswordResetError {
fn from(error: diesel::result::Error) -> Self {
Self::Unknown(error.into())
}
}
#[async_trait]
impl<'a> PasswordResetQueries for PostgresQueries<'a> {
    /// Looks up a password reset row by its token.
    ///
    /// Returns `PasswordResetError::NotFound` when no row matches; any other
    /// diesel failure is wrapped via `From<diesel::result::Error>`.
    async fn get_password_reset(
        &self,
        provided_token: String,
    ) -> Result<PasswordResetTokenData, PasswordResetError> {
        let reset = self
            .0
            // Runs the blocking diesel query on the connection pool's thread.
            .run::<_, Result<_, PasswordResetError>>(move |conn| {
                use crate::schema::password_resets::dsl::*;
                use diesel::prelude::*;
                Ok(password_resets
                    .filter(token.eq(provided_token))
                    .get_result::<PasswordReset>(conn)
                    // `optional()` maps diesel's NotFound onto `Ok(None)`.
                    .optional()?)
            })
            .await?;
        match reset {
            Some(reset) => Ok(reset.into()),
            None => Err(PasswordResetError::NotFound),
        }
    }
}
| 27.111111 | 96 | 0.57541 |
39128875dbebe2d9ef68a176a04b0b0740b6f63b | 2,786 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Debug interface to access information in a specific node.
use crate::{
json_log,
proto::{
node_debug_interface::{
DumpJemallocHeapProfileRequest, DumpJemallocHeapProfileResponse, Event,
GetEventsRequest, GetEventsResponse, GetNodeDetailsRequest, GetNodeDetailsResponse,
},
node_debug_interface_grpc::NodeDebugInterface,
},
};
use futures::Future;
use logger::prelude::*;
use metrics::counters::COUNTER_ADMISSION_CONTROL_CANNOT_SEND_REPLY;
/// Stateless gRPC service exposing node debug information: metrics,
/// buffered log events, and jemalloc heap profiles.
#[derive(Clone, Default)]
pub struct NodeDebugService {}
impl NodeDebugService {
    /// Creates a new, stateless debug service.
    pub fn new() -> Self {
        Self::default()
    }
}
impl NodeDebugInterface for NodeDebugService {
    /// Returns a snapshot of all registered metrics.
    fn get_node_details(
        &mut self,
        ctx: ::grpcio::RpcContext<'_>,
        _req: GetNodeDetailsRequest,
        sink: ::grpcio::UnarySink<GetNodeDetailsResponse>,
    ) {
        info!("[GRPC] get_node_details");
        let mut response = GetNodeDetailsResponse::new();
        response.stats = metrics::get_all_metrics();
        ctx.spawn(sink.success(response).map_err(default_reply_error_logger))
    }
    /// Drains the most recent JSON log entries and returns them as events.
    fn get_events(
        &mut self,
        ctx: ::grpcio::RpcContext<'_>,
        _req: GetEventsRequest,
        sink: ::grpcio::UnarySink<GetEventsResponse>,
    ) {
        let mut response = GetEventsResponse::new();
        for event in json_log::pop_last_entries() {
            let mut response_event = Event::new();
            response_event.set_name(event.name.to_string());
            response_event.set_timestamp(event.timestamp as i64);
            let serialized_event =
                serde_json::to_string(&event.json).expect("Failed to serialize event to json");
            response_event.set_json(serialized_event);
            response.events.push(response_event);
        }
        ctx.spawn(sink.success(response).map_err(default_reply_error_logger))
    }
    /// Triggers a jemalloc heap-profile dump; a non-zero status code in the
    /// response carries the jemalloc error code.
    fn dump_jemalloc_heap_profile(
        &mut self,
        ctx: ::grpcio::RpcContext<'_>,
        _request: DumpJemallocHeapProfileRequest,
        sink: ::grpcio::UnarySink<DumpJemallocHeapProfileResponse>,
    ) {
        trace!("[GRPC] dump_jemalloc_heap_profile");
        let status_code = match jemalloc::dump_jemalloc_memory_profile() {
            Ok(_) => 0,
            Err(err_code) => err_code,
        };
        let mut resp = DumpJemallocHeapProfileResponse::new();
        resp.status_code = status_code;
        let f = sink.success(resp).map_err(default_reply_error_logger);
        ctx.spawn(f)
    }
}
/// Fallback handler for failed gRPC reply futures: bump a counter and log,
/// since nothing else can be done once the sink itself has failed.
fn default_reply_error_logger<T: ::std::fmt::Debug>(e: T) {
    COUNTER_ADMISSION_CONTROL_CANNOT_SEND_REPLY.inc();
    error!("Failed to reply error due to {:?}", e)
}
| 33.566265 | 95 | 0.653625 |
64a612056c1b9e7de2288a1bf94123210e66165c | 15,068 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use std::borrow::Cow;
use std::io::Write;
use crate::errors::Result;
use crate::histogram::BUCKET_LABEL;
use crate::proto::{self, MetricFamily, MetricType};
use super::{check_metric_family, Encoder};
/// The text format of metric family.
pub const TEXT_FORMAT: &str = "text/plain; version=0.0.4";
/// Bucket-label value for the implicit upper (+infinity) histogram bucket.
const POSITIVE_INF: &str = "+Inf";
/// Label name used for summary quantiles.
const QUANTILE: &str = "quantile";
/// An implementation of an [`Encoder`] that converts a [`MetricFamily`] proto message
/// into text format.
///
/// The encoder is a stateless, zero-sized unit struct.
#[derive(Debug, Default)]
pub struct TextEncoder;
impl TextEncoder {
/// Create a new text encoder.
pub fn new() -> TextEncoder {
TextEncoder
}
}
impl Encoder for TextEncoder {
    /// Encode `metric_families` into the Prometheus text exposition format
    /// (version 0.0.4) and write the result to `writer`.
    ///
    /// For every family this writes the optional `# HELP` line, the `# TYPE`
    /// line, then one sample line per metric. Histograms additionally get a
    /// synthetic `+Inf` bucket (if not already present) plus `_sum` and
    /// `_count` lines; summaries get one line per quantile plus `_sum` and
    /// `_count`.
    ///
    /// # Errors
    /// Fails if `check_metric_family` rejects a family or if writing fails.
    ///
    /// # Panics
    /// Panics (`unimplemented!`) on a family of type `UNTYPED`.
    fn encode<W: Write>(&self, metric_families: &[MetricFamily], writer: &mut W) -> Result<()> {
        for mf in metric_families {
            // Fail-fast checks.
            check_metric_family(mf)?;
            // Write `# HELP` header.
            let name = mf.get_name();
            let help = mf.get_help();
            if !help.is_empty() {
                writer.write_all(b"# HELP ")?;
                writer.write_all(name.as_bytes())?;
                writer.write_all(b" ")?;
                writer.write_all(escape_string(help, false).as_bytes())?;
                writer.write_all(b"\n")?;
            }
            // Write `# TYPE` header.
            let metric_type = mf.get_field_type();
            let lowercase_type = format!("{:?}", metric_type).to_lowercase();
            writer.write_all(b"# TYPE ")?;
            writer.write_all(name.as_bytes())?;
            writer.write_all(b" ")?;
            writer.write_all(lowercase_type.as_bytes())?;
            writer.write_all(b"\n")?;
            for m in mf.get_metric() {
                match metric_type {
                    MetricType::COUNTER => {
                        let value = m.get_counter().get_value();
                        let exemplar = m.get_counter().get_exemplar();
                        write_sample(writer, name, None, m, None, value, exemplar)?;
                    }
                    MetricType::GAUGE => {
                        write_sample(writer, name, None, m, None, m.get_gauge().get_value(), None)?;
                    }
                    MetricType::HISTOGRAM => {
                        let h = m.get_histogram();
                        let mut inf_seen = false;
                        for b in h.get_bucket() {
                            let upper_bound = b.get_upper_bound();
                            let exemplar = b.get_exemplar();
                            write_sample(
                                writer,
                                name,
                                Some("_bucket"),
                                m,
                                Some((BUCKET_LABEL, &upper_bound.to_string())),
                                b.get_cumulative_count() as f64,
                                exemplar,
                            )?;
                            if upper_bound.is_sign_positive() && upper_bound.is_infinite() {
                                inf_seen = true;
                            }
                        }
                        // The text format requires a `+Inf` bucket whose count
                        // equals the total sample count; synthesise one if the
                        // metric did not carry it explicitly.
                        if !inf_seen {
                            write_sample(
                                writer,
                                name,
                                Some("_bucket"),
                                m,
                                Some((BUCKET_LABEL, POSITIVE_INF)),
                                h.get_sample_count() as f64,
                                None,
                            )?;
                        }
                        write_sample(
                            writer,
                            name,
                            Some("_sum"),
                            m,
                            None,
                            h.get_sample_sum(),
                            None,
                        )?;
                        write_sample(
                            writer,
                            name,
                            Some("_count"),
                            m,
                            None,
                            h.get_sample_count() as f64,
                            None,
                        )?;
                    }
                    MetricType::SUMMARY => {
                        let s = m.get_summary();
                        for q in s.get_quantile() {
                            write_sample(
                                writer,
                                name,
                                None,
                                m,
                                Some((QUANTILE, &q.get_quantile().to_string())),
                                q.get_value(),
                                None,
                            )?;
                        }
                        write_sample(
                            writer,
                            name,
                            Some("_sum"),
                            m,
                            None,
                            s.get_sample_sum(),
                            None,
                        )?;
                        write_sample(
                            writer,
                            name,
                            Some("_count"),
                            m,
                            None,
                            s.get_sample_count() as f64,
                            None,
                        )?;
                    }
                    MetricType::UNTYPED => {
                        unimplemented!();
                    }
                }
            }
        }
        // BUG FIX: no `# EOF` trailer is written. The `# EOF` marker belongs
        // to the OpenMetrics format; the classic text format advertised by
        // `TEXT_FORMAT` ("text/plain; version=0.0.4") has no end marker, and
        // the unit tests in this file assert output without one.
        Ok(())
    }

    /// MIME content type of the output produced by `encode`.
    fn format_type(&self) -> &str {
        TEXT_FORMAT
    }
}
/// Write one text-format sample line to `writer`:
/// `name[postfix]{labels} value [timestamp][ # exemplar]\n`.
///
/// `additional_label` (e.g. `le` for buckets, `quantile` for summaries) is
/// appended after the metric's own labels; `exemplar`, when present, is
/// appended after the value/timestamp.
fn write_sample(
    writer: &mut dyn Write,
    name: &str,
    name_postfix: Option<&str>,
    mc: &proto::Metric,
    additional_label: Option<(&str, &str)>,
    value: f64,
    exemplar: Option<&proto::Exemplar>,
) -> Result<()> {
    writer.write_all(name.as_bytes())?;
    if let Some(postfix) = name_postfix {
        writer.write_all(postfix.as_bytes())?;
    }
    label_pairs_to_text(mc.get_label(), additional_label, writer)?;
    write!(writer, " {}", value)?;
    // A zero timestamp means "not set" and is omitted.
    let ts = mc.get_timestamp_ms();
    if ts != 0 {
        write!(writer, " {}", ts)?;
    }
    if let Some(ex) = exemplar {
        write_exemplar(writer, ex)?;
    }
    writer.write_all(b"\n")?;
    Ok(())
}
// Append an exemplar annotation to a sample line, e.g.:
//   foo_bucket{le="10"} 17 # {trace_id="oHg5SJYRHA0"} 9.8
fn write_exemplar(writer: &mut dyn Write, ex: &proto::Exemplar) -> Result<()> {
    writer.write_all(b" # ")?;
    label_pairs_to_text(ex.get_label(), None, writer)?;
    write!(writer, " {}", ex.get_value())?;
    // NOTE: the exemplar timestamp is deliberately not emitted (the original
    // implementation had this block commented out as well).
    Ok(())
}
/// Write the label set of a sample as `{name="value",...}`, escaping values
/// as required by the text format.
///
/// `additional_label`, when present, is appended after `pairs`. When both
/// `pairs` is empty and `additional_label` is `None`, nothing at all is
/// written (not even the braces).
fn label_pairs_to_text(
    pairs: &[proto::LabelPair],
    additional_label: Option<(&str, &str)>,
    writer: &mut dyn Write,
) -> Result<()> {
    if pairs.is_empty() && additional_label.is_none() {
        return Ok(());
    }
    // The first pair opens the brace; every subsequent pair is preceded by
    // a comma.
    for (i, lp) in pairs.iter().enumerate() {
        writer.write_all(if i == 0 { b"{" } else { b"," })?;
        writer.write_all(lp.get_name().as_bytes())?;
        writer.write_all(b"=\"")?;
        writer.write_all(escape_string(lp.get_value(), true).as_bytes())?;
        writer.write_all(b"\"")?;
    }
    if let Some((name, value)) = additional_label {
        writer.write_all(if pairs.is_empty() { b"{" } else { b"," })?;
        writer.write_all(name.as_bytes())?;
        writer.write_all(b"=\"")?;
        writer.write_all(escape_string(value, true).as_bytes())?;
        writer.write_all(b"\"")?;
    }
    writer.write_all(b"}")?;
    Ok(())
}
/// Locate the first byte of `v` that would need escaping: `\` or `\n`,
/// plus `"` when `include_double_quote` is set. Uses memchr for a fast
/// SIMD-accelerated scan.
fn find_first_occurence(v: &str, include_double_quote: bool) -> Option<usize> {
    let bytes = v.as_bytes();
    match include_double_quote {
        true => memchr::memchr3(b'\\', b'\n', b'\"', bytes),
        false => memchr::memchr2(b'\\', b'\n', bytes),
    }
}
/// `escape_string` replaces `\` by `\\`, new line character by `\n`, and `"` by `\"` if
/// `include_double_quote` is true.
///
/// Returns a borrowed `Cow` (no allocation) when nothing needs escaping.
///
/// Implementation adapted from
/// https://lise-henry.github.io/articles/optimising_strings.html
fn escape_string(v: &str, include_double_quote: bool) -> Cow<'_, str> {
    // Fast path: no byte needs escaping, hand the input straight back.
    let first = match find_first_occurence(v, include_double_quote) {
        None => return v.into(),
        Some(idx) => idx,
    };
    // Slow path: copy the clean prefix, then escape char-by-char.
    let mut escaped = String::with_capacity(v.len() * 2);
    escaped.push_str(&v[..first]);
    for c in v[first..].chars() {
        match c {
            '\\' | '\n' => escaped.extend(c.escape_default()),
            '"' if include_double_quote => escaped.extend(c.escape_default()),
            _ => escaped.push(c),
        }
    }
    escaped.shrink_to_fit();
    escaped.into()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::counter::Counter;
    use crate::gauge::Gauge;
    use crate::histogram::{Histogram, HistogramOpts};
    use crate::metrics::{Collector, Opts};
    // Exercises both escape paths (backslash/newline always, double quote
    // only when requested) and the borrowed fast path.
    #[test]
    fn test_escape_string() {
        assert_eq!(r"\\", escape_string("\\", false));
        assert_eq!(r"a\\", escape_string("a\\", false));
        assert_eq!(r"\n", escape_string("\n", false));
        assert_eq!(r"a\n", escape_string("a\n", false));
        assert_eq!(r"\\n", escape_string("\\n", false));
        assert_eq!(r##"\\n\""##, escape_string("\\n\"", true));
        assert_eq!(r##"\\\n\""##, escape_string("\\\n\"", true));
        assert_eq!(r##"\\\\n\""##, escape_string("\\\\n\"", true));
        assert_eq!(r##"\"\\n\""##, escape_string("\"\\n\"", true));
    }
    // Full-output comparison for counter and gauge families, including HELP
    // and TYPE headers and const labels.
    #[test]
    fn test_text_encoder() {
        let counter_opts = Opts::new("test_counter", "test help")
            .const_label("a", "1")
            .const_label("b", "2");
        let counter = Counter::with_opts(counter_opts).unwrap();
        counter.inc();
        let mf = counter.collect();
        let mut writer = Vec::<u8>::new();
        let encoder = TextEncoder::new();
        let txt = encoder.encode(&mf, &mut writer);
        assert!(txt.is_ok());
        let counter_ans = r##"# HELP test_counter test help
# TYPE test_counter counter
test_counter{a="1",b="2"} 1
"##;
        assert_eq!(counter_ans.as_bytes(), writer.as_slice());
        let gauge_opts = Opts::new("test_gauge", "test help")
            .const_label("a", "1")
            .const_label("b", "2");
        let gauge = Gauge::with_opts(gauge_opts).unwrap();
        gauge.inc();
        gauge.set(42.0);
        let mf = gauge.collect();
        writer.clear();
        let txt = encoder.encode(&mf, &mut writer);
        assert!(txt.is_ok());
        let gauge_ans = r##"# HELP test_gauge test help
# TYPE test_gauge gauge
test_gauge{a="1",b="2"} 42
"##;
        assert_eq!(gauge_ans.as_bytes(), writer.as_slice());
    }
    // Checks the per-bucket lines, the implicit +Inf bucket, and the
    // _sum/_count trailer lines of a histogram.
    #[test]
    fn test_text_encoder_histogram() {
        let opts = HistogramOpts::new("test_histogram", "test help").const_label("a", "1");
        let histogram = Histogram::with_opts(opts).unwrap();
        histogram.observe(0.25);
        let mf = histogram.collect();
        let mut writer = Vec::<u8>::new();
        let encoder = TextEncoder::new();
        let res = encoder.encode(&mf, &mut writer);
        assert!(res.is_ok());
        let ans = r##"# HELP test_histogram test help
# TYPE test_histogram histogram
test_histogram_bucket{a="1",le="0.005"} 0
test_histogram_bucket{a="1",le="0.01"} 0
test_histogram_bucket{a="1",le="0.025"} 0
test_histogram_bucket{a="1",le="0.05"} 0
test_histogram_bucket{a="1",le="0.1"} 0
test_histogram_bucket{a="1",le="0.25"} 1
test_histogram_bucket{a="1",le="0.5"} 1
test_histogram_bucket{a="1",le="1"} 1
test_histogram_bucket{a="1",le="2.5"} 1
test_histogram_bucket{a="1",le="5"} 1
test_histogram_bucket{a="1",le="10"} 1
test_histogram_bucket{a="1",le="+Inf"} 1
test_histogram_sum{a="1"} 0.25
test_histogram_count{a="1"} 1
"##;
        assert_eq!(ans.as_bytes(), writer.as_slice());
    }
    // Builds a summary family by hand (proto structs) and checks quantile
    // lines plus _sum/_count.
    #[test]
    fn test_text_encoder_summary() {
        use crate::proto::{Metric, Quantile, Summary};
        use std::str;
        let mut metric_family = MetricFamily::default();
        metric_family.set_name("test_summary".to_string());
        metric_family.set_help("This is a test summary statistic".to_string());
        metric_family.set_field_type(MetricType::SUMMARY);
        let mut summary = Summary::default();
        summary.set_sample_count(5.0 as u64);
        summary.set_sample_sum(15.0);
        let mut quantile1 = Quantile::default();
        quantile1.set_quantile(50.0);
        quantile1.set_value(3.0);
        let mut quantile2 = Quantile::default();
        quantile2.set_quantile(100.0);
        quantile2.set_value(5.0);
        summary.set_quantile(from_vec!(vec!(quantile1, quantile2)));
        let mut metric = Metric::default();
        metric.set_summary(summary);
        metric_family.set_metric(from_vec!(vec!(metric)));
        let mut writer = Vec::<u8>::new();
        let encoder = TextEncoder::new();
        let res = encoder.encode(&vec![metric_family], &mut writer);
        assert!(res.is_ok());
        let ans = r##"# HELP test_summary This is a test summary statistic
# TYPE test_summary summary
test_summary{quantile="50"} 3
test_summary{quantile="100"} 5
test_summary_sum 15
test_summary_count 5
"##;
        assert_eq!(ans, str::from_utf8(writer.as_slice()).unwrap());
    }
}
| 33.936937 | 100 | 0.505309 |
096e2dae313a3431a81f9451efba97de93ae53bf | 10,103 | use async_std::future::ready;
use async_std::prelude::*;
use async_std::task;
use async_trait::async_trait;
use atcoder_problems_backend::server::GitHubUserResponse;
use atcoder_problems_backend::server::{run_server, Authentication};
use rand::Rng;
use serde_json::Value;
use sql_client::models::Submission;
use sql_client::PgPool;
use tide::Result;
pub mod utils;
/// Test double for `Authentication`; every method panics if invoked.
#[derive(Clone)]
struct MockAuth;
#[async_trait]
impl Authentication for MockAuth {
    // Not exercised by these tests — panics via `unimplemented!` if the
    // server ever calls it.
    async fn get_token(&self, _: &str) -> Result<String> {
        unimplemented!()
    }
    // Not exercised by these tests — panics via `unimplemented!` if the
    // server ever calls it.
    async fn get_user_id(&self, _: &str) -> Result<GitHubUserResponse> {
        unimplemented!()
    }
}
/// Seed the test database with fixture rows: an accepted count and rated
/// point sum for `u1`, plus five submissions each for `u1` and `u2`.
async fn prepare_data_set(conn: &PgPool) {
    sql_client::query(r"INSERT INTO accepted_count (user_id, problem_count) VALUES ('u1', 1)")
        .execute(conn)
        .await
        .unwrap();
    sql_client::query(r"INSERT INTO rated_point_sum (user_id, point_sum) VALUES ('u1', 1.0)")
        .execute(conn)
        .await
        .unwrap();
    // Each user's last AC sits far ahead in time (epoch 100 / 200) so the
    // from_second/to_second filters in the tests have something to cut on.
    sql_client::query(
        r"
    INSERT INTO
    submissions (epoch_second, problem_id, contest_id, user_id, result, id, language, point, length)
    VALUES
    (0, 'p1', 'c1', 'u1', 'WA', 1, 'Rust', 0.0, 0),
    (1, 'p1', 'c1', 'u1', 'RE', 2, 'Rust', 0.0, 0),
    (2, 'p1', 'c1', 'u1', 'AC', 3, 'Rust', 0.0, 0),
    (3, 'p1', 'c1', 'u1', 'AC', 4, 'Rust', 0.0, 0),
    (100,'p1', 'c1', 'u1', 'AC', 5, 'Rust', 0.0, 0),
    (4, 'p1', 'c1', 'u2', 'WA', 6, 'Rust', 0.0, 0),
    (5, 'p1', 'c1', 'u2', 'RE', 7, 'Rust', 0.0, 0),
    (6, 'p1', 'c1', 'u2', 'AC', 8, 'Rust', 0.0, 0),
    (7, 'p1', 'c1', 'u2', 'AC', 9, 'Rust', 0.0, 0),
    (200,'p1', 'c1', 'u2', 'AC', 10, 'Rust', 0.0, 0)",
    )
    .execute(conn)
    .await
    .unwrap();
}
/// Build an absolute URL for the locally-running test server.
fn url(path: &str, port: u16) -> String {
    let mut full = String::from("http://localhost:");
    full.push_str(&port.to_string());
    full.push_str(path);
    full
}
/// Prepare the test database and pick a server port for this test.
///
/// The port is pseudo-random in [30000, 59999] so concurrently running
/// tests are unlikely to collide. (The modulo introduces a slight bias,
/// which is harmless for port selection.)
async fn setup() -> u16 {
    prepare_data_set(&utils::initialize_and_connect_to_test_sql().await).await;
    let mut rng = rand::thread_rng();
    rng.gen::<u16>() % 30000 + 30000
}
/// `GET /atcoder-api/results?user=...` returns every submission of that
/// user (5 fixture rows each for u1 and u2).
#[async_std::test]
async fn test_user_submissions() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    // Give the server a moment to bind before issuing requests.
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let submissions: Vec<Submission> = surf::get(url("/atcoder-api/results?user=u1", port))
        .await
        .unwrap()
        .body_json()
        .await
        .unwrap();
    assert_eq!(submissions.len(), 5);
    assert!(submissions.iter().all(|s| s.user_id.as_str() == "u1"));
    let mut response = surf::get(url("/atcoder-api/results?user=u2", port))
        .await
        .unwrap();
    let submissions: Vec<Submission> = response.body_json().await.unwrap();
    assert_eq!(submissions.len(), 5);
    assert!(submissions.iter().all(|s| s.user_id.as_str() == "u2"));
    server.race(ready(())).await;
}
/// `GET /atcoder-api/v3/user/submissions?user=...&from_second=...` returns
/// only the submissions at or after the given epoch second, sorted by time.
#[async_std::test]
async fn test_user_submissions_fromtime() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let submissions: Vec<Submission> = surf::get(url(
        "/atcoder-api/v3/user/submissions?user=u1&from_second=3",
        port,
    ))
    .await
    .unwrap()
    .body_json()
    .await
    .unwrap();
    assert_eq!(submissions.len(), 2);
    assert!(submissions.iter().all(|s| s.user_id.as_str() == "u1"));
    let mut response = surf::get(url(
        "/atcoder-api/v3/user/submissions?user=u2&from_second=6",
        port,
    ))
    .await
    .unwrap();
    let submissions: Vec<Submission> = response.body_json().await.unwrap();
    assert_eq!(submissions.len(), 3);
    assert!(submissions.iter().all(|s| s.user_id.as_str() == "u2"));
    // Results are ordered by epoch_second ascending.
    assert_eq!(submissions[0].epoch_second, 6);
    assert_eq!(submissions[1].epoch_second, 7);
    assert_eq!(submissions[2].epoch_second, 200);
    // Unknown user yields an empty list rather than an error.
    let mut response = surf::get(url(
        "/atcoder-api/v3/user/submissions?user=u3&from_second=0",
        port,
    ))
    .await
    .unwrap();
    let submissions: Vec<Submission> = response.body_json().await.unwrap();
    assert_eq!(submissions.len(), 0);
    // Negative from_second is accepted and returns everything.
    let mut response = surf::get(url(
        "/atcoder-api/v3/user/submissions?user=u1&from_second=-30",
        port,
    ))
    .await
    .unwrap();
    let submissions: Vec<Submission> = response.body_json().await.unwrap();
    assert_eq!(submissions.len(), 5);
    // A from_second beyond the newest submission yields an empty list.
    let mut response = surf::get(url(
        "/atcoder-api/v3/user/submissions?user=u2&from_second=3000",
        port,
    ))
    .await
    .unwrap();
    let submissions: Vec<Submission> = response.body_json().await.unwrap();
    assert_eq!(submissions.len(), 0);
    server.race(ready(())).await;
}
/// `GET /atcoder-api/v3/from/{second}` returns all submissions (any user)
/// at or after the given epoch second.
#[async_std::test]
async fn test_time_submissions() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let submissions: Vec<Submission> = surf::get(url("/atcoder-api/v3/from/100", port))
        .await
        .unwrap()
        .body_json()
        .await
        .unwrap();
    assert_eq!(submissions.len(), 2);
    assert!(submissions.iter().all(|s| s.epoch_second >= 100));
    server.race(ready(())).await;
}
/// `GET /atcoder-api/v3/user/submission_count` counts a user's submissions
/// within an inclusive `[from_second, to_second]` window.
#[async_std::test]
async fn test_submission_count() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let response: Value = surf::get(url(
        r"/atcoder-api/v3/user/submission_count?user=u1&from_second=1&to_second=4",
        port,
    ))
    .await
    .unwrap()
    .body_json()
    .await
    .unwrap();
    assert_eq!(response["count"], serde_json::json!(3));
    let response: Value = surf::get(url(
        r"/atcoder-api/v3/user/submission_count?user=u1&from_second=1&to_second=3",
        port,
    ))
    .await
    .unwrap()
    .body_json()
    .await
    .unwrap();
    assert_eq!(response["count"], serde_json::json!(2));
    server.race(ready(())).await;
}
/// Unknown routes return 404 and a known route with missing query
/// parameters returns 400.
#[async_std::test]
async fn test_invalid_path() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let response = surf::get(url("/atcoder-api/v3/from/", port)).await.unwrap();
    assert_eq!(response.status(), 404);
    let response = surf::get(url("/atcoder-api/results", port)).await.unwrap();
    assert_eq!(response.status(), 400);
    let response = surf::get(url("/", port)).await.unwrap();
    assert_eq!(response.status(), 404);
    server.race(ready(())).await;
}
/// `GET /healthcheck` responds 200 once the server is up.
#[async_std::test]
async fn test_health_check() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let response = surf::get(url("/healthcheck", port)).await.unwrap();
    assert_eq!(response.status(), 200);
    server.race(ready(())).await;
}
/// Every API endpoint sets `Access-Control-Allow-Origin: *`.
#[async_std::test]
async fn test_cors() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    assert_eq!(
        surf::get(url("/atcoder-api/v3/from/100", port))
            .await
            .unwrap()
            .header("access-control-allow-origin")
            .unwrap(),
        "*"
    );
    assert_eq!(
        surf::get(url("/atcoder-api/v2/user_info?user=u1", port))
            .await
            .unwrap()
            .header("access-control-allow-origin")
            .unwrap(),
        "*"
    );
    assert_eq!(
        surf::get(url("/atcoder-api/results?user=u1", port))
            .await
            .unwrap()
            .header("access-control-allow-origin")
            .unwrap(),
        "*"
    );
    server.race(ready(())).await;
}
/// `GET /atcoder-api/v3/users_and_time` filters by user list, problem list
/// and an inclusive time window.
#[async_std::test]
async fn test_users_and_time() {
    let port = setup().await;
    let server = task::spawn(async move {
        let pg_pool = sql_client::initialize_pool(utils::get_sql_url_from_env())
            .await
            .unwrap();
        run_server(pg_pool, MockAuth, port).await.unwrap();
    });
    task::sleep(std::time::Duration::from_millis(1000)).await;
    let submissions: Vec<Submission> = surf::get(url(
        "/atcoder-api/v3/users_and_time?users=u1,u2&problems=p1&from=100&to=200",
        port,
    ))
    .await
    .unwrap()
    .body_json()
    .await
    .unwrap();
    // One matching submission per user (epoch 100 for u1, 200 for u2).
    assert_eq!(submissions.len(), 2);
    assert_eq!(submissions.iter().filter(|s| &s.user_id == "u1").count(), 1);
    assert_eq!(submissions.iter().filter(|s| &s.user_id == "u2").count(), 1);
    server.race(ready(())).await;
}
| 31.182099 | 104 | 0.578541 |
de9d92c6800904f4f8f93481aa22bfcc9745f9f5 | 19,882 | // Copyright 2020 David Young
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![no_std]
#[cfg(any(feature = "std", test))]
extern crate std;
use core::convert::TryFrom;
pub use error::{Error, Result};
pub use node_types::*;
pub mod error;
pub mod node_types;
#[cfg(feature = "std")]
pub mod cmri_socket;
#[cfg(feature = "std")]
pub use cmri_socket::{CmriSocket, Duplex};
#[cfg(feature = "arduino")]
pub mod arduino;
#[cfg(feature = "arduino")]
pub use arduino::CmriProcessor;
/// This is the length calculated from
/// https://github.com/madleech/ArduinoCMRI/blob/master/CMRI.h
/// (64 i/o cards @ 32 bits each + packet type and address bytes)
//const RX_BUFFER_LEN: usize = 258;
pub const MAX_PAYLOAD_LEN: usize = 256;
/// * Payload is MAX_PAYLOAD_LEN
/// * Headers are 2x PREAMBLE and a START: 3
/// * Address and type: 2
/// * Trailers are 1x STOP: 1
/// * Then some unknown number of escape bytes, up to MAX_PAYLOAD_LEN
/// Implementations may be able to get away with a smaller buffer if
/// memory is highly constrained
pub const TX_BUFFER_LEN: usize = 2 * MAX_PAYLOAD_LEN + 3 + 2 + 1;
/// Attention/sync byte; every frame starts with two of these
const CMRI_PREAMBLE_BYTE: u8 = 0xff;
/// Start-of-message marker (ASCII STX)
const CMRI_START_BYTE: u8 = 0x02;
/// End-of-message marker (ASCII ETX)
const CMRI_STOP_BYTE: u8 = 0x03;
/// Escape marker (ASCII DLE); the byte following it is taken literally
const CMRI_ESCAPE_BYTE: u8 = 0x10;
/// Possible states of the C/MRI system
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum CmriState {
    /// Waiting for the first preamble byte
    Idle,
    /// One preamble byte seen; waiting for the second
    Attn,
    /// Both preambles seen; waiting for the start byte
    Start,
    /// Waiting for the address byte
    Addr,
    /// Waiting for the message-type byte
    Type,
    /// Receiving payload bytes
    Data,
    /// An escape byte was seen; the next byte is taken literally
    Escape,
}
/// Type of a C/MRI message; each discriminant is the ASCII byte used on
/// the wire.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum MessageType {
    /// Initialisation
    Init = 'I' as isize,
    /// Controller -> Node
    Set = 'T' as isize,
    /// Node -> Controller
    Get = 'R' as isize,
    /// Controller requests status from node
    Poll = 'P' as isize,
}
impl TryFrom<u8> for MessageType {
type Error = Error;
fn try_from(t: u8) -> Result<Self> {
use MessageType::*;
match t as char {
'I' => Ok(Init),
'T' => Ok(Set),
'R' => Ok(Get),
'P' => Ok(Poll),
_ => Err(Error::InvalidMessageType),
}
}
}
impl core::fmt::Display for MessageType {
    /// Human-readable form; delegates to the derived `Debug` representation.
    fn fmt(
        &self,
        fmt: &mut core::fmt::Formatter<'_>,
    ) -> core::result::Result<(), core::fmt::Error> {
        write!(fmt, "{:?}", self)
    }
}
/// Result of feeding one received byte into the state machine
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RxState {
    /// Idle or mid-message; keep feeding bytes
    Listening,
    /// A complete message has been received (readable via `message()`)
    Complete,
}
/// Main state machine, including decoding logic
pub struct CmriStateMachine {
    /// Current decoder state
    state: CmriState,
    /// Message currently being assembled from received bytes
    message: CmriMessage,
    /// If set, decoding will only accept messages directed at this
    /// address and discard all others
    address_filter: Option<u8>,
}
/// A single C/MRI message: destination address, message type and payload
#[derive(Copy, Clone)]
pub struct CmriMessage {
    /// Destination node address (`None` until set/decoded)
    pub address: Option<u8>,
    /// Message type (`None` until set/decoded)
    pub message_type: Option<MessageType>,
    /// Payload bytes; only the first `len` bytes are meaningful
    pub payload: [u8; MAX_PAYLOAD_LEN],
    /// Number of valid bytes in `payload`
    pub len: usize,
}
impl CmriMessage {
    /// Create an empty message: no address, no type, zeroed payload.
    pub fn new() -> Self {
        Self {
            address: None,
            message_type: None,
            payload: [0; MAX_PAYLOAD_LEN],
            len: 0,
        }
    }
    /// Builder-style setter for the destination address.
    pub fn address(&mut self, addr: u8) -> &mut Self {
        self.address = Some(addr);
        self
    }
    /// Builder-style setter for the payload.
    ///
    /// Copies `payload` into the internal buffer and records its length.
    ///
    /// # Errors
    /// Returns `Error::DataTooLong` (leaving the message unchanged) if
    /// `payload` exceeds `MAX_PAYLOAD_LEN`.
    pub fn payload(&mut self, payload: &[u8]) -> Result<&mut Self> {
        payload_from_slice(&mut self.payload, payload)?;
        // BUG FIX: `len` was previously never updated here, so a message
        // built through this setter would encode with a stale (usually
        // zero) payload length. Record the real number of valid bytes.
        self.len = payload.len();
        Ok(self)
    }
    /// Builder-style setter for the message type.
    pub fn message_type(&mut self, t: MessageType) -> &mut Self {
        self.message_type = Some(t);
        self
    }
    /// Push a byte onto the payload
    fn push(&mut self, byte: u8) -> Result<()> {
        if self.len == MAX_PAYLOAD_LEN {
            // Buffer is full, which is problematic
            return Err(Error::DataTooLong);
        }
        self.payload[self.len] = byte;
        self.len += 1;
        Ok(())
    }
    /// Empty the rx buffer
    fn clear(&mut self) {
        self.len = 0;
        self.payload.iter_mut().for_each(|x| *x = 0);
    }
    /// Encode the message into a transmit buffer.
    ///
    /// Frame layout: two PREAMBLEs, one START, the address, the type, the
    /// payload (with STOP/ESCAPE bytes escaped), and a trailing STOP.
    /// `buf` is sized for the worst case of every payload byte escaped.
    ///
    /// # Errors
    /// Returns `Error::MissingAddress` / `Error::MissingType` if the
    /// corresponding field has not been set.
    pub fn encode(&self, buf: &mut [u8; TX_BUFFER_LEN]) -> Result<()> {
        let mut pos: usize = 0;
        // Two PREAMBLEs
        buf[pos] = CMRI_PREAMBLE_BYTE;
        pos += 1;
        buf[pos] = CMRI_PREAMBLE_BYTE;
        pos += 1;
        // One START
        buf[pos] = CMRI_START_BYTE;
        pos += 1;
        // One ADDRESS
        buf[pos] = self.address.ok_or(Error::MissingAddress)?;
        pos += 1;
        // One TYPE
        buf[pos] = self.message_type.ok_or(Error::MissingType)? as u8;
        pos += 1;
        // Insert the PAYLOAD, escaping STOP and ESCAPE bytes
        for payload_byte in self.payload[..self.len].iter() {
            if needs_escape(*payload_byte) {
                buf[pos] = CMRI_ESCAPE_BYTE;
                pos += 1;
            }
            buf[pos] = *payload_byte;
            pos += 1;
        }
        // One STOP
        buf[pos] = CMRI_STOP_BYTE;
        //pos += 1;
        Ok(())
    }
}
impl CmriStateMachine {
    /// Create a decoder in the `Idle` state with an empty message buffer
    /// and no address filter.
    pub fn new() -> Self {
        Self {
            state: CmriState::Idle,
            message: CmriMessage::new(),
            address_filter: None,
        }
    }
    /// Returns the current state of the system
    pub fn state(&self) -> CmriState {
        self.state
    }
    /// Sets an address filter so that the state machine will only
    /// accept messages targeted at us
    pub fn filter(&mut self, addr: u8) {
        self.address_filter = Some(addr);
    }
    /// Gets a reference to the decoded message
    pub fn message(&self) -> &CmriMessage {
        &self.message
    }
    /// Discard any partially received message and return to `Idle`.
    pub fn clear(&mut self) {
        self.message.clear();
        self.state = CmriState::Idle;
    }
    /// Main process function. Takes in bytes off the wire and builds up
    /// a message in the receive buffer.
    ///
    /// Returns `Ok(RxState::Complete)` when a STOP byte terminates a
    /// message, otherwise `Ok(RxState::Listening)`. Returns `Err` (after
    /// resetting) if the payload overflows `MAX_PAYLOAD_LEN`.
    pub fn process(&mut self, byte: u8) -> Result<RxState> {
        use CmriState::*;
        match self.state {
            Idle => {
                // Idle to Attn if byte is PREAMBLE
                if byte == CMRI_PREAMBLE_BYTE {
                    self.clear();
                    self.state = Attn;
                }
                // Ignore other bytes while Idle
            }
            Attn => {
                // Attn to Start if byte is PREAMBLE
                if byte == CMRI_PREAMBLE_BYTE {
                    self.state = Start;
                } else {
                    // Otherwise discard and reset to Idle
                    self.clear();
                }
            }
            Start => {
                // start byte must be valid
                if byte == CMRI_START_BYTE {
                    self.state = Addr;
                } else {
                    // Otherwise discard and reset to Idle
                    self.clear();
                }
            }
            Addr => {
                // Take the next byte as-is for an address
                if let Some(addr) = self.address_filter {
                    // A filter has been defined
                    if addr != byte {
                        // Not our address, discard the message
                        self.clear();
                        return Ok(RxState::Listening);
                    }
                }
                self.message.address = Some(byte);
                self.state = Type;
            }
            Type => {
                // Decode the message type and reset if it is invalid
                if let Ok(mtype) = MessageType::try_from(byte) {
                    self.message.message_type = Some(mtype);
                    self.state = Data;
                } else {
                    // Invalid message type; reset
                    self.clear();
                }
            }
            Data => {
                match byte {
                    CMRI_ESCAPE_BYTE => {
                        // escape the next byte; do not push the escape
                        // byte
                        //self.push(byte)?;
                        self.state = Escape;
                    }
                    CMRI_STOP_BYTE => {
                        // end transmission
                        self.state = Idle;
                        return Ok(RxState::Complete);
                    }
                    _ => {
                        // any other byte we take as data
                        if let Err(e) = self.message.push(byte) {
                            // Reset the state machine so that we can start afresh
                            self.clear();
                            return Err(e);
                        }
                    }
                }
            }
            Escape => {
                // Escape the next byte, so accept it as data.
                if let Err(e) = self.message.push(byte) {
                    // Error writing message -> reset state machine
                    self.clear();
                    return Err(e);
                }
                self.state = Data;
            }
        }
        Ok(RxState::Listening)
    }
}
impl Default for CmriStateMachine {
    /// Equivalent to [`CmriStateMachine::new`]
    fn default() -> Self {
        Self::new()
    }
}
impl Default for CmriMessage {
    /// Equivalent to [`CmriMessage::new`]
    fn default() -> Self {
        Self::new()
    }
}
/// Returns TRUE if the byte is one which needs escaping; currently only
/// STOP and ESCAPE
fn needs_escape(byte: u8) -> bool {
    [CMRI_STOP_BYTE, CMRI_ESCAPE_BYTE].contains(&byte)
}
/// Takes a slice and embeds it in a payload array.
///
/// Bytes beyond `input_payload.len()` are left untouched; callers track
/// the valid length separately.
///
/// # Errors
/// Returns `Error::DataTooLong` (without modifying `payload_buffer`) if
/// the input exceeds `MAX_PAYLOAD_LEN`.
pub fn payload_from_slice(
    payload_buffer: &mut [u8; MAX_PAYLOAD_LEN],
    input_payload: &[u8],
) -> Result<()> {
    if input_payload.len() > MAX_PAYLOAD_LEN {
        return Err(Error::DataTooLong);
    }
    // copy_from_slice compiles to a memcpy, replacing the element-by-element
    // zip/copy loop; behaviour is identical.
    payload_buffer[..input_payload.len()].copy_from_slice(input_payload);
    Ok(())
}
#[cfg(test)]
mod test {
use super::*;
use CmriState::*;
use MessageType::*;
use RxState::*;
#[test]
fn basic_create_state_machine() {
let s = CmriStateMachine::new();
assert_eq!(s.state(), CmriState::Idle);
assert_eq!(s.message.payload.len(), MAX_PAYLOAD_LEN);
assert_eq!(s.message.len, 0);
}
#[test]
fn decode_first_preamble() {
// Create a state machine
let mut s = CmriStateMachine::new();
// Send junk
let res = s.process(0x05).unwrap();
assert_eq!(res, Listening);
// Send more junk
let res = s.process(0xfe).unwrap();
assert_eq!(res, Listening);
// Make sure the buffer hasn't recorded any of this
assert_eq!(s.message.len, 0);
assert_eq!(s.message.payload[0], 0);
assert_eq!(s.state, Idle);
// Send a preamble byte and check that the state has changed to Attn
let res = s.process(CMRI_PREAMBLE_BYTE).unwrap();
assert_eq!(res, Listening);
assert_eq!(s.state, Attn);
assert_eq!(s.message.len, 0); // preamble does not get saved
}
#[test]
fn decode_second_preamble() {
// Create a state machine and send two preamble bytes
let mut s = CmriStateMachine::new();
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Attn);
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Start);
// Create a new state machine and send one preamble followed by junk
let mut s = CmriStateMachine::new();
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Attn);
let res = s.process(0x31);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Idle);
}
#[test]
fn decode_start_byte() {
// Create a state machine and send two preamble bytes
let mut s = CmriStateMachine::new();
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Attn);
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Start);
// Send a start byte and check that we're in address mode
let res = s.process(CMRI_START_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Addr);
// Create a state machine and send two preamble bytes
let mut s = CmriStateMachine::new();
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Attn);
let res = s.process(CMRI_PREAMBLE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Start);
// Send junk instead of a start byte
let res = s.process(0x32);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Idle);
assert_eq!(s.message.len, 0);
}
// Skip Addr and Type because they can each be any byte
#[test]
fn decode_escape_byte() {
let mut s = get_to_data_section(0x43).unwrap();
// Normal message byte
let res = s.process(5);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Data);
// Escape byte, should not advance the position
let pos = s.message.len;
let res = s.process(CMRI_ESCAPE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Escape);
assert_eq!(s.message.len, pos);
// Send an escape byte again, should be escaped and state back
// to accepting data
let res = s.process(CMRI_ESCAPE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Data);
assert_eq!(s.message.len, pos + 1);
// Escape byte, should not advance the position
let pos = s.message.len;
let res = s.process(CMRI_ESCAPE_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Escape);
assert_eq!(s.message.len, pos);
// Send a stop byte, should be escaped and state back
// to accepting data
let res = s.process(CMRI_STOP_BYTE);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Data);
assert_eq!(s.message.len, pos + 1);
}
#[test]
fn decode_stop_byte() {
let mut s = get_to_data_section(0x05).unwrap();
// Normal message byte
let res = s.process(5);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Data);
// Stop byte, should trigger the end of message stuff
let res = s.process(CMRI_STOP_BYTE);
assert_eq!(res, Ok(Complete));
assert_eq!(s.state, Idle);
}
#[test]
fn address_filter() {
// Initial check to see that no-filter works
let s = get_to_data_section(0x06).unwrap();
assert_eq!(s.state, Data);
// Make a state machine with a filter
let mut s = CmriStateMachine::new();
s.filter(0x64);
// Send the same address
s.process(CMRI_PREAMBLE_BYTE).unwrap();
s.process(CMRI_PREAMBLE_BYTE).unwrap();
s.process(CMRI_START_BYTE).unwrap();
let res = s.process(0x64);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Type);
// Make a new state machine
let mut s = CmriStateMachine::new();
s.filter(0x64);
assert!(s.address_filter.is_some());
// Send a different address
s.process(CMRI_PREAMBLE_BYTE).unwrap();
s.process(CMRI_PREAMBLE_BYTE).unwrap();
s.process(CMRI_START_BYTE).unwrap();
let res = s.process(0x65);
assert_eq!(res, Ok(Listening));
assert_eq!(s.state, Idle);
assert_eq!(s.message.len, 0);
}
#[test]
fn decode_full_message() {
#[rustfmt::skip]
let message = [
CMRI_PREAMBLE_BYTE,
CMRI_PREAMBLE_BYTE,
CMRI_START_BYTE,
0x86, // Address
Init as u8, // Type
0x41, 0x41, 0x41, 0x41, // Message
CMRI_STOP_BYTE,
];
#[rustfmt::skip]
let message2 = [
CMRI_PREAMBLE_BYTE,
CMRI_PREAMBLE_BYTE,
CMRI_START_BYTE,
0xa2, // Address
Init as u8, // Type
0x41, 0x41, 0x41, 0x41, // Message
CMRI_STOP_BYTE,
];
let mut s = CmriStateMachine::new();
// Decode the message
for byte in message.iter() {
s.process(*byte).unwrap();
}
let m = s.message();
assert_eq!(m.address, Some(0x86));
assert_eq!(m.message_type, Some(Init));
assert_eq!(m.payload[..(m.len)], [0x41, 0x41, 0x41, 0x41]);
// Enable a filter
s.filter(0x86);
// Decode the message, excluding final stop byte
for byte in message[..message.len() - 1].iter() {
s.process(*byte).unwrap();
}
// Decode the final stop byte, capturing the response code
let res = s.process(message[message.len() - 1]);
assert_eq!(res, Ok(Complete));
let m = s.message();
assert_eq!(m.address, Some(0x86));
assert_eq!(m.message_type, Some(Init));
assert_eq!(m.payload[..(m.len)], [0x41, 0x41, 0x41, 0x41]);
// Decode the message
for byte in message2.iter() {
s.process(*byte).unwrap();
}
let m = s.message();
assert_eq!(m.len, 0);
}
#[test]
fn buffer_overrun() {
let mut s = CmriStateMachine::new();
// Cheekily force the buffer to be "full"
// Note that this is not possible for a library user because the
// `position` member variable is private
s.message.len = MAX_PAYLOAD_LEN - 3;
s.message.push(3).unwrap();
s.message.push(2).unwrap();
s.message.push(1).unwrap();
let res = s.message.push(0);
assert_eq!(res, Err(Error::DataTooLong));
}
#[test]
fn encode_a_message() {
let mut payload_buffer = [0_u8; MAX_PAYLOAD_LEN];
payload_from_slice(&mut payload_buffer, &[0x41, 0x41, 0x43]).unwrap();
let m = CmriMessage {
address: Some(0x58),
message_type: Some(Set),
payload: payload_buffer,
len: 3,
};
let mut tx_buffer = [0_u8; TX_BUFFER_LEN];
m.encode(&mut tx_buffer).unwrap();
assert_eq!(
tx_buffer[..9],
[
CMRI_PREAMBLE_BYTE,
CMRI_PREAMBLE_BYTE,
CMRI_START_BYTE,
0x58, // Address
Set as u8, // Type
0x41,
0x41,
0x43,
CMRI_STOP_BYTE,
]
);
}
#[test]
fn encode_a_worst_case_message() {}
#[test]
fn test_payload_from_slice() {
let mut payload_buffer = [0_u8; MAX_PAYLOAD_LEN];
let input = [1_u8, 2, 3];
// Test a valid input
payload_from_slice(&mut payload_buffer, &input).unwrap();
assert_eq!(payload_buffer[..input.len()], input);
assert_eq!(payload_buffer[input.len()], 0);
// Test a too-long input
let input = [5_u8; MAX_PAYLOAD_LEN + 1];
let res = payload_from_slice(&mut payload_buffer, &input);
assert_eq!(res, Err(Error::DataTooLong));
}
/// Utility function to produce a state machine in the "accepting
/// data" state to make testing later states easier
fn get_to_data_section(addr: u8) -> Result<CmriStateMachine> {
let mut s = CmriStateMachine::new();
s.process(CMRI_PREAMBLE_BYTE)?;
s.process(CMRI_PREAMBLE_BYTE)?;
s.process(CMRI_START_BYTE)?;
s.process(addr)?; // Address
s.process(Init as u8)?; // Message type
Ok(s)
}
}
| 29.498516 | 82 | 0.537974 |
289bf4076c693f768cdf1355aa5551342ee89a4f | 1,401 | // Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::num::NonZeroI32;
use poem::web::Html;
use poem::IntoResponse;
/// Query parameters accepted by the pprof profiling endpoint.
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct PProfRequest {
    // How long to sample for, in seconds.
    #[serde(default = "PProfRequest::default_seconds")]
    pub(crate) seconds: u64,
    // Sampling frequency; must be non-zero (presumably Hz, the usual
    // pprof convention — confirm against the profiler backend).
    #[serde(default = "PProfRequest::default_frequency")]
    pub(crate) frequency: NonZeroI32,
}
impl PProfRequest {
    /// Default profiling duration used when `seconds` is omitted.
    pub(crate) fn default_seconds() -> u64 {
        5
    }

    /// Default sampling frequency used when `frequency` is omitted.
    pub(crate) fn default_frequency() -> NonZeroI32 {
        match NonZeroI32::new(99) {
            Some(frequency) => frequency,
            // 99 is a non-zero literal, so this arm can never run.
            None => unreachable!("99 is non-zero"),
        }
    }
}
// Return the home page listing the default pprof endpoints.
#[poem::handler]
pub async fn debug_home_handler() -> impl IntoResponse {
    // The link pre-fills the default profiling duration so a plain click
    // produces a useful profile. A tail expression is used instead of a
    // trailing `return …;`, per Rust idiom.
    Html(format!(
        r#"<a href="/debug/pprof/profile?seconds={}">pprof/profile</a>"#,
        PProfRequest::default_seconds()
    ))
    .into_response()
}
| 30.456522 | 75 | 0.694504 |
4877323ffa9120253e69f60f547d7e3bfee76cc5 | 4,769 | use std::fmt;
use style::Style;
/// Styles have a special `Debug` implementation that only shows the fields that
/// are set. Fields that haven’t been touched aren’t included in the output.
///
/// This behaviour gets bypassed when using the alternate formatting mode
/// `format!("{:#?}")`.
///
/// ```
/// use ansi_term::Colour::{Red, Blue};
/// assert_eq!("Style { fg(Red), on(Blue), bold, italic }",
///            format!("{:?}", Red.on(Blue).bold().italic()));
/// ```
impl fmt::Debug for Style {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Alternate mode ({:#?}): dump every field, set or not.
        if fmt.alternate() {
            return fmt.debug_struct("Style")
                .field("foreground", &self.foreground)
                .field("background", &self.background)
                .field("blink", &self.is_blink)
                .field("bold", &self.is_bold)
                .field("dimmed", &self.is_dimmed)
                .field("hidden", &self.is_hidden)
                .field("italic", &self.is_italic)
                .field("reverse", &self.is_reverse)
                .field("strikethrough", &self.is_strikethrough)
                .field("underline", &self.is_underline)
                .finish();
        }

        if self.is_plain() {
            return fmt.write_str("Style {}");
        }

        // Compact mode: list only what is set, comma-separated.
        fmt.write_str("Style { ")?;
        let mut first = true;

        if let Some(fg) = self.foreground {
            first = false;
            write!(fmt, "fg({:?})", fg)?;
        }

        if let Some(bg) = self.background {
            if !first { fmt.write_str(", ")?; }
            first = false;
            write!(fmt, "on({:?})", bg)?;
        }

        // Boolean attributes, in the same fixed order as before.
        let flags = [
            (self.is_blink,         "blink"),
            (self.is_bold,          "bold"),
            (self.is_dimmed,        "dimmed"),
            (self.is_hidden,        "hidden"),
            (self.is_italic,        "italic"),
            (self.is_reverse,       "reverse"),
            (self.is_strikethrough, "strikethrough"),
            (self.is_underline,     "underline"),
        ];
        for &(enabled, name) in flags.iter() {
            if enabled {
                if !first { fmt.write_str(", ")?; }
                first = false;
                fmt.write_str(name)?;
            }
        }

        write!(fmt, " }}")
    }
}
#[cfg(test)]
mod test {
    use style::Colour::*;
    use style::Style;

    // Shorthand for a style with nothing set.
    fn style() -> Style {
        Style::new()
    }

    // Expands into a test asserting that `$obj` debug-formats to `$result`.
    macro_rules! test {
        ($name: ident: $obj: expr => $result: expr) => {
            #[test]
            fn $name() {
                assert_eq!($result, format!("{:?}", $obj));
            }
        };
    }

    test!(empty: style() => "Style {}");
    test!(bold: style().bold() => "Style { bold }");
    test!(italic: style().italic() => "Style { italic }");
    test!(both: style().bold().italic() => "Style { bold, italic }");
    test!(red: Red.normal() => "Style { fg(Red) }");
    test!(redblue: Red.normal().on(RGB(3, 2, 4)) => "Style { fg(Red), on(RGB(3, 2, 4)) }");
    test!(everything:
        Red.on(Blue).blink().bold().dimmed().hidden().italic().reverse().strikethrough().underline() =>
        "Style { fg(Red), on(Blue), blink, bold, dimmed, hidden, italic, reverse, strikethrough, underline }");

    // The alternate ({:#?}) form bypasses the compact output and prints every
    // field. Its exact whitespace varies between rustc versions, so the
    // pretty output is matched with a whitespace-tolerant (?x) regex rather
    // than an exact string.
    #[test]
    fn long_and_detailed() {
        extern crate regex;
        let expected_debug = "Style { fg(Blue), bold }";
        let expected_pretty_repat = r##"(?x)
        Style\s+\{\s+
        foreground:\s+Some\(\s+
        Blue,?\s+
        \),\s+
        background:\s+None,\s+
        blink:\s+false,\s+
        bold:\s+true,\s+
        dimmed:\s+false,\s+
        hidden:\s+false,\s+
        italic:\s+false,\s+
        reverse:\s+false,\s+
        strikethrough:\s+
        false,\s+
        underline:\s+false,?\s+
        \}"##;
        let re = regex::Regex::new(expected_pretty_repat).unwrap();
        let style = Blue.bold();
        let style_fmt_debug = format!("{:?}", style);
        let style_fmt_pretty = format!("{:#?}", style);
        println!("style_fmt_debug:\n{}", style_fmt_debug);
        println!("style_fmt_pretty:\n{}", style_fmt_pretty);
        assert_eq!(expected_debug, style_fmt_debug);
        assert!(re.is_match(&style_fmt_pretty));
    }
}
| 35.325926 | 115 | 0.474942 |
ab4275bb6531505be413fb90bdeae4086f022c72 | 109,693 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines the logical data types of Arrow arrays.
//!
//! The most important things you might be looking for are:
//! * [`Schema`](crate::datatypes::Schema) to describe a schema.
//! * [`Field`](crate::datatypes::Field) to describe one field within a schema.
//! * [`DataType`](crate::datatypes::DataType) to describe the type of a field.
use std::collections::HashMap;
use std::default::Default;
use std::fmt;
use std::mem::size_of;
use std::ops::Neg;
#[cfg(feature = "simd")]
use std::ops::{Add, BitAnd, BitAndAssign, BitOr, BitOrAssign, Div, Mul, Not, Sub};
use std::slice::from_raw_parts;
use std::str::FromStr;
use std::sync::Arc;
#[cfg(feature = "simd")]
use packed_simd::*;
use serde_derive::{Deserialize, Serialize};
use serde_json::{
json, Number, Value, Value::Number as VNumber, Value::String as VString,
};
use crate::error::{ArrowError, Result};
/// The set of datatypes that are supported by this implementation of Apache Arrow.
///
/// The Arrow specification on data types includes some more types.
/// See also [`Schema.fbs`](https://github.com/apache/arrow/blob/master/format/Schema.fbs)
/// for Arrow's specification.
///
/// The variants of this enum include primitive fixed size types as well as parametric or
/// nested types.
/// Currently the Rust implementation supports the following nested types:
/// - `List<T>`
/// - `Struct<T, U, V, ...>`
///
/// Nested types can themselves be nested within other arrays.
/// For more information on these types please see
/// [the physical memory layout of Apache Arrow](https://arrow.apache.org/docs/format/Columnar.html#physical-memory-layout).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum DataType {
/// Null type
Null,
/// A boolean datatype representing the values `true` and `false`.
Boolean,
/// A signed 8-bit integer.
Int8,
/// A signed 16-bit integer.
Int16,
/// A signed 32-bit integer.
Int32,
/// A signed 64-bit integer.
Int64,
/// An unsigned 8-bit integer.
UInt8,
/// An unsigned 16-bit integer.
UInt16,
/// An unsigned 32-bit integer.
UInt32,
/// An unsigned 64-bit integer.
UInt64,
/// A 16-bit floating point number.
Float16,
/// A 32-bit floating point number.
Float32,
/// A 64-bit floating point number.
Float64,
/// A timestamp with an optional timezone.
///
/// Time is measured as a Unix epoch, counting the seconds from
/// 00:00:00.000 on 1 January 1970, excluding leap seconds,
/// as a 64-bit integer.
///
/// The time zone is a string indicating the name of a time zone, one of:
///
/// * As used in the Olson time zone database (the "tz database" or
/// "tzdata"), such as "America/New_York"
/// * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
Timestamp(TimeUnit, Option<Arc<String>>),
/// A 32-bit date representing the elapsed time since UNIX epoch (1970-01-01)
/// in days (32 bits).
Date32(DateUnit),
/// A 64-bit date representing the elapsed time since UNIX epoch (1970-01-01)
/// in milliseconds (64 bits).
Date64(DateUnit),
/// A 32-bit time representing the elapsed time since midnight in the unit of `TimeUnit`.
Time32(TimeUnit),
/// A 64-bit time representing the elapsed time since midnight in the unit of `TimeUnit`.
Time64(TimeUnit),
/// Measure of elapsed time in either seconds, milliseconds, microseconds or nanoseconds.
Duration(TimeUnit),
/// A "calendar" interval which models types that don't necessarily
/// have a precise duration without the context of a base timestamp (e.g.
/// days can differ in length during day light savings time transitions).
Interval(IntervalUnit),
/// Opaque binary data of variable length.
Binary,
/// Opaque binary data of fixed size.
/// Enum parameter specifies the number of bytes per value.
FixedSizeBinary(i32),
/// Opaque binary data of variable length and 64-bit offsets.
LargeBinary,
/// A variable-length string in Unicode with UTF-8 encoding.
Utf8,
    /// A variable-length string in Unicode with UTF-8 encoding and 64-bit offsets.
LargeUtf8,
/// A list of some logical data type with variable length.
List(Box<Field>),
/// A list of some logical data type with fixed length.
FixedSizeList(Box<Field>, i32),
/// A list of some logical data type with variable length and 64-bit offsets.
LargeList(Box<Field>),
/// A nested datatype that contains a number of sub-fields.
Struct(Vec<Field>),
/// A nested datatype that can represent slots of differing types.
Union(Vec<Field>),
/// A dictionary encoded array (`key_type`, `value_type`), where
/// each array element is an index of `key_type` into an
/// associated dictionary of `value_type`.
///
/// Dictionary arrays are used to store columns of `value_type`
/// that contain many repeated values using less memory, but with
/// a higher CPU overhead for some operations.
///
/// This type mostly used to represent low cardinality string
/// arrays or a limited set of primitive types as integers.
Dictionary(Box<DataType>, Box<DataType>),
/// Decimal value with precision and scale
Decimal(usize, usize),
}
/// Date is either a 32-bit or 64-bit type representing elapsed time since UNIX
/// epoch (1970-01-01) in days or milliseconds.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum DateUnit {
/// Days since the UNIX epoch.
Day,
/// Milliseconds indicating UNIX time elapsed since the epoch (no
/// leap seconds), where the values are evenly divisible by 86400000.
Millisecond,
}
/// An absolute length of time in seconds, milliseconds, microseconds or nanoseconds.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum TimeUnit {
/// Time in seconds.
Second,
/// Time in milliseconds.
Millisecond,
/// Time in microseconds.
Microsecond,
/// Time in nanoseconds.
Nanosecond,
}
/// YEAR_MONTH or DAY_TIME interval in SQL style.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum IntervalUnit {
/// Indicates the number of elapsed whole months, stored as 4-byte integers.
YearMonth,
/// Indicates the number of elapsed days and milliseconds,
/// stored as 2 contiguous 32-bit integers (8-bytes in total).
DayTime,
}
/// Contains the meta-data for a single relative type.
///
/// The `Schema` object is an ordered collection of `Field` objects.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Field {
    // Name of the field.
    name: String,
    // Logical type of the field's values.
    data_type: DataType,
    // Whether the field is allowed to contain null values.
    nullable: bool,
    // Identifier of the dictionary this field refers to when
    // dictionary-encoded (see Arrow's Schema.fbs).
    dict_id: i64,
    // Whether the referenced dictionary is ordered.
    dict_is_ordered: bool,
}
/// Trait for the Rust primitive values that can back an Arrow array
/// (booleans, integers and floats).
pub trait ArrowNativeType:
    fmt::Debug + Send + Sync + Copy + PartialOrd + FromStr + Default + 'static
{
    /// Convert this value into a JSON `Value`, or `None` if it has no
    /// JSON representation (e.g. non-finite floats).
    fn into_json_value(self) -> Option<Value>;

    /// Convert native type from usize. Default: not convertible.
    fn from_usize(_: usize) -> Option<Self> {
        None
    }

    /// Convert native type to usize. Default: not convertible.
    fn to_usize(&self) -> Option<usize> {
        None
    }
}
/// Trait indicating a primitive fixed-width type (bool, ints and floats).
pub trait ArrowPrimitiveType: 'static {
    /// Corresponding Rust native type for the primitive type.
    type Native: ArrowNativeType;

    /// The corresponding Arrow data type of this primitive type.
    const DATA_TYPE: DataType;

    /// Returns the byte width of this primitive type.
    fn get_byte_width() -> usize {
        size_of::<Self::Native>()
    }

    /// Returns a default value of this primitive type.
    ///
    /// This is useful for aggregate array ops like `sum()`, `mean()`.
    fn default_value() -> Self::Native {
        Default::default()
    }
}
impl ArrowNativeType for bool {
    fn into_json_value(self) -> Option<Value> {
        // Booleans always have a JSON representation.
        Some(self.into())
    }
}
impl ArrowNativeType for i8 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i16 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i32 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i64 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u8 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u16 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u32 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u64 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for f32 {
    fn into_json_value(self) -> Option<Value> {
        // Rounds to three decimal places before serializing.
        // NOTE(review): presumably this matches the precision expected by
        // Arrow's JSON integration-test format — confirm before changing.
        // `Number::from_f64` returns `None` for NaN and infinities.
        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0).map(VNumber)
    }
}
impl ArrowNativeType for f64 {
    fn into_json_value(self) -> Option<Value> {
        // `Number::from_f64` returns `None` for NaN and infinities.
        Number::from_f64(self).map(VNumber)
    }
}
// BooleanType is special: its bit-width is not the size of the primitive type, and its `index`
// operation assumes bit-packing. It therefore does not implement
// `ArrowPrimitiveType` and only carries the associated `DataType`.
#[derive(Debug)]
pub struct BooleanType {}

impl BooleanType {
    /// The Arrow data type corresponding to this Rust type.
    pub const DATA_TYPE: DataType = DataType::Boolean;
}
/// Generates a marker type `$name` implementing [`ArrowPrimitiveType`]
/// with Rust native representation `$native_ty` and Arrow logical type
/// `$data_ty`.
macro_rules! make_type {
    ($name:ident, $native_ty:ty, $data_ty:expr) => {
        #[derive(Debug)]
        pub struct $name {}

        impl ArrowPrimitiveType for $name {
            type Native = $native_ty;
            const DATA_TYPE: DataType = $data_ty;
        }
    };
}
make_type!(Int8Type, i8, DataType::Int8);
make_type!(Int16Type, i16, DataType::Int16);
make_type!(Int32Type, i32, DataType::Int32);
make_type!(Int64Type, i64, DataType::Int64);
make_type!(UInt8Type, u8, DataType::UInt8);
make_type!(UInt16Type, u16, DataType::UInt16);
make_type!(UInt32Type, u32, DataType::UInt32);
make_type!(UInt64Type, u64, DataType::UInt64);
make_type!(Float32Type, f32, DataType::Float32);
make_type!(Float64Type, f64, DataType::Float64);
make_type!(
TimestampSecondType,
i64,
DataType::Timestamp(TimeUnit::Second, None)
);
make_type!(
TimestampMillisecondType,
i64,
DataType::Timestamp(TimeUnit::Millisecond, None)
);
make_type!(
TimestampMicrosecondType,
i64,
DataType::Timestamp(TimeUnit::Microsecond, None)
);
make_type!(
TimestampNanosecondType,
i64,
DataType::Timestamp(TimeUnit::Nanosecond, None)
);
make_type!(Date32Type, i32, DataType::Date32(DateUnit::Day));
make_type!(Date64Type, i64, DataType::Date64(DateUnit::Millisecond));
make_type!(Time32SecondType, i32, DataType::Time32(TimeUnit::Second));
make_type!(
Time32MillisecondType,
i32,
DataType::Time32(TimeUnit::Millisecond)
);
make_type!(
Time64MicrosecondType,
i64,
DataType::Time64(TimeUnit::Microsecond)
);
make_type!(
Time64NanosecondType,
i64,
DataType::Time64(TimeUnit::Nanosecond)
);
make_type!(
IntervalYearMonthType,
i32,
DataType::Interval(IntervalUnit::YearMonth)
);
make_type!(
IntervalDayTimeType,
i64,
DataType::Interval(IntervalUnit::DayTime)
);
make_type!(
DurationSecondType,
i64,
DataType::Duration(TimeUnit::Second)
);
make_type!(
DurationMillisecondType,
i64,
DataType::Duration(TimeUnit::Millisecond)
);
make_type!(
DurationMicrosecondType,
i64,
DataType::Duration(TimeUnit::Microsecond)
);
make_type!(
DurationNanosecondType,
i64,
DataType::Duration(TimeUnit::Nanosecond)
);
/// A subtype of primitive type that represents legal dictionary keys.
/// See <https://arrow.apache.org/docs/format/Columnar.html>
pub trait ArrowDictionaryKeyType: ArrowPrimitiveType {}

// Per the Arrow columnar format, any integer type may index a dictionary.
impl ArrowDictionaryKeyType for Int8Type {}

impl ArrowDictionaryKeyType for Int16Type {}

impl ArrowDictionaryKeyType for Int32Type {}

impl ArrowDictionaryKeyType for Int64Type {}

impl ArrowDictionaryKeyType for UInt8Type {}

impl ArrowDictionaryKeyType for UInt16Type {}

impl ArrowDictionaryKeyType for UInt32Type {}

impl ArrowDictionaryKeyType for UInt64Type {}
/// A subtype of primitive type that represents numeric values.
///
/// SIMD operations are defined in this trait if available on the target system.
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
pub trait ArrowNumericType: ArrowPrimitiveType
where
Self::Simd: Add<Output = Self::Simd>
+ Sub<Output = Self::Simd>
+ Mul<Output = Self::Simd>
+ Div<Output = Self::Simd>
+ Copy,
Self::SimdMask: BitAnd<Output = Self::SimdMask>
+ BitOr<Output = Self::SimdMask>
+ BitAndAssign
+ BitOrAssign
+ Not<Output = Self::SimdMask>
+ Copy,
{
/// Defines the SIMD type that should be used for this numeric type
type Simd;
/// Defines the SIMD Mask type that should be used for this numeric type
type SimdMask;
/// The number of SIMD lanes available
fn lanes() -> usize;
/// Initializes a SIMD register to a constant value
fn init(value: Self::Native) -> Self::Simd;
/// Loads a slice into a SIMD register
fn load(slice: &[Self::Native]) -> Self::Simd;
/// Creates a new SIMD mask for this SIMD type filling it with `value`
fn mask_init(value: bool) -> Self::SimdMask;
/// Creates a new SIMD mask for this SIMD type from the lower-most bits of the given `mask`.
/// The number of bits used corresponds to the number of lanes of this type
fn mask_from_u64(mask: u64) -> Self::SimdMask;
/// Gets the value of a single lane in a SIMD mask
fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool;
/// Gets the bitmask for a SimdMask as a byte slice and passes it to the closure used as the action parameter
fn bitmask<T>(mask: &Self::SimdMask, action: T)
where
T: FnMut(&[u8]);
/// Sets the value of a single lane of a SIMD mask
fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask;
/// Selects elements of `a` and `b` using `mask`
fn mask_select(mask: Self::SimdMask, a: Self::Simd, b: Self::Simd) -> Self::Simd;
/// Returns `true` if any of the lanes in the mask are `true`
fn mask_any(mask: Self::SimdMask) -> bool;
/// Performs a SIMD binary operation
fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>(
left: Self::Simd,
right: Self::Simd,
op: F,
) -> Self::Simd;
/// SIMD version of equal
fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// SIMD version of not equal
fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// SIMD version of less than
fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// SIMD version of less than or equal to
fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// SIMD version of greater than
fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// SIMD version of greater than or equal to
fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
/// Writes a SIMD result back to a slice
fn write(simd_result: Self::Simd, slice: &mut [Self::Native]);
}
#[cfg(any(
not(any(target_arch = "x86", target_arch = "x86_64")),
not(feature = "simd")
))]
pub trait ArrowNumericType: ArrowPrimitiveType {}
macro_rules! make_numeric_type {
($impl_ty:ty, $native_ty:ty, $simd_ty:ident, $simd_mask_ty:ident) => {
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
impl ArrowNumericType for $impl_ty {
type Simd = $simd_ty;
type SimdMask = $simd_mask_ty;
#[inline]
fn lanes() -> usize {
Self::Simd::lanes()
}
#[inline]
fn init(value: Self::Native) -> Self::Simd {
Self::Simd::splat(value)
}
#[inline]
fn load(slice: &[Self::Native]) -> Self::Simd {
unsafe { Self::Simd::from_slice_unaligned_unchecked(slice) }
}
#[inline]
fn mask_init(value: bool) -> Self::SimdMask {
Self::SimdMask::splat(value)
}
#[inline]
fn mask_from_u64(mask: u64) -> Self::SimdMask {
// this match will get removed by the compiler since the number of lanes is known at
// compile-time for each concrete numeric type
match Self::lanes() {
8 => {
// the bit position in each lane indicates the index of that lane
let vecidx = i64x8::new(1, 2, 4, 8, 16, 32, 64, 128);
// broadcast the lowermost 8 bits of mask to each lane
let vecmask = i64x8::splat((mask & 0xFF) as i64);
// compute whether the bit corresponding to each lanes index is set
let vecmask = (vecidx & vecmask).eq(vecidx);
// transmute is necessary because the different match arms return different
// mask types, at runtime only one of those expressions will exist per type,
// with the type being equal to `SimdMask`.
unsafe { std::mem::transmute(vecmask) }
}
16 => {
// same general logic as for 8 lanes, extended to 16 bits
let vecidx = i32x16::new(
1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096,
8192, 16384, 32768,
);
let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
unsafe { std::mem::transmute(vecmask) }
}
32 => {
                        // compute two separate m32x16 vector masks from the lower-most 32 bits of `mask`
// and then combine them into one m16x32 vector mask by writing and reading a temporary
let tmp = &mut [0_i16; 32];
let vecidx = i32x16::new(
1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096,
8192, 16384, 32768,
);
let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i16x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[0..16]);
let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i16x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[16..32]);
unsafe { std::mem::transmute(i16x32::from_slice_unaligned(tmp)) }
}
64 => {
                        // compute four m32x16 vector masks from all 64 bits of `mask`
// and convert them into one m8x64 vector mask by writing and reading a temporary
let tmp = &mut [0_i8; 64];
let vecidx = i32x16::new(
1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096,
8192, 16384, 32768,
);
let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i8x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[0..16]);
let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i8x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[16..32]);
let vecmask = i32x16::splat(((mask >> 32) & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i8x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[32..48]);
let vecmask = i32x16::splat(((mask >> 48) & 0xFFFF) as i32);
let vecmask = (vecidx & vecmask).eq(vecidx);
i8x16::from_cast(vecmask)
.write_to_slice_unaligned(&mut tmp[48..64]);
unsafe { std::mem::transmute(i8x64::from_slice_unaligned(tmp)) }
}
_ => panic!("Invalid number of vector lanes"),
}
}
#[inline]
fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool {
unsafe { mask.extract_unchecked(idx) }
}
fn bitmask<T>(mask: &Self::SimdMask, mut action: T)
where
T: FnMut(&[u8]),
{
action(mask.bitmask().to_byte_slice());
}
#[inline]
fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask {
unsafe { mask.replace_unchecked(idx, value) }
}
/// Selects elements of `a` and `b` using `mask`
#[inline]
fn mask_select(
mask: Self::SimdMask,
a: Self::Simd,
b: Self::Simd,
) -> Self::Simd {
mask.select(a, b)
}
#[inline]
fn mask_any(mask: Self::SimdMask) -> bool {
mask.any()
}
#[inline]
fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>(
left: Self::Simd,
right: Self::Simd,
op: F,
) -> Self::Simd {
op(left, right)
}
#[inline]
fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.eq(right)
}
#[inline]
fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.ne(right)
}
#[inline]
fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.lt(right)
}
#[inline]
fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.le(right)
}
#[inline]
fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.gt(right)
}
#[inline]
fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
left.ge(right)
}
#[inline]
fn write(simd_result: Self::Simd, slice: &mut [Self::Native]) {
unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) };
}
}
#[cfg(any(
not(any(target_arch = "x86", target_arch = "x86_64")),
not(feature = "simd")
))]
impl ArrowNumericType for $impl_ty {}
};
}
make_numeric_type!(Int8Type, i8, i8x64, m8x64);
make_numeric_type!(Int16Type, i16, i16x32, m16x32);
make_numeric_type!(Int32Type, i32, i32x16, m32x16);
make_numeric_type!(Int64Type, i64, i64x8, m64x8);
make_numeric_type!(UInt8Type, u8, u8x64, m8x64);
make_numeric_type!(UInt16Type, u16, u16x32, m16x32);
make_numeric_type!(UInt32Type, u32, u32x16, m32x16);
make_numeric_type!(UInt64Type, u64, u64x8, m64x8);
make_numeric_type!(Float32Type, f32, f32x16, m32x16);
make_numeric_type!(Float64Type, f64, f64x8, m64x8);
make_numeric_type!(TimestampSecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampMillisecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampMicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampNanosecondType, i64, i64x8, m64x8);
make_numeric_type!(Date32Type, i32, i32x16, m32x16);
make_numeric_type!(Date64Type, i64, i64x8, m64x8);
make_numeric_type!(Time32SecondType, i32, i32x16, m32x16);
make_numeric_type!(Time32MillisecondType, i32, i32x16, m32x16);
make_numeric_type!(Time64MicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(Time64NanosecondType, i64, i64x8, m64x8);
make_numeric_type!(IntervalYearMonthType, i32, i32x16, m32x16);
make_numeric_type!(IntervalDayTimeType, i64, i64x8, m64x8);
make_numeric_type!(DurationSecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationMillisecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationMicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationNanosecondType, i64, i64x8, m64x8);
/// A subtype of primitive type that represents signed numeric values.
///
/// SIMD operations are defined in this trait if available on the target system.
/// This SIMD-enabled variant is compiled only on x86/x86_64 with the `simd`
/// feature; a scalar fallback with the same name is defined below for all
/// other configurations.
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
pub trait ArrowSignedNumericType: ArrowNumericType
where
    // The signed vector type must support negation so unary kernels (e.g.
    // arithmetic negate) can operate lane-wise.
    Self::SignedSimd: Neg<Output = Self::SignedSimd>,
{
    /// Defines the SIMD type that should be used for this numeric type
    type SignedSimd;
    /// Loads a slice of signed numeric type into a SIMD register
    fn load_signed(slice: &[Self::Native]) -> Self::SignedSimd;
    /// Performs a SIMD unary operation on signed numeric type
    fn signed_unary_op<F: Fn(Self::SignedSimd) -> Self::SignedSimd>(
        a: Self::SignedSimd,
        op: F,
    ) -> Self::SignedSimd;
    /// Writes a signed SIMD result back to a slice
    fn write_signed(simd_result: Self::SignedSimd, slice: &mut [Self::Native]);
}
/// A subtype of primitive type that represents signed numeric values.
///
/// Scalar fallback used when SIMD support is unavailable (non-x86 targets or
/// the `simd` feature disabled). The `Neg` bound on the native type keeps
/// unary negation kernels usable without vector registers.
#[cfg(any(
    not(any(target_arch = "x86", target_arch = "x86_64")),
    not(feature = "simd")
))]
pub trait ArrowSignedNumericType: ArrowNumericType
where
    Self::Native: Neg<Output = Self::Native>,
{
}
// Implements `ArrowSignedNumericType` for a primitive type: the SIMD-enabled
// impl binds `$simd_ty` as the signed vector type, while the fallback impl is
// empty (the scalar `Neg` requirement comes from the trait bound itself).
macro_rules! make_signed_numeric_type {
    ($impl_ty:ty, $simd_ty:ident) => {
        #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
        impl ArrowSignedNumericType for $impl_ty {
            type SignedSimd = $simd_ty;
            #[inline]
            fn load_signed(slice: &[Self::Native]) -> Self::SignedSimd {
                // SAFETY: unaligned, length-unchecked load — assumes `slice`
                // holds at least one full vector's worth of elements.
                // TODO(review): confirm all callers uphold the length invariant.
                unsafe { Self::SignedSimd::from_slice_unaligned_unchecked(slice) }
            }
            #[inline]
            fn signed_unary_op<F: Fn(Self::SignedSimd) -> Self::SignedSimd>(
                a: Self::SignedSimd,
                op: F,
            ) -> Self::SignedSimd {
                op(a)
            }
            #[inline]
            fn write_signed(simd_result: Self::SignedSimd, slice: &mut [Self::Native]) {
                // SAFETY: unaligned, length-unchecked store — assumes `slice`
                // can receive a full vector's worth of elements (see load_signed).
                unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) };
            }
        }
        #[cfg(any(
            not(any(target_arch = "x86", target_arch = "x86_64")),
            not(feature = "simd")
        ))]
        impl ArrowSignedNumericType for $impl_ty {}
    };
}
// Only the signed integer and floating-point types get signed-numeric support.
make_signed_numeric_type!(Int8Type, i8x64);
make_signed_numeric_type!(Int16Type, i16x32);
make_signed_numeric_type!(Int32Type, i32x16);
make_signed_numeric_type!(Int64Type, i64x8);
make_signed_numeric_type!(Float32Type, f32x16);
make_signed_numeric_type!(Float64Type, f64x8);
/// A subtype of primitive type that represents temporal values.
///
/// Marker trait: implemented for timestamp, date and time types so temporal
/// kernels can be written generically over them.
pub trait ArrowTemporalType: ArrowPrimitiveType {}
impl ArrowTemporalType for TimestampSecondType {}
impl ArrowTemporalType for TimestampMillisecondType {}
impl ArrowTemporalType for TimestampMicrosecondType {}
impl ArrowTemporalType for TimestampNanosecondType {}
impl ArrowTemporalType for Date32Type {}
impl ArrowTemporalType for Date64Type {}
impl ArrowTemporalType for Time32SecondType {}
impl ArrowTemporalType for Time32MillisecondType {}
impl ArrowTemporalType for Time64MicrosecondType {}
impl ArrowTemporalType for Time64NanosecondType {}
// NOTE(review): interval types are deliberately left out below — presumably
// because an interval is a span rather than a point in time; confirm before
// enabling these impls.
// impl ArrowTemporalType for IntervalYearMonthType {}
// impl ArrowTemporalType for IntervalDayTimeType {}
/// A timestamp type allows us to create array builders that take a timestamp.
pub trait ArrowTimestampType: ArrowTemporalType {
    /// Returns the `TimeUnit` of this timestamp.
    fn get_time_unit() -> TimeUnit;
}
// Each timestamp type maps statically to its resolution, so builders can
// recover the `TimeUnit` without a value-level tag.
impl ArrowTimestampType for TimestampSecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Second
    }
}
impl ArrowTimestampType for TimestampMillisecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Millisecond
    }
}
impl ArrowTimestampType for TimestampMicrosecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Microsecond
    }
}
impl ArrowTimestampType for TimestampNanosecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Nanosecond
    }
}
/// Allows conversion from supported Arrow types to a byte slice.
///
/// Used when copying native values into Arrow buffers as raw bytes.
pub trait ToByteSlice {
    /// Converts this instance into a byte slice
    fn to_byte_slice(&self) -> &[u8];
}
impl<T: ArrowNativeType> ToByteSlice for [T] {
    /// Reinterprets the slice's memory as raw bytes (no copy).
    fn to_byte_slice(&self) -> &[u8] {
        let raw_ptr = self.as_ptr() as *const T as *const u8;
        // SAFETY: `raw_ptr` points at `self.len()` contiguous `T` values, so the
        // byte view spans exactly `len * size_of::<T>()` bytes of the same
        // allocation, and the elided lifetime ties it to the borrow of `self`.
        // Assumes `ArrowNativeType` values contain no uninitialized padding —
        // TODO(review): confirm against the trait's definition.
        unsafe { from_raw_parts(raw_ptr, self.len() * size_of::<T>()) }
    }
}
impl<T: ArrowNativeType> ToByteSlice for T {
    /// Reinterprets a single value's memory as raw bytes (no copy).
    fn to_byte_slice(&self) -> &[u8] {
        let raw_ptr = self as *const T as *const u8;
        // SAFETY: the view covers exactly `size_of::<T>()` bytes of `self` and
        // lives no longer than the borrow. Same no-padding assumption as the
        // slice impl above — TODO(review): confirm for all ArrowNativeType impls.
        unsafe { from_raw_parts(raw_ptr, size_of::<T>()) }
    }
}
impl DataType {
    /// Parse a data type from a JSON representation
    ///
    /// Returns an `ArrowError::ParseError` when the `name` attribute is
    /// missing or unsupported, or when a required attribute of the named
    /// type (e.g. `bitWidth`, `precision`, `byteWidth`) is absent or invalid.
    pub(crate) fn from(json: &Value) -> Result<DataType> {
        // Placeholder child for nested types; the real children are attached
        // later by `Field::from` from the "children" attribute.
        let default_field = Field::new("", DataType::Boolean, true);
        match *json {
            Value::Object(ref map) => match map.get("name") {
                Some(s) if s == "null" => Ok(DataType::Null),
                Some(s) if s == "bool" => Ok(DataType::Boolean),
                Some(s) if s == "binary" => Ok(DataType::Binary),
                Some(s) if s == "largebinary" => Ok(DataType::LargeBinary),
                Some(s) if s == "utf8" => Ok(DataType::Utf8),
                Some(s) if s == "largeutf8" => Ok(DataType::LargeUtf8),
                Some(s) if s == "fixedsizebinary" => {
                    // fixed-size binary carries its width in the "byteWidth" attribute
                    if let Some(Value::Number(size)) = map.get("byteWidth") {
                        Ok(DataType::FixedSizeBinary(size.as_i64().unwrap() as i32))
                    } else {
                        Err(ArrowError::ParseError(
                            "Expecting a byteWidth for fixedsizebinary".to_string(),
                        ))
                    }
                }
                Some(s) if s == "decimal" => {
                    // decimal carries "precision" and "scale" attributes directly in the map
                    let precision = match map.get("precision") {
                        Some(p) => Ok(p.as_u64().unwrap() as usize),
                        None => Err(ArrowError::ParseError(
                            "Expecting a precision for decimal".to_string(),
                        )),
                    };
                    let scale = match map.get("scale") {
                        Some(s) => Ok(s.as_u64().unwrap() as usize),
                        _ => Err(ArrowError::ParseError(
                            "Expecting a scale for decimal".to_string(),
                        )),
                    };
                    Ok(DataType::Decimal(precision?, scale?))
                }
                Some(s) if s == "floatingpoint" => match map.get("precision") {
                    Some(p) if p == "HALF" => Ok(DataType::Float16),
                    Some(p) if p == "SINGLE" => Ok(DataType::Float32),
                    Some(p) if p == "DOUBLE" => Ok(DataType::Float64),
                    _ => Err(ArrowError::ParseError(
                        "floatingpoint precision missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "timestamp" => {
                    let unit = match map.get("unit") {
                        Some(p) if p == "SECOND" => Ok(TimeUnit::Second),
                        Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond),
                        Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond),
                        Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond),
                        _ => Err(ArrowError::ParseError(
                            "timestamp unit missing or invalid".to_string(),
                        )),
                    };
                    // "timezone" is optional; absent means a zone-less timestamp.
                    let tz = match map.get("timezone") {
                        None => Ok(None),
                        Some(VString(tz)) => Ok(Some(Arc::new(tz.to_string()))),
                        _ => Err(ArrowError::ParseError(
                            "timezone must be a string".to_string(),
                        )),
                    };
                    Ok(DataType::Timestamp(unit?, tz?))
                }
                Some(s) if s == "date" => match map.get("unit") {
                    Some(p) if p == "DAY" => Ok(DataType::Date32(DateUnit::Day)),
                    Some(p) if p == "MILLISECOND" => {
                        Ok(DataType::Date64(DateUnit::Millisecond))
                    }
                    _ => Err(ArrowError::ParseError(
                        "date unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "time" => {
                    let unit = match map.get("unit") {
                        Some(p) if p == "SECOND" => Ok(TimeUnit::Second),
                        Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond),
                        Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond),
                        Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond),
                        _ => Err(ArrowError::ParseError(
                            "time unit missing or invalid".to_string(),
                        )),
                    };
                    // bitWidth selects between the Time32 and Time64 variants.
                    match map.get("bitWidth") {
                        Some(p) if p == 32 => Ok(DataType::Time32(unit?)),
                        Some(p) if p == 64 => Ok(DataType::Time64(unit?)),
                        _ => Err(ArrowError::ParseError(
                            "time bitWidth missing or invalid".to_string(),
                        )),
                    }
                }
                Some(s) if s == "duration" => match map.get("unit") {
                    Some(p) if p == "SECOND" => Ok(DataType::Duration(TimeUnit::Second)),
                    Some(p) if p == "MILLISECOND" => {
                        Ok(DataType::Duration(TimeUnit::Millisecond))
                    }
                    Some(p) if p == "MICROSECOND" => {
                        Ok(DataType::Duration(TimeUnit::Microsecond))
                    }
                    Some(p) if p == "NANOSECOND" => {
                        Ok(DataType::Duration(TimeUnit::Nanosecond))
                    }
                    // NOTE(review): message says "time unit" in the duration
                    // branch — likely copy-paste; left unchanged to preserve
                    // the emitted error text.
                    _ => Err(ArrowError::ParseError(
                        "time unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "interval" => match map.get("unit") {
                    Some(p) if p == "DAY_TIME" => {
                        Ok(DataType::Interval(IntervalUnit::DayTime))
                    }
                    Some(p) if p == "YEAR_MONTH" => {
                        Ok(DataType::Interval(IntervalUnit::YearMonth))
                    }
                    _ => Err(ArrowError::ParseError(
                        "interval unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "int" => match map.get("isSigned") {
                    Some(&Value::Bool(true)) => match map.get("bitWidth") {
                        Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::Int8),
                            Some(16) => Ok(DataType::Int16),
                            Some(32) => Ok(DataType::Int32),
                            Some(64) => Ok(DataType::Int64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    Some(&Value::Bool(false)) => match map.get("bitWidth") {
                        Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::UInt8),
                            Some(16) => Ok(DataType::UInt16),
                            Some(32) => Ok(DataType::UInt32),
                            Some(64) => Ok(DataType::UInt64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    _ => Err(ArrowError::ParseError(
                        "int signed missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "list" => {
                    // return a list with any type as its child isn't defined in the map
                    Ok(DataType::List(Box::new(default_field)))
                }
                Some(s) if s == "largelist" => {
                    // return a largelist with any type as its child isn't defined in the map
                    Ok(DataType::LargeList(Box::new(default_field)))
                }
                Some(s) if s == "fixedsizelist" => {
                    // return a list with any type as its child isn't defined in the map
                    if let Some(Value::Number(size)) = map.get("listSize") {
                        Ok(DataType::FixedSizeList(
                            Box::new(default_field),
                            size.as_i64().unwrap() as i32,
                        ))
                    } else {
                        Err(ArrowError::ParseError(
                            "Expecting a listSize for fixedsizelist".to_string(),
                        ))
                    }
                }
                Some(s) if s == "struct" => {
                    // return an empty `struct` type as its children aren't defined in the map
                    Ok(DataType::Struct(vec![]))
                }
                Some(other) => Err(ArrowError::ParseError(format!(
                    "invalid or unsupported type name: {} in {:?}",
                    other, json
                ))),
                None => Err(ArrowError::ParseError("type name missing".to_string())),
            },
            _ => Err(ArrowError::ParseError(
                "invalid json value type".to_string(),
            )),
        }
    }
    /// Generate a JSON representation of the data type
    ///
    /// Inverse of [`DataType::from`]; children of nested types are emitted by
    /// `Field::to_json`, not here.
    pub fn to_json(&self) -> Value {
        match self {
            DataType::Null => json!({"name": "null"}),
            DataType::Boolean => json!({"name": "bool"}),
            DataType::Int8 => json!({"name": "int", "bitWidth": 8, "isSigned": true}),
            DataType::Int16 => json!({"name": "int", "bitWidth": 16, "isSigned": true}),
            DataType::Int32 => json!({"name": "int", "bitWidth": 32, "isSigned": true}),
            DataType::Int64 => json!({"name": "int", "bitWidth": 64, "isSigned": true}),
            DataType::UInt8 => json!({"name": "int", "bitWidth": 8, "isSigned": false}),
            DataType::UInt16 => json!({"name": "int", "bitWidth": 16, "isSigned": false}),
            DataType::UInt32 => json!({"name": "int", "bitWidth": 32, "isSigned": false}),
            DataType::UInt64 => json!({"name": "int", "bitWidth": 64, "isSigned": false}),
            DataType::Float16 => json!({"name": "floatingpoint", "precision": "HALF"}),
            DataType::Float32 => json!({"name": "floatingpoint", "precision": "SINGLE"}),
            DataType::Float64 => json!({"name": "floatingpoint", "precision": "DOUBLE"}),
            DataType::Utf8 => json!({"name": "utf8"}),
            DataType::LargeUtf8 => json!({"name": "largeutf8"}),
            DataType::Binary => json!({"name": "binary"}),
            DataType::LargeBinary => json!({"name": "largebinary"}),
            DataType::FixedSizeBinary(byte_width) => {
                json!({"name": "fixedsizebinary", "byteWidth": byte_width})
            }
            DataType::Struct(_) => json!({"name": "struct"}),
            DataType::Union(_) => json!({"name": "union"}),
            DataType::List(_) => json!({ "name": "list"}),
            DataType::LargeList(_) => json!({ "name": "largelist"}),
            DataType::FixedSizeList(_, length) => {
                json!({"name":"fixedsizelist", "listSize": length})
            }
            DataType::Time32(unit) => {
                json!({"name": "time", "bitWidth": 32, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Time64(unit) => {
                json!({"name": "time", "bitWidth": 64, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Date32(unit) | DataType::Date64(unit) => {
                json!({"name": "date", "unit": match unit {
                    DateUnit::Day => "DAY",
                    DateUnit::Millisecond => "MILLISECOND",
                }})
            }
            DataType::Timestamp(unit, None) => {
                json!({"name": "timestamp", "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Timestamp(unit, Some(tz)) => {
                json!({"name": "timestamp", "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }, "timezone": tz})
            }
            DataType::Interval(unit) => json!({"name": "interval", "unit": match unit {
                IntervalUnit::YearMonth => "YEAR_MONTH",
                IntervalUnit::DayTime => "DAY_TIME",
            }}),
            DataType::Duration(unit) => json!({"name": "duration", "unit": match unit {
                TimeUnit::Second => "SECOND",
                TimeUnit::Millisecond => "MILLISECOND",
                TimeUnit::Microsecond => "MICROSECOND",
                TimeUnit::Nanosecond => "NANOSECOND",
            }}),
            DataType::Dictionary(_, _) => json!({ "name": "dictionary"}),
            DataType::Decimal(precision, scale) => {
                json!({"name": "decimal", "precision": precision, "scale": scale})
            }
        }
    }
    /// Returns true if this type is numeric: (UInt*, Int*, or Float*)
    pub fn is_numeric(t: &DataType) -> bool {
        use DataType::*;
        matches!(
            t,
            UInt8
                | UInt16
                | UInt32
                | UInt64
                | Int8
                | Int16
                | Int32
                | Int64
                | Float32
                | Float64
        )
    }
}
impl Field {
    /// Creates a new field
    pub fn new(name: &str, data_type: DataType, nullable: bool) -> Self {
        Field {
            name: name.to_string(),
            data_type,
            nullable,
            dict_id: 0,
            dict_is_ordered: false,
        }
    }
    /// Creates a new field backed by a dictionary encoding, with an explicit
    /// dictionary id and ordering flag.
    pub fn new_dict(
        name: &str,
        data_type: DataType,
        nullable: bool,
        dict_id: i64,
        dict_is_ordered: bool,
    ) -> Self {
        Field {
            name: name.to_string(),
            data_type,
            nullable,
            dict_id,
            dict_is_ordered,
        }
    }
    /// Returns an immutable reference to the `Field`'s name
    #[inline]
    pub const fn name(&self) -> &String {
        &self.name
    }
    /// Returns an immutable reference to the `Field`'s data-type
    #[inline]
    pub const fn data_type(&self) -> &DataType {
        &self.data_type
    }
    /// Indicates whether this `Field` supports null values
    #[inline]
    pub const fn is_nullable(&self) -> bool {
        self.nullable
    }
    /// Returns the dictionary ID, if this is a dictionary type
    #[inline]
    pub const fn dict_id(&self) -> Option<i64> {
        match self.data_type {
            DataType::Dictionary(_, _) => Some(self.dict_id),
            _ => None,
        }
    }
    /// Returns whether this `Field`'s dictionary is ordered, if this is a dictionary type
    #[inline]
    pub const fn dict_is_ordered(&self) -> Option<bool> {
        match self.data_type {
            DataType::Dictionary(_, _) => Some(self.dict_is_ordered),
            _ => None,
        }
    }
    /// Parse a `Field` definition from a JSON representation
    ///
    /// Parses the required `name`/`nullable`/`type` attributes first, then
    /// resolves nested children (for list/struct types) and finally any
    /// dictionary encoding wrapper.
    pub fn from(json: &Value) -> Result<Self> {
        match *json {
            Value::Object(ref map) => {
                let name = match map.get("name") {
                    Some(&Value::String(ref name)) => name.to_string(),
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'name' attribute".to_string(),
                        ));
                    }
                };
                let nullable = match map.get("nullable") {
                    Some(&Value::Bool(b)) => b,
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'nullable' attribute".to_string(),
                        ));
                    }
                };
                let data_type = match map.get("type") {
                    Some(t) => DataType::from(t)?,
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'type' attribute".to_string(),
                        ));
                    }
                };
                // if data_type is a struct or list, get its children
                // (DataType::from returned placeholder children; they are
                // replaced here by recursively parsing the "children" array).
                let data_type = match data_type {
                    DataType::List(_)
                    | DataType::LargeList(_)
                    | DataType::FixedSizeList(_, _) => match map.get("children") {
                        Some(Value::Array(values)) => {
                            if values.len() != 1 {
                                return Err(ArrowError::ParseError(
                                    "Field 'children' must have one element for a list data type".to_string(),
                                ));
                            }
                            match data_type {
                                DataType::List(_) => DataType::List(Box::new(
                                    Self::from(&values[0])?,
                                )),
                                DataType::LargeList(_) => DataType::LargeList(Box::new(
                                    Self::from(&values[0])?,
                                )),
                                DataType::FixedSizeList(_, int) => {
                                    DataType::FixedSizeList(
                                        Box::new(Self::from(&values[0])?),
                                        int,
                                    )
                                }
                                _ => unreachable!(
                                    "Data type should be a list, largelist or fixedsizelist"
                                ),
                            }
                        }
                        Some(_) => {
                            return Err(ArrowError::ParseError(
                                "Field 'children' must be an array".to_string(),
                            ))
                        }
                        None => {
                            return Err(ArrowError::ParseError(
                                "Field missing 'children' attribute".to_string(),
                            ));
                        }
                    },
                    DataType::Struct(mut fields) => match map.get("children") {
                        Some(Value::Array(values)) => {
                            let struct_fields: Result<Vec<Field>> =
                                values.iter().map(|v| Field::from(v)).collect();
                            fields.append(&mut struct_fields?);
                            DataType::Struct(fields)
                        }
                        Some(_) => {
                            return Err(ArrowError::ParseError(
                                "Field 'children' must be an array".to_string(),
                            ))
                        }
                        None => {
                            return Err(ArrowError::ParseError(
                                "Field missing 'children' attribute".to_string(),
                            ));
                        }
                    },
                    _ => data_type,
                };
                // If the field is dictionary-encoded, wrap the value type and
                // record the dictionary id / ordering from the "dictionary" object.
                let mut dict_id = 0;
                let mut dict_is_ordered = false;
                let data_type = match map.get("dictionary") {
                    Some(dictionary) => {
                        let index_type = match dictionary.get("indexType") {
                            Some(t) => DataType::from(t)?,
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'indexType' attribute".to_string(),
                                ));
                            }
                        };
                        dict_id = match dictionary.get("id") {
                            Some(Value::Number(n)) => n.as_i64().unwrap(),
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'id' attribute".to_string(),
                                ));
                            }
                        };
                        dict_is_ordered = match dictionary.get("isOrdered") {
                            Some(&Value::Bool(n)) => n,
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'isOrdered' attribute".to_string(),
                                ));
                            }
                        };
                        DataType::Dictionary(Box::new(index_type), Box::new(data_type))
                    }
                    _ => data_type,
                };
                Ok(Field {
                    name,
                    nullable,
                    data_type,
                    dict_id,
                    dict_is_ordered,
                })
            }
            _ => Err(ArrowError::ParseError(
                "Invalid json value type for field".to_string(),
            )),
        }
    }
    /// Generate a JSON representation of the `Field`
    pub fn to_json(&self) -> Value {
        // Children are serialized for nested types; flat types get an empty list.
        let children: Vec<Value> = match self.data_type() {
            DataType::Struct(fields) => fields.iter().map(|f| f.to_json()).collect(),
            DataType::List(field) => vec![field.to_json()],
            DataType::LargeList(field) => vec![field.to_json()],
            DataType::FixedSizeList(field, _) => vec![field.to_json()],
            _ => vec![],
        };
        match self.data_type() {
            // Dictionary fields serialize their *value* type as "type" and
            // describe the encoding under "dictionary", mirroring `Field::from`.
            DataType::Dictionary(ref index_type, ref value_type) => json!({
                "name": self.name,
                "nullable": self.nullable,
                "type": value_type.to_json(),
                "children": children,
                "dictionary": {
                    "id": self.dict_id,
                    "indexType": index_type.to_json(),
                    "isOrdered": self.dict_is_ordered
                }
            }),
            _ => json!({
                "name": self.name,
                "nullable": self.nullable,
                "type": self.data_type.to_json(),
                "children": children
            }),
        }
    }
    /// Merge field into self if it is compatible. Struct will be merged recursively.
    ///
    /// Example:
    ///
    /// ```
    /// use arrow::datatypes::*;
    ///
    /// let mut field = Field::new("c1", DataType::Int64, false);
    /// assert!(field.try_merge(&Field::new("c1", DataType::Int64, true)).is_ok());
    /// assert!(field.is_nullable());
    /// ```
    pub fn try_merge(&mut self, from: &Field) -> Result<()> {
        if from.dict_id != self.dict_id {
            return Err(ArrowError::SchemaError(
                "Fail to merge schema Field due to conflicting dict_id".to_string(),
            ));
        }
        if from.dict_is_ordered != self.dict_is_ordered {
            return Err(ArrowError::SchemaError(
                "Fail to merge schema Field due to conflicting dict_is_ordered"
                    .to_string(),
            ));
        }
        match &mut self.data_type {
            // Structs merge member-wise by name; unknown members are appended.
            DataType::Struct(nested_fields) => match &from.data_type {
                DataType::Struct(from_nested_fields) => {
                    for from_field in from_nested_fields {
                        let mut is_new_field = true;
                        for self_field in nested_fields.iter_mut() {
                            if self_field.name != from_field.name {
                                continue;
                            }
                            is_new_field = false;
                            self_field.try_merge(&from_field)?;
                        }
                        if is_new_field {
                            nested_fields.push(from_field.clone());
                        }
                    }
                }
                _ => {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            },
            // Unions merge by whole-field equality; new variants are appended.
            DataType::Union(nested_fields) => match &from.data_type {
                DataType::Union(from_nested_fields) => {
                    for from_field in from_nested_fields {
                        let mut is_new_field = true;
                        for self_field in nested_fields.iter_mut() {
                            if from_field == self_field {
                                is_new_field = false;
                                break;
                            }
                        }
                        if is_new_field {
                            nested_fields.push(from_field.clone());
                        }
                    }
                }
                _ => {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            },
            // All remaining (non-nested) types must match exactly.
            DataType::Null
            | DataType::Boolean
            | DataType::Int8
            | DataType::Int16
            | DataType::Int32
            | DataType::Int64
            | DataType::UInt8
            | DataType::UInt16
            | DataType::UInt32
            | DataType::UInt64
            | DataType::Float16
            | DataType::Float32
            | DataType::Float64
            | DataType::Timestamp(_, _)
            | DataType::Date32(_)
            | DataType::Date64(_)
            | DataType::Time32(_)
            | DataType::Time64(_)
            | DataType::Duration(_)
            | DataType::Binary
            | DataType::LargeBinary
            | DataType::Interval(_)
            | DataType::LargeList(_)
            | DataType::List(_)
            | DataType::Dictionary(_, _)
            | DataType::FixedSizeList(_, _)
            | DataType::FixedSizeBinary(_)
            | DataType::Utf8
            | DataType::LargeUtf8
            | DataType::Decimal(_, _) => {
                if self.data_type != from.data_type {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            }
        }
        // The merged field is nullable if either side allows nulls.
        if from.nullable {
            self.nullable = from.nullable;
        }
        Ok(())
    }
}
impl fmt::Display for Field {
    /// Renders the field as `name: DataType` (e.g. `c1: Int64`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Same output as the positional form, written with named arguments.
        write!(f, "{name}: {dtype:?}", name = self.name, dtype = self.data_type)
    }
}
/// Describes the meta-data of an ordered sequence of relative types.
///
/// Note that this information is only part of the meta-data and not part of the physical
/// memory layout.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Schema {
    /// The ordered columns (fields) of the schema.
    pub(crate) fields: Vec<Field>,
    /// A map of key-value pairs containing additional meta data.
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    pub(crate) metadata: HashMap<String, String>,
}
impl Schema {
    /// Creates an empty `Schema`
    pub fn empty() -> Self {
        Self {
            fields: vec![],
            metadata: HashMap::new(),
        }
    }
    /// Creates a new `Schema` from a sequence of `Field` values
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate arrow;
    /// # use arrow::datatypes::{Field, DataType, Schema};
    /// let field_a = Field::new("a", DataType::Int64, false);
    /// let field_b = Field::new("b", DataType::Boolean, false);
    ///
    /// let schema = Schema::new(vec![field_a, field_b]);
    /// ```
    pub fn new(fields: Vec<Field>) -> Self {
        Self::new_with_metadata(fields, HashMap::new())
    }
    /// Creates a new `Schema` from a sequence of `Field` values
    /// and adds additional metadata in form of key value pairs.
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate arrow;
    /// # use arrow::datatypes::{Field, DataType, Schema};
    /// # use std::collections::HashMap;
    /// let field_a = Field::new("a", DataType::Int64, false);
    /// let field_b = Field::new("b", DataType::Boolean, false);
    ///
    /// let mut metadata: HashMap<String, String> = HashMap::new();
    /// metadata.insert("row_count".to_string(), "100".to_string());
    ///
    /// let schema = Schema::new_with_metadata(vec![field_a, field_b], metadata);
    /// ```
    #[inline]
    pub const fn new_with_metadata(
        fields: Vec<Field>,
        metadata: HashMap<String, String>,
    ) -> Self {
        Self { fields, metadata }
    }
    /// Merge schema into self if it is compatible. Struct fields will be merged recursively.
    ///
    /// Example:
    ///
    /// ```
    /// use arrow::datatypes::*;
    ///
    /// let merged = Schema::try_merge(&vec![
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, false),
    ///         Field::new("c2", DataType::Utf8, false),
    ///     ]),
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, true),
    ///         Field::new("c2", DataType::Utf8, false),
    ///         Field::new("c3", DataType::Utf8, false),
    ///     ]),
    /// ]).unwrap();
    ///
    /// assert_eq!(
    ///     merged,
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, true),
    ///         Field::new("c2", DataType::Utf8, false),
    ///         Field::new("c3", DataType::Utf8, false),
    ///     ]),
    /// );
    /// ```
    pub fn try_merge(schemas: &[Self]) -> Result<Self> {
        let mut merged = Self::empty();
        for schema in schemas {
            for (key, value) in schema.metadata.iter() {
                // merge metadata: identical keys must carry identical values
                match merged.metadata.get(key) {
                    Some(old_val) => {
                        if old_val != value {
                            return Err(ArrowError::SchemaError(
                                "Fail to merge schema due to conflicting metadata"
                                    .to_string(),
                            ));
                        }
                    }
                    None => {
                        merged.metadata.insert(key.clone(), value.clone());
                    }
                }
            }
            // merge fields: same-named fields are merged via `Field::try_merge`
            for field in &schema.fields {
                let mut new_field = true;
                for merged_field in &mut merged.fields {
                    if field.name != merged_field.name {
                        continue;
                    }
                    new_field = false;
                    merged_field.try_merge(field)?
                }
                // found a new field, add to field list
                if new_field {
                    merged.fields.push(field.clone());
                }
            }
        }
        Ok(merged)
    }
    /// Returns an immutable reference of the vector of `Field` instances
    #[inline]
    pub const fn fields(&self) -> &Vec<Field> {
        &self.fields
    }
    /// Returns an immutable reference of a specific `Field` instance selected using an
    /// offset within the internal `fields` vector
    pub fn field(&self, i: usize) -> &Field {
        &self.fields[i]
    }
    /// Returns an immutable reference of a specific `Field` instance selected by name
    pub fn field_with_name(&self, name: &str) -> Result<&Field> {
        Ok(&self.fields[self.index_of(name)?])
    }
    /// Returns a vector of immutable references to all `Field` instances selected by
    /// the dictionary ID they use
    pub fn fields_with_dict_id(&self, dict_id: i64) -> Vec<&Field> {
        self.fields
            .iter()
            .filter(|f| f.dict_id() == Some(dict_id))
            .collect()
    }
    /// Find the index of the column with the given name
    pub fn index_of(&self, name: &str) -> Result<usize> {
        for i in 0..self.fields.len() {
            if self.fields[i].name == name {
                return Ok(i);
            }
        }
        // Not found: include the valid names in the error to aid debugging.
        let valid_fields: Vec<String> =
            self.fields.iter().map(|f| f.name().clone()).collect();
        Err(ArrowError::InvalidArgumentError(format!(
            "Unable to get field named \"{}\". Valid fields: {:?}",
            name, valid_fields
        )))
    }
    /// Returns an immutable reference to the Map of custom metadata key-value pairs.
    #[inline]
    pub const fn metadata(&self) -> &HashMap<String, String> {
        &self.metadata
    }
    /// Look up a column by name and return a immutable reference to the column along with
    /// it's index
    pub fn column_with_name(&self, name: &str) -> Option<(usize, &Field)> {
        self.fields
            .iter()
            .enumerate()
            .find(|&(_, c)| c.name == name)
    }
    /// Generate a JSON representation of the `Schema`
    pub fn to_json(&self) -> Value {
        json!({
            "fields": self.fields.iter().map(|field| field.to_json()).collect::<Vec<Value>>(),
            "metadata": serde_json::to_value(&self.metadata).unwrap()
        })
    }
    /// Parse a `Schema` definition from a JSON representation
    pub fn from(json: &Value) -> Result<Self> {
        match *json {
            Value::Object(ref schema) => {
                let fields = if let Some(Value::Array(fields)) = schema.get("fields") {
                    fields
                        .iter()
                        .map(|f| Field::from(f))
                        .collect::<Result<_>>()?
                } else {
                    return Err(ArrowError::ParseError(
                        "Schema fields should be an array".to_string(),
                    ));
                };
                // "metadata" is optional; absence yields an empty map.
                let metadata = if let Some(value) = schema.get("metadata") {
                    Self::from_metadata(value)?
                } else {
                    HashMap::default()
                };
                Ok(Self { fields, metadata })
            }
            _ => Err(ArrowError::ParseError(
                "Invalid json value type for schema".to_string(),
            )),
        }
    }
    /// Parse a `metadata` definition from a JSON representation
    /// The JSON can either be an Object or an Array of Objects
    /// (each object of the array having `key`/`value` string members).
    fn from_metadata(json: &Value) -> Result<HashMap<String, String>> {
        match json {
            Value::Array(_) => {
                let mut hashmap = HashMap::new();
                let values: Vec<MetadataKeyValue> = serde_json::from_value(json.clone())
                    .map_err(|_| {
                        ArrowError::JsonError(
                            "Unable to parse object into key-value pair".to_string(),
                        )
                    })?;
                for meta in values {
                    hashmap.insert(meta.key.clone(), meta.value);
                }
                Ok(hashmap)
            }
            Value::Object(md) => md
                .iter()
                .map(|(k, v)| {
                    if let Value::String(v) = v {
                        Ok((k.to_string(), v.to_string()))
                    } else {
                        Err(ArrowError::ParseError(
                            "metadata `value` field must be a string".to_string(),
                        ))
                    }
                })
                .collect::<Result<_>>(),
            _ => Err(ArrowError::ParseError(
                "`metadata` field must be an object".to_string(),
            )),
        }
    }
}
impl fmt::Display for Schema {
    /// Renders the schema as a comma-separated list of its fields.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render each field first, then emit them joined by ", ".
        let rendered: Vec<String> =
            self.fields.iter().map(|field| field.to_string()).collect();
        f.write_str(&rendered.join(", "))
    }
}
/// A reference-counted reference to a [`Schema`](crate::datatypes::Schema).
///
/// Cloning a `SchemaRef` is cheap (an atomic refcount increment).
pub type SchemaRef = Arc<Schema>;
/// Helper for deserializing schema metadata expressed as an array of
/// `{"key": ..., "value": ...}` objects (see `Schema::from_metadata`).
#[derive(Deserialize)]
struct MetadataKeyValue {
    // metadata entry name
    key: String,
    // metadata entry value (must be a string in the JSON form)
    value: String,
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::Number;
use serde_json::Value::{Bool, Number as VNumber};
use std::f32::NAN;
#[test]
fn create_struct_type() {
let _person = DataType::Struct(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
]);
}
#[test]
fn serde_struct_type() {
let person = DataType::Struct(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
]);
let serialized = serde_json::to_string(&person).unwrap();
// NOTE that this is testing the default (derived) serialization format, not the
// JSON format specified in metadata.md
assert_eq!(
"{\"Struct\":[\
{\"name\":\"first_name\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"last_name\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"address\",\"data_type\":{\"Struct\":\
[{\"name\":\"street\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"zip\",\"data_type\":\"UInt16\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false}\
]},\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false}]}",
serialized
);
let deserialized = serde_json::from_str(&serialized).unwrap();
assert_eq!(person, deserialized);
}
#[test]
fn struct_field_to_json() {
let f = Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
);
let value: Value = serde_json::from_str(
r#"{
"name": "address",
"nullable": false,
"type": {
"name": "struct"
},
"children": [
{
"name": "street",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "zip",
"nullable": false,
"type": {
"name": "int",
"bitWidth": 16,
"isSigned": false
},
"children": []
}
]
}"#,
)
.unwrap();
assert_eq!(value, f.to_json());
}
#[test]
fn primitive_field_to_json() {
let f = Field::new("first_name", DataType::Utf8, false);
let value: Value = serde_json::from_str(
r#"{
"name": "first_name",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}"#,
)
.unwrap();
assert_eq!(value, f.to_json());
}
#[test]
fn parse_struct_from_json() {
let json = r#"
{
"name": "address",
"type": {
"name": "struct"
},
"nullable": false,
"children": [
{
"name": "street",
"type": {
"name": "utf8"
},
"nullable": false,
"children": []
},
{
"name": "zip",
"type": {
"name": "int",
"isSigned": false,
"bitWidth": 16
},
"nullable": false,
"children": []
}
]
}
"#;
let value: Value = serde_json::from_str(json).unwrap();
let dt = Field::from(&value).unwrap();
let expected = Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
);
assert_eq!(expected, dt);
}
#[test]
fn parse_utf8_from_json() {
let json = "{\"name\":\"utf8\"}";
let value: Value = serde_json::from_str(json).unwrap();
let dt = DataType::from(&value).unwrap();
assert_eq!(DataType::Utf8, dt);
}
#[test]
fn parse_int32_from_json() {
let json = "{\"name\": \"int\", \"isSigned\": true, \"bitWidth\": 32}";
let value: Value = serde_json::from_str(json).unwrap();
let dt = DataType::from(&value).unwrap();
assert_eq!(DataType::Int32, dt);
}
#[test]
fn schema_json() {
// Add some custom metadata
let metadata: HashMap<String, String> =
[("Key".to_string(), "Value".to_string())]
.iter()
.cloned()
.collect();
let schema = Schema::new_with_metadata(
vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Binary, false),
Field::new("c3", DataType::FixedSizeBinary(3), false),
Field::new("c4", DataType::Boolean, false),
Field::new("c5", DataType::Date32(DateUnit::Day), false),
Field::new("c6", DataType::Date64(DateUnit::Millisecond), false),
Field::new("c7", DataType::Time32(TimeUnit::Second), false),
Field::new("c8", DataType::Time32(TimeUnit::Millisecond), false),
Field::new("c9", DataType::Time32(TimeUnit::Microsecond), false),
Field::new("c10", DataType::Time32(TimeUnit::Nanosecond), false),
Field::new("c11", DataType::Time64(TimeUnit::Second), false),
Field::new("c12", DataType::Time64(TimeUnit::Millisecond), false),
Field::new("c13", DataType::Time64(TimeUnit::Microsecond), false),
Field::new("c14", DataType::Time64(TimeUnit::Nanosecond), false),
Field::new("c15", DataType::Timestamp(TimeUnit::Second, None), false),
Field::new(
"c16",
DataType::Timestamp(
TimeUnit::Millisecond,
Some(Arc::new("UTC".to_string())),
),
false,
),
Field::new(
"c17",
DataType::Timestamp(
TimeUnit::Microsecond,
Some(Arc::new("Africa/Johannesburg".to_string())),
),
false,
),
Field::new(
"c18",
DataType::Timestamp(TimeUnit::Nanosecond, None),
false,
),
Field::new("c19", DataType::Interval(IntervalUnit::DayTime), false),
Field::new("c20", DataType::Interval(IntervalUnit::YearMonth), false),
Field::new(
"c21",
DataType::List(Box::new(Field::new("item", DataType::Boolean, true))),
false,
),
Field::new(
"c22",
DataType::FixedSizeList(
Box::new(Field::new("bools", DataType::Boolean, false)),
5,
),
false,
),
Field::new(
"c23",
DataType::List(Box::new(Field::new(
"inner_list",
DataType::List(Box::new(Field::new(
"struct",
DataType::Struct(vec![]),
true,
))),
false,
))),
true,
),
Field::new(
"c24",
DataType::Struct(vec![
Field::new("a", DataType::Utf8, false),
Field::new("b", DataType::UInt16, false),
]),
false,
),
Field::new("c25", DataType::Interval(IntervalUnit::YearMonth), true),
Field::new("c26", DataType::Interval(IntervalUnit::DayTime), true),
Field::new("c27", DataType::Duration(TimeUnit::Second), false),
Field::new("c28", DataType::Duration(TimeUnit::Millisecond), false),
Field::new("c29", DataType::Duration(TimeUnit::Microsecond), false),
Field::new("c30", DataType::Duration(TimeUnit::Nanosecond), false),
Field::new_dict(
"c31",
DataType::Dictionary(
Box::new(DataType::Int32),
Box::new(DataType::Utf8),
),
true,
123,
true,
),
Field::new("c32", DataType::LargeBinary, true),
Field::new("c33", DataType::LargeUtf8, true),
Field::new(
"c34",
DataType::LargeList(Box::new(Field::new(
"inner_large_list",
DataType::LargeList(Box::new(Field::new(
"struct",
DataType::Struct(vec![]),
false,
))),
true,
))),
true,
),
],
metadata,
);
let expected = schema.to_json();
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "c2",
"nullable": false,
"type": {
"name": "binary"
},
"children": []
},
{
"name": "c3",
"nullable": false,
"type": {
"name": "fixedsizebinary",
"byteWidth": 3
},
"children": []
},
{
"name": "c4",
"nullable": false,
"type": {
"name": "bool"
},
"children": []
},
{
"name": "c5",
"nullable": false,
"type": {
"name": "date",
"unit": "DAY"
},
"children": []
},
{
"name": "c6",
"nullable": false,
"type": {
"name": "date",
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c7",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "SECOND"
},
"children": []
},
{
"name": "c8",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c9",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c10",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c11",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "SECOND"
},
"children": []
},
{
"name": "c12",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c13",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c14",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c15",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "SECOND"
},
"children": []
},
{
"name": "c16",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "MILLISECOND",
"timezone": "UTC"
},
"children": []
},
{
"name": "c17",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "MICROSECOND",
"timezone": "Africa/Johannesburg"
},
"children": []
},
{
"name": "c18",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c19",
"nullable": false,
"type": {
"name": "interval",
"unit": "DAY_TIME"
},
"children": []
},
{
"name": "c20",
"nullable": false,
"type": {
"name": "interval",
"unit": "YEAR_MONTH"
},
"children": []
},
{
"name": "c21",
"nullable": false,
"type": {
"name": "list"
},
"children": [
{
"name": "item",
"nullable": true,
"type": {
"name": "bool"
},
"children": []
}
]
},
{
"name": "c22",
"nullable": false,
"type": {
"name": "fixedsizelist",
"listSize": 5
},
"children": [
{
"name": "bools",
"nullable": false,
"type": {
"name": "bool"
},
"children": []
}
]
},
{
"name": "c23",
"nullable": true,
"type": {
"name": "list"
},
"children": [
{
"name": "inner_list",
"nullable": false,
"type": {
"name": "list"
},
"children": [
{
"name": "struct",
"nullable": true,
"type": {
"name": "struct"
},
"children": []
}
]
}
]
},
{
"name": "c24",
"nullable": false,
"type": {
"name": "struct"
},
"children": [
{
"name": "a",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "b",
"nullable": false,
"type": {
"name": "int",
"bitWidth": 16,
"isSigned": false
},
"children": []
}
]
},
{
"name": "c25",
"nullable": true,
"type": {
"name": "interval",
"unit": "YEAR_MONTH"
},
"children": []
},
{
"name": "c26",
"nullable": true,
"type": {
"name": "interval",
"unit": "DAY_TIME"
},
"children": []
},
{
"name": "c27",
"nullable": false,
"type": {
"name": "duration",
"unit": "SECOND"
},
"children": []
},
{
"name": "c28",
"nullable": false,
"type": {
"name": "duration",
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c29",
"nullable": false,
"type": {
"name": "duration",
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c30",
"nullable": false,
"type": {
"name": "duration",
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c31",
"nullable": true,
"children": [],
"type": {
"name": "utf8"
},
"dictionary": {
"id": 123,
"indexType": {
"name": "int",
"bitWidth": 32,
"isSigned": true
},
"isOrdered": true
}
},
{
"name": "c32",
"nullable": true,
"type": {
"name": "largebinary"
},
"children": []
},
{
"name": "c33",
"nullable": true,
"type": {
"name": "largeutf8"
},
"children": []
},
{
"name": "c34",
"nullable": true,
"type": {
"name": "largelist"
},
"children": [
{
"name": "inner_large_list",
"nullable": true,
"type": {
"name": "largelist"
},
"children": [
{
"name": "struct",
"nullable": false,
"type": {
"name": "struct"
},
"children": []
}
]
}
]
}
],
"metadata" : {
"Key": "Value"
}
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
assert_eq!(expected, value);
// convert back to a schema
let value: Value = serde_json::from_str(&json).unwrap();
let schema2 = Schema::from(&value).unwrap();
assert_eq!(schema, schema2);
// Check that empty metadata produces empty value in JSON and can be parsed
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}
],
"metadata": {}
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
let schema = Schema::from(&value).unwrap();
assert!(schema.metadata.is_empty());
// Check that metadata field is not required in the JSON.
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}
]
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
let schema = Schema::from(&value).unwrap();
assert!(schema.metadata.is_empty());
}
#[test]
fn create_schema_string() {
let schema = person_schema();
assert_eq!(schema.to_string(), "first_name: Utf8, \
last_name: Utf8, \
address: Struct([\
Field { name: \"street\", data_type: Utf8, nullable: false, dict_id: 0, dict_is_ordered: false }, \
Field { name: \"zip\", data_type: UInt16, nullable: false, dict_id: 0, dict_is_ordered: false }]), \
interests: Dictionary(Int32, Utf8)")
}
#[test]
fn schema_field_accessors() {
let schema = person_schema();
// test schema accessors
assert_eq!(schema.fields().len(), 4);
// test field accessors
let first_name = &schema.fields()[0];
assert_eq!(first_name.name(), "first_name");
assert_eq!(first_name.data_type(), &DataType::Utf8);
assert_eq!(first_name.is_nullable(), false);
assert_eq!(first_name.dict_id(), None);
assert_eq!(first_name.dict_is_ordered(), None);
let interests = &schema.fields()[3];
assert_eq!(interests.name(), "interests");
assert_eq!(
interests.data_type(),
&DataType::Dictionary(Box::new(DataType::Int32), Box::new(DataType::Utf8))
);
assert_eq!(interests.dict_id(), Some(123));
assert_eq!(interests.dict_is_ordered(), Some(true));
}
#[test]
#[should_panic(
expected = "Unable to get field named \\\"nickname\\\". Valid fields: [\\\"first_name\\\", \\\"last_name\\\", \\\"address\\\", \\\"interests\\\"]"
)]
fn schema_index_of() {
let schema = person_schema();
assert_eq!(schema.index_of("first_name").unwrap(), 0);
assert_eq!(schema.index_of("last_name").unwrap(), 1);
schema.index_of("nickname").unwrap();
}
#[test]
#[should_panic(
expected = "Unable to get field named \\\"nickname\\\". Valid fields: [\\\"first_name\\\", \\\"last_name\\\", \\\"address\\\", \\\"interests\\\"]"
)]
fn schema_field_with_name() {
let schema = person_schema();
assert_eq!(
schema.field_with_name("first_name").unwrap().name(),
"first_name"
);
assert_eq!(
schema.field_with_name("last_name").unwrap().name(),
"last_name"
);
schema.field_with_name("nickname").unwrap();
}
#[test]
fn schema_field_with_dict_id() {
let schema = person_schema();
let fields_dict_123: Vec<_> = schema
.fields_with_dict_id(123)
.iter()
.map(|f| f.name())
.collect();
assert_eq!(fields_dict_123, vec!["interests"]);
assert!(schema.fields_with_dict_id(456).is_empty());
}
#[test]
fn schema_equality() {
let schema1 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float64, true),
Field::new("c3", DataType::LargeBinary, true),
]);
let schema2 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float64, true),
Field::new("c3", DataType::LargeBinary, true),
]);
assert_eq!(schema1, schema2);
let schema3 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float32, true),
]);
let schema4 = Schema::new(vec![
Field::new("C1", DataType::Utf8, false),
Field::new("C2", DataType::Float64, true),
]);
assert!(schema1 != schema3);
assert!(schema1 != schema4);
assert!(schema2 != schema3);
assert!(schema2 != schema4);
assert!(schema3 != schema4);
}
#[test]
fn test_arrow_native_type_to_json() {
assert_eq!(Some(Bool(true)), true.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i8.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i16.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i32.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i64.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u8.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u16.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u32.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u64.into_json_value());
assert_eq!(
Some(VNumber(Number::from_f64(0.01f64).unwrap())),
0.01.into_json_value()
);
assert_eq!(
Some(VNumber(Number::from_f64(0.01f64).unwrap())),
0.01f64.into_json_value()
);
assert_eq!(None, NAN.into_json_value());
}
fn person_schema() -> Schema {
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
Field::new_dict(
"interests",
DataType::Dictionary(Box::new(DataType::Int32), Box::new(DataType::Utf8)),
true,
123,
true,
),
])
}
#[test]
fn test_schema_merge() -> Result<()> {
let merged = Schema::try_merge(&[
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![Field::new("zip", DataType::UInt16, false)]),
false,
),
]),
Schema::new_with_metadata(
vec![
// nullable merge
Field::new("last_name", DataType::Utf8, true),
Field::new(
"address",
DataType::Struct(vec![
// add new nested field
Field::new("street", DataType::Utf8, false),
// nullable merge on nested field
Field::new("zip", DataType::UInt16, true),
]),
false,
),
// new field
Field::new("number", DataType::Utf8, true),
],
[("foo".to_string(), "bar".to_string())]
.iter()
.cloned()
.collect::<HashMap<String, String>>(),
),
])?;
assert_eq!(
merged,
Schema::new_with_metadata(
vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, true),
Field::new(
"address",
DataType::Struct(vec![
Field::new("zip", DataType::UInt16, true),
Field::new("street", DataType::Utf8, false),
]),
false,
),
Field::new("number", DataType::Utf8, true),
],
[("foo".to_string(), "bar".to_string())]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
)
);
// support merge union fields
assert_eq!(
Schema::try_merge(&[
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c11", DataType::Utf8, true),
Field::new("c12", DataType::Utf8, true),
]),
false
),]),
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c12", DataType::Utf8, true),
Field::new("c13", DataType::Time64(TimeUnit::Second), true),
]),
false
),])
])?,
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c11", DataType::Utf8, true),
Field::new("c12", DataType::Utf8, true),
Field::new("c13", DataType::Time64(TimeUnit::Second), true),
]),
false
),]),
);
// incompatible field should throw error
assert!(Schema::try_merge(&[
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
]),
Schema::new(vec![Field::new("last_name", DataType::Int64, false),])
])
.is_err());
// incompatible metadata should throw error
assert!(Schema::try_merge(&[
Schema::new_with_metadata(
vec![Field::new("first_name", DataType::Utf8, false)],
[("foo".to_string(), "bar".to_string()),]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
),
Schema::new_with_metadata(
vec![Field::new("last_name", DataType::Utf8, false)],
[("foo".to_string(), "baz".to_string()),]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
)
])
.is_err());
Ok(())
}
}
#[cfg(all(
    test,
    any(target_arch = "x86", target_arch = "x86_64"),
    feature = "simd"
))]
/// SIMD-only tests for `ArrowNumericType::mask_from_u64`, which expands a
/// scalar bitmask into a packed_simd boolean vector (one lane per bit,
/// least-significant bit first).
mod arrow_numeric_type_tests {
    use crate::datatypes::{
        ArrowNumericType, Float32Type, Float64Type, Int32Type, Int64Type, Int8Type,
        UInt16Type,
    };
    use packed_simd::*;
    use FromCast;
    /// calculate the expected mask by iterating over all bits
    macro_rules! expected_mask {
        ($T:ty, $MASK:expr) => {{
            let mask = $MASK;
            // simd width of all types is currently 64 bytes -> 512 bits
            let lanes = 64 / std::mem::size_of::<$T>();
            // translate each set bit into a value of all ones (-1) of the correct type
            (0..lanes)
                .map(|i| (if (mask & (1 << i)) != 0 { -1 } else { 0 }))
                .collect::<Vec<$T>>()
        }};
    }
    #[test]
    fn test_mask_f64() {
        let mask = 0b10101010;
        let actual = Float64Type::mask_from_u64(mask);
        let expected = expected_mask!(i64, mask);
        let expected = m64x8::from_cast(i64x8::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
    // Renamed from `test_mask_u64`: this case exercises Int64Type, not an
    // unsigned type.
    #[test]
    fn test_mask_i64() {
        let mask = 0b01010101;
        let actual = Int64Type::mask_from_u64(mask);
        let expected = expected_mask!(i64, mask);
        let expected = m64x8::from_cast(i64x8::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
    #[test]
    fn test_mask_f32() {
        let mask = 0b10101010_10101010;
        let actual = Float32Type::mask_from_u64(mask);
        let expected = expected_mask!(i32, mask);
        let expected =
            m32x16::from_cast(i32x16::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
    #[test]
    fn test_mask_i32() {
        let mask = 0b01010101_01010101;
        let actual = Int32Type::mask_from_u64(mask);
        let expected = expected_mask!(i32, mask);
        let expected =
            m32x16::from_cast(i32x16::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
    #[test]
    fn test_mask_u16() {
        let mask = 0b01010101_01010101_10101010_10101010;
        let actual = UInt16Type::mask_from_u64(mask);
        let expected = expected_mask!(i16, mask);
        // (removed a leftover `dbg!(&expected)` debug print)
        let expected =
            m16x32::from_cast(i16x32::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
    #[test]
    fn test_mask_i8() {
        let mask =
            0b01010101_01010101_10101010_10101010_01010101_01010101_10101010_10101010;
        let actual = Int8Type::mask_from_u64(mask);
        let expected = expected_mask!(i8, mask);
        let expected = m8x64::from_cast(i8x64::from_slice_unaligned(expected.as_slice()));
        assert_eq!(expected, actual);
    }
}
| 36.809732 | 154 | 0.452755 |
267a389a1d059744d104619bfc81ae839b9e0adb | 10,035 | mod app;
mod plot;
mod read;
mod stats;
use std::env;
#[macro_use]
extern crate derive_builder;
#[macro_use]
extern crate log;
use chrono::Duration;
use clap::ArgMatches;
use regex::Regex;
use simplelog::{ColorChoice, ConfigBuilder, LevelFilter, TermLogger, TerminalMode};
use yansi::Paint;
/// Returns whether `vec` holds at least `min` elements, logging a warning
/// when it does not.
fn assert_data<T>(vec: &[T], min: usize) -> bool {
    let enough = vec.len() >= min;
    if !enough {
        warn!("Not enough data to process");
    }
    enough
}
/// Sets up color choices and verbosity in the two libraries used for output:
/// simplelog and yansi.
///
/// `option` is "yes"/"no"/"auto"; `verbose` switches the log filter from
/// Info to Debug. Mutates global state (yansi's paint switch and the global
/// logger), so it should only be called once at startup.
fn configure_output(option: &str, verbose: bool) {
    let mut color_choice = ColorChoice::Auto;
    match option {
        // "no": strip ANSI color both from yansi output and from the logger.
        "no" => {
            Paint::disable();
            color_choice = ColorChoice::Never;
        }
        // "auto": disable color for dumb terminals or when stdout is not a
        // tty (e.g. piped output); simplelog keeps its own Auto detection.
        "auto" => match env::var("TERM") {
            Ok(value) if value == "dumb" => Paint::disable(),
            _ => {
                if atty::isnt(atty::Stream::Stdout) {
                    Paint::disable();
                }
            }
        },
        "yes" => {
            color_choice = ColorChoice::Always;
        }
        _ => (),
    };
    // Initialize the terminal logger; Trace level for time/thread/target
    // effectively hides those fields from normal output.
    if let Err(err) = TermLogger::init(
        if verbose {
            LevelFilter::Debug
        } else {
            LevelFilter::Info
        },
        ConfigBuilder::new()
            .set_time_level(LevelFilter::Trace)
            .set_thread_level(LevelFilter::Trace)
            .set_target_level(LevelFilter::Trace)
            .build(),
        TerminalMode::Stderr,
        color_choice,
    ) {
        // We trigger this error when unit testing this fn (the global logger
        // can only be initialized once per process).
        eprintln!("Error: {}", err);
    }
}
/// Parses a human-readable duration string (e.g. "2h 30m 5s") into a chrono
/// `Duration`, propagating humantime's parse error unchanged.
///
/// Sub-millisecond precision is discarded because the result is expressed in
/// whole milliseconds; the `as i64` cast only truncates for durations beyond
/// ~292 million years.
fn parse_duration(duration: &str) -> Result<Duration, humantime::DurationError> {
    // `.map` replaces the previous match that merely re-wrapped Ok/Err.
    humantime::parse_duration(duration)
        .map(|d| Duration::milliseconds(d.as_millis() as i64))
}
/// Build a reader able to read floats (potentially capturing them with regex)
/// from an input source.
fn get_float_reader(matches: &ArgMatches) -> Result<read::DataReader, ()> {
let mut builder = read::DataReaderBuilder::default();
if matches.is_present("min") || matches.is_present("max") {
let min = matches.value_of_t("min").unwrap_or(f64::NEG_INFINITY);
let max = matches.value_of_t("max").unwrap_or(f64::INFINITY);
if min > max {
error!("Minimum should be smaller than maximum");
return Err(());
}
builder.range(min..max);
}
if let Some(string) = matches.value_of("regex") {
match Regex::new(string) {
Ok(re) => {
builder.regex(re);
}
_ => {
error!("Failed to parse regex {}", string);
return Err(());
}
};
}
Ok(builder.build().unwrap())
}
/// Implements the hist cli-subcommand
///
/// Exit codes: 0 on success, 1 when there is no data, 2 on bad reader options.
fn histogram(matches: &ArgMatches) -> i32 {
    let reader = match get_float_reader(matches) {
        Ok(reader) => reader,
        Err(_) => return 2,
    };
    let data = reader.read(matches.value_of("input").unwrap());
    if !assert_data(&data, 1) {
        return 1;
    }
    let stats = stats::Stats::new(&data);
    let width: usize = matches.value_of_t("width").unwrap();
    // Never use more buckets than there are data points.
    let intervals = matches
        .value_of_t::<usize>("intervals")
        .unwrap()
        .min(data.len());
    let bucket_size = (stats.max - stats.min) / intervals as f64;
    let mut histogram = plot::Histogram::new(intervals, bucket_size, stats);
    histogram.load(&data);
    print!("{:width$}", histogram, width = width);
    0
}
/// Implements the plot cli-subcommand
///
/// Exit codes: 0 on success, 1 when there is no data, 2 on bad reader options.
fn plot(matches: &ArgMatches) -> i32 {
    let reader = match get_float_reader(matches) {
        Ok(reader) => reader,
        Err(_) => return 2,
    };
    let data = reader.read(matches.value_of("input").unwrap());
    if !assert_data(&data, 1) {
        return 1;
    }
    let width = matches.value_of_t("width").unwrap();
    let height = matches.value_of_t("height").unwrap();
    let mut xy_plot = plot::XyPlot::new(width, height, stats::Stats::new(&data));
    xy_plot.load(&data);
    print!("{}", xy_plot);
    0
}
/// Implements the matches cli-subcommand
fn matchbar(matches: &ArgMatches) -> i32 {
    let reader = read::DataReader::default();
    let width: usize = matches.value_of_t("width").unwrap();
    let input = matches.value_of("input").unwrap();
    // "match" is required with multiple values, so unwrap is safe here.
    let patterns = matches.values_of("match").unwrap().collect();
    print!(
        "{:width$}",
        reader.read_matches(input, patterns),
        width = width
    );
    0
}
/// Implements the common-terms cli-subcommand
fn common_terms(matches: &ArgMatches) -> i32 {
let mut builder = read::DataReaderBuilder::default();
if let Some(string) = matches.value_of("regex") {
match Regex::new(string) {
Ok(re) => {
builder.regex(re);
}
_ => {
error!("Failed to parse regex {}", string);
return 1;
}
};
} else {
builder.regex(Regex::new("(.*)").unwrap());
};
let reader = builder.build().unwrap();
let width = matches.value_of_t("width").unwrap();
let lines = matches.value_of_t("lines").unwrap();
if lines < 1 {
error!("You should specify a potitive number of lines");
return 2;
};
print!(
"{:width$}",
reader.read_terms(matches.value_of("input").unwrap(), lines),
width = width
);
0
}
/// Implements the timehist cli-subcommand
///
/// Builds a timestamp reader from the CLI options (regex, timestamp format,
/// early-stop flag, max duration), reads the input and prints a histogram of
/// events over time. Exit codes: 0 on success (including "not enough data",
/// which only warns), 2 on a bad regex or duration.
fn timehist(matches: &ArgMatches) -> i32 {
    let mut builder = read::TimeReaderBuilder::default();
    if let Some(string) = matches.value_of("regex") {
        match Regex::new(string) {
            Ok(re) => {
                builder.regex(re);
            }
            _ => {
                error!("Failed to parse regex {}", string);
                return 2;
            }
        };
    }
    // Optional explicit timestamp format for parsing the input lines.
    if let Some(as_str) = matches.value_of("format") {
        builder.ts_format(as_str.to_string());
    }
    builder.early_stop(matches.is_present("early-stop"));
    // Optional cap on the covered time span, e.g. "2h 30m".
    if let Some(duration) = matches.value_of("duration") {
        match parse_duration(duration) {
            Ok(d) => builder.duration(d),
            Err(err) => {
                error!("Failed to parse duration {}: {}", duration, err);
                return 2;
            }
        };
    };
    let width = matches.value_of_t("width").unwrap();
    let reader = builder.build().unwrap();
    let vec = reader.read(matches.value_of("input").unwrap());
    // A time histogram needs at least two timestamps to span an interval.
    if assert_data(&vec, 2) {
        let mut timehist = plot::TimeHistogram::new(matches.value_of_t("intervals").unwrap(), &vec);
        timehist.load(&vec);
        print!("{:width$}", timehist, width = width);
    };
    0
}
/// Implements the split-timehist cli-subcommand
///
/// Like timehist, but splits the counts into up to five sub-groups, one per
/// --match pattern. Exit codes: 0 on success, 2 when no match is given or
/// more than five are requested.
fn splittime(matches: &ArgMatches) -> i32 {
    let mut builder = read::SplitTimeReaderBuilder::default();
    let string_list: Vec<String> = match matches.values_of("match") {
        Some(s) => s.map(|s| s.to_string()).collect(),
        None => {
            error!("At least a match is needed");
            return 2;
        }
    };
    // The plot only supports five distinguishable series.
    if string_list.len() > 5 {
        error!("Only 5 different sub-groups are supported");
        return 2;
    }
    // Optional explicit timestamp format for parsing the input lines.
    if let Some(as_str) = matches.value_of("format") {
        builder.ts_format(as_str.to_string());
    }
    builder.matches(string_list.iter().map(|s| s.to_string()).collect());
    let width = matches.value_of_t("width").unwrap();
    let reader = builder.build().unwrap();
    let vec = reader.read(matches.value_of("input").unwrap());
    // A time histogram needs at least two timestamps to span an interval.
    if assert_data(&vec, 2) {
        let timehist = plot::SplitTimeHistogram::new(
            matches.value_of_t("intervals").unwrap(),
            string_list,
            &vec,
        );
        print!("{:width$}", timehist, width = width);
    };
    0
}
fn main() {
    // Parse CLI arguments (the clap definition lives in the `app` module).
    let matches = app::get_app().get_matches();
    // Configure color and verbosity before anything logs.
    configure_output(
        matches.value_of("color").unwrap(),
        matches.is_present("verbose"),
    );
    // Dispatch to the subcommand handler; each returns its process exit code.
    std::process::exit(match matches.subcommand() {
        Some(("hist", subcommand_matches)) => histogram(subcommand_matches),
        Some(("plot", subcommand_matches)) => plot(subcommand_matches),
        Some(("matches", subcommand_matches)) => matchbar(subcommand_matches),
        Some(("timehist", subcommand_matches)) => timehist(subcommand_matches),
        Some(("common-terms", subcommand_matches)) => common_terms(subcommand_matches),
        Some(("split-timehist", subcommand_matches)) => splittime(subcommand_matches),
        // NOTE(review): presumably the clap app requires a subcommand, making
        // this arm unreachable — confirm against app::get_app().
        _ => unreachable!("Invalid subcommand"),
    });
}
#[cfg(test)]
mod tests {
    use super::*;
    use yansi::Color::Blue;
    // NOTE(review): these tests mutate process-global state (yansi's paint
    // switch, the global logger, the TERM env var), so they are sensitive to
    // execution order and to the test harness running them in parallel.
    #[test]
    fn test_output_yes() {
        Paint::enable();
        configure_output("yes", true);
        // With color forced on, yansi emits ANSI escape sequences.
        let display = format!("{}", Blue.paint("blue"));
        assert_eq!("\u{1b}[34mblue\u{1b}[0m", display);
        assert_eq!(LevelFilter::Debug, log::max_level());
    }
    #[test]
    fn test_output_no() {
        Paint::enable();
        configure_output("no", false);
        // With color disabled, painting is a no-op.
        let display = format!("{}", Blue.paint("blue"));
        assert_eq!("blue", display);
        assert_eq!(LevelFilter::Info, log::max_level());
    }
    #[test]
    fn test_output_auto() {
        Paint::enable();
        // A dumb terminal must disable color even in "auto" mode.
        env::set_var("TERM", "dumb");
        configure_output("auto", false);
        let display = format!("{}", Blue.paint("blue"));
        assert_eq!("blue", display);
    }
    #[test]
    fn test_duration() {
        assert_eq!(
            parse_duration("2h 30m 5s 100ms"),
            Ok(Duration::milliseconds(
                2 * 60 * 60000 + 30 * 60000 + 5000 + 100
            ))
        );
        assert_eq!(parse_duration("3days"), Ok(Duration::days(3)));
        // Unparseable strings propagate humantime's error.
        assert!(parse_duration("bananas").is_err());
    }
    #[test]
    fn test_assert_data() {
        let v = vec![true];
        assert!(assert_data(&v, 1));
        assert!(!assert_data(&v, 2));
        // The empty slice never satisfies a minimum of one.
        let v = Vec::<bool>::new();
        assert!(!assert_data(&v, 1));
    }
}
| 30.50152 | 100 | 0.55715 |
fc45367c359cbd580b7db75361cf9d31bd6769ed | 6,775 | use mars_raw_utils::{
constants,
print,
util,
nsyt
};
#[macro_use]
extern crate clap;
use std::process;
use clap::{Arg, App};
fn main() {
let matches = App::new(crate_name!())
.version(crate_version!())
.author(crate_authors!())
.arg(Arg::with_name(constants::param::PARAM_VERBOSE)
.short(constants::param::PARAM_VERBOSE)
.help("Show verbose output"))
.arg(Arg::with_name("camera")
.short("c")
.long("camera")
.value_name("camera")
.help("M20 Camera Instrument(s)")
.required(false)
.takes_value(true)
.multiple(true))
.arg(Arg::with_name("sol")
.short("s")
.long("sol")
.value_name("sol")
.help("Mission Sol")
.required(false)
.takes_value(true))
.arg(Arg::with_name("minsol")
.short("m")
.long("minsol")
.value_name("minsol")
.help("Starting Mission Sol")
.required(false)
.takes_value(true))
.arg(Arg::with_name("maxsol")
.short("M")
.long("maxsol")
.value_name("maxsol")
.help("Ending Mission Sol")
.required(false)
.takes_value(true))
.arg(Arg::with_name("list")
.short("l")
.long("list")
.value_name("list")
.help("Don't download, only list results")
.takes_value(false)
.required(false))
.arg(Arg::with_name("thumbnails")
.short("t")
.long("thumbnails")
.value_name("thumbnails")
.help("Download thumbnails in the results")
.takes_value(false)
.required(false))
.arg(Arg::with_name("num")
.short("N")
.long("num")
.value_name("num")
.help("Max number of results")
.required(false)
.takes_value(true))
.arg(Arg::with_name("page")
.short("p")
.long("page")
.value_name("page")
.help("Results page (starts at 1)")
.required(false)
.takes_value(true))
.arg(Arg::with_name("seqid")
.short("S")
.long("seqid")
.value_name("seqid")
.help("Specific sequence id or substring")
.required(false)
.takes_value(true))
.arg(Arg::with_name("instruments")
.short("i")
.long("instruments")
.value_name("instruments")
.help("List camera instrument and exit")
.takes_value(false)
.required(false))
.arg(Arg::with_name(constants::param::PARAM_ONLY_NEW)
.short(constants::param::PARAM_ONLY_NEW_SHORT)
.help("Only new images. Skipped processed images."))
.get_matches();
let instruments = nsyt::remote::make_instrument_map();
if matches.is_present(constants::param::PARAM_VERBOSE) {
print::set_verbose(true);
}
if matches.is_present("instruments") {
instruments.print_instruments();
process::exit(0);
}
let mut num_per_page = 100;
let mut page = None;
let mut minsol = 1000000;
let mut maxsol = -1;
let mut sol = -1;
let mut thumbnails = false;
let mut search = "";
let mut list_only = false;
let only_new = matches.is_present(constants::param::PARAM_ONLY_NEW);
let mut camera_inputs: Vec<&str> = Vec::default();
if matches.is_present("camera") {
camera_inputs = matches.values_of("camera").unwrap().collect();
}
let camera_ids_res = instruments.find_remote_instrument_names_fromlist(&camera_inputs);
let cameras = match camera_ids_res {
Err(_e) => {
eprintln!("Invalid camera instrument(s) specified");
process::exit(1);
},
Ok(v) => v,
};
if matches.is_present("thumbnails") {
thumbnails = true;
}
if matches.is_present("list") {
list_only = true;
}
if matches.is_present("seqid") {
search = matches.value_of("seqid").unwrap();
}
if matches.is_present("num") {
let s = matches.value_of("num").unwrap();
if util::string_is_valid_f32(&s) {
num_per_page = s.parse::<i32>().unwrap();
} else {
eprintln!("Error: Invalid number specified");
process::exit(1);
}
}
if matches.is_present("page") {
let s = matches.value_of("page").unwrap();
if util::string_is_valid_f32(&s) {
page = Some(s.parse::<i32>().unwrap() - 1);
} else {
eprintln!("Error: Invalid number specified");
process::exit(1);
}
}
if matches.is_present("minsol") {
let s = matches.value_of("minsol").unwrap();
if util::string_is_valid_f32(&s) {
minsol = s.parse::<i32>().unwrap();
} else {
eprintln!("Error: Invalid number specified");
process::exit(1);
}
}
if matches.is_present("maxsol") {
let s = matches.value_of("maxsol").unwrap();
if util::string_is_valid_f32(&s) {
maxsol = s.parse::<i32>().unwrap();
} else {
eprintln!("Error: Invalid number specified");
process::exit(1);
}
}
if matches.is_present("sol") {
let s = matches.value_of("sol").unwrap();
if util::string_is_valid_f32(&s) {
sol = s.parse::<i32>().unwrap();
} else {
eprintln!("Error: Invalid number specified");
process::exit(1);
}
}
if sol >= 0 {
minsol = sol;
maxsol = sol;
}
nsyt::remote::print_header();
match nsyt::remote::remote_fetch(&cameras, num_per_page, page, minsol, maxsol, thumbnails, list_only, search, only_new) {
Ok(c) => println!("{} images found", c),
Err(e) => eprintln!("Error: {}", e)
}
}
| 32.261905 | 125 | 0.469225 |
692d642d12d31287433bf675ee1996ae63f2531e | 2,285 | use crate::client_died_error_holder::DiedType;
use crate::client_died_error_holder::SomethingDiedErrorHolder;
use futures::channel::mpsc;
use futures::channel::mpsc::UnboundedReceiver;
use futures::channel::mpsc::UnboundedSender;
use futures::stream::Stream;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
/// A message that must be told about the channel's failure if it is dropped
/// without being delivered (see `DeathAwareReceiver`'s `Drop` impl).
pub(crate) trait ErrorAwareDrop {
    /// Marker selecting which component's death is reported for this message.
    type DiedType: DiedType;
    /// Consumes the message, delivering `error` to whoever awaits its result.
    fn drop_with_error(self, error: crate::Error);
}
/// Sending half of a `death_aware_channel`: wraps an unbounded sender
/// together with the shared death-error holder so failed sends can report
/// the reason the peer died instead of a generic "disconnected" error.
pub(crate) struct DeathAwareSender<T: ErrorAwareDrop> {
    // Underlying unbounded mpsc sender.
    tx: UnboundedSender<T>,
    // Shared holder recording why the connection/peer died.
    conn_died_error_holder: SomethingDiedErrorHolder<T::DiedType>,
}
/// Receiving half of a `death_aware_channel`: on drop it drains any queued
/// messages and notifies each with the recorded death error (see the `Drop`
/// impl below).
pub(crate) struct DeathAwareReceiver<T: ErrorAwareDrop> {
    // Underlying unbounded mpsc receiver.
    rx: UnboundedReceiver<T>,
    // Shared holder recording why the connection/peer died.
    conn_died_error_holder: SomethingDiedErrorHolder<T::DiedType>,
}
impl<T: ErrorAwareDrop> Drop for DeathAwareReceiver<T> {
    fn drop(&mut self) {
        // Stop accepting new messages, then drain everything already queued,
        // handing each stranded message the recorded death error so pending
        // callers are failed explicitly rather than silently forgotten.
        self.rx.close();
        while let Ok(Some(m)) = self.rx.try_next() {
            m.drop_with_error(self.conn_died_error_holder.error());
        }
    }
}
// Manual impl: #[derive(Clone)] would require `T: Clone`, but only the
// sender handle and the error holder need cloning, not the message type.
impl<T: ErrorAwareDrop> Clone for DeathAwareSender<T> {
    fn clone(&self) -> Self {
        DeathAwareSender {
            tx: self.tx.clone(),
            conn_died_error_holder: self.conn_died_error_holder.clone(),
        }
    }
}
impl<T: ErrorAwareDrop> DeathAwareSender<T> {
    /// Sends `msg`; on failure returns the message itself together with the
    /// recorded death error, letting the caller recover/dispose of it.
    pub fn unbounded_send_recover(&self, msg: T) -> Result<(), (T, crate::Error)> {
        self.tx
            .unbounded_send(msg)
            // Replace mpsc's generic SendError with the actual death reason.
            .map_err(|e| (e.into_inner(), self.conn_died_error_holder.error()))
    }
    /// Sends `msg`, discarding it on failure and returning only the error.
    pub fn unbounded_send(&self, msg: T) -> crate::Result<()> {
        self.unbounded_send_recover(msg).map_err(|(_, e)| e)
    }
}
impl<T: ErrorAwareDrop> Stream for DeathAwareReceiver<T> {
    type Item = T;
    // Delegates straight to the inner unbounded receiver; the death-aware
    // behavior only kicks in on drop.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T>> {
        Pin::new(&mut self.rx).poll_next(cx)
    }
}
/// Creates an unbounded channel whose two halves share `conn_died_error_holder`,
/// so messages stranded at either end report the connection's death error:
/// the sender on failed sends, the receiver when draining on drop.
pub(crate) fn death_aware_channel<T: ErrorAwareDrop>(
    conn_died_error_holder: SomethingDiedErrorHolder<T::DiedType>,
) -> (DeathAwareSender<T>, DeathAwareReceiver<T>) {
    let (tx, rx) = mpsc::unbounded();
    let tx = DeathAwareSender {
        tx,
        conn_died_error_holder: conn_died_error_holder.clone(),
    };
    let rx = DeathAwareReceiver {
        rx,
        conn_died_error_holder,
    };
    (tx, rx)
}
| 28.5625 | 85 | 0.66302 |
76f0c3d20a9570ffede219dd0aedba9e1c044b01 | 6,202 | extern crate clap;
extern crate keepass;
extern crate rpassword;
extern crate termcolor;
mod diff;
use clap::{App, Arg};
use diff::{group::Group, Diff, DiffDisplay};
use keepass::{result::Error, result::Result, Database};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use std::path::Path;
use std::{fs::File, io::Read};
/// CLI entry point: parses the arguments, resolves the credentials for both
/// `.kdbx` files, opens them and prints the recursive diff of their groups.
///
/// Returns `Ok(())` even when only the usage hint is printed; database-open
/// failures currently abort via `expect` (unchanged behavior).
fn main() -> Result<()> {
    let matches = App::new("keepass-diff")
        .version("0.3.0")
        .about("Shows differences between two .kdbx files")
        .author("Joern Bernhardt")
        .arg(
            Arg::with_name("INPUT-A")
                .help("Sets the first file")
                .required(true)
                .index(1),
        )
        .arg(
            Arg::with_name("INPUT-B")
                .help("Sets the second file")
                .required(true)
                .index(2),
        )
        .arg(
            Arg::with_name("no-color")
                .short("C")
                .long("no-color")
                .help("Disables color output")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("password-a")
                .long("password-a")
                .help("Sets the password for the first file (will be asked for if omitted)")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("password-b")
                .long("password-b")
                .help("Sets the password for the second file (will be asked for if omitted)")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("passwords")
                .long("passwords")
                .help("Sets the password for both files (if it's the same for both files)")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("no-password-a")
                .long("no-password-a")
                .help("Sets no password for the first file (and will not ask for it)")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("no-password-b")
                .long("no-password-b")
                .help("Sets no password for the second file (and will not ask for it)")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("no-passwords")
                .long("no-passwords")
                .help("Sets no password for both files (and will not ask for both files)")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("keyfile-a")
                .long("keyfile-a")
                .help("Sets the key file for the first file")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("keyfile-b")
                .long("keyfile-b")
                .help("Sets the key file for the second file")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("keyfiles")
                .long("keyfiles")
                .help("Sets the same key file for both files (keyfile-a and keyfile-b would take precedence if set as well)")
                .takes_value(true),
        )
        .get_matches();
    match (matches.value_of("INPUT-A"), matches.value_of("INPUT-B")) {
        (Some(file_a), Some(file_b)) => {
            // The password-resolution rules are identical for both files, so
            // they share one helper instead of two duplicated match blocks.
            let pass_a = resolve_password(
                matches.value_of("password-a"),
                matches.value_of("passwords"),
                matches.is_present("no-password-a"),
                matches.is_present("no-passwords"),
                file_a,
            );
            let pass_b = resolve_password(
                matches.value_of("password-b"),
                matches.value_of("passwords"),
                matches.is_present("no-password-b"),
                matches.is_present("no-passwords"),
                file_b,
            );
            // A per-file key file takes precedence over the shared --keyfiles.
            let keyfile_a: Option<&str> = matches
                .value_of("keyfile-a")
                .or(matches.value_of("keyfiles"));
            let keyfile_b: Option<&str> = matches
                .value_of("keyfile-b")
                .or(matches.value_of("keyfiles"));
            let use_color: bool = !matches.is_present("no-color");
            let db_a = kdbx_to_group(file_a, pass_a, keyfile_a).expect("Error opening database A");
            let db_b = kdbx_to_group(file_b, pass_b, keyfile_b).expect("Error opening database B");
            let delta = db_a.diff(&db_b);
            println!(
                "{}",
                DiffDisplay {
                    inner: delta,
                    depth: 0,
                    use_color
                }
            );
        }
        _ => println!("Need two .kdbx files as arguments"),
    }
    Ok(())
}

/// Resolves the password for `file` from the CLI flags: a file-specific
/// password wins over the shared one, either "no password" flag suppresses
/// prompting, otherwise the user is asked interactively. An empty interactive
/// answer — or a prompt error — yields `None`.
fn resolve_password(
    specific: Option<&str>,
    shared: Option<&str>,
    no_specific: bool,
    no_shared: bool,
    file: &str,
) -> Option<String> {
    match (specific, shared, no_specific, no_shared) {
        (Some(password), _, _, _) => Some(String::from(password)),
        (_, Some(password), _, _) => Some(String::from(password)),
        (_, _, true, _) => None,
        (_, _, _, true) => None,
        _ => {
            print!("Password for file {}: ", file);
            // NOTE(review): relies on prompt_password_stdout flushing stdout
            // so the prompt above is visible before the echo-free read —
            // matches the original code's behavior.
            rpassword::prompt_password_stdout("")
                .map(|s| if s.is_empty() { None } else { Some(s) })
                .unwrap_or(None)
        }
    }
}
pub fn kdbx_to_group(
file: &str,
password: Option<String>,
keyfile_path: Option<&str>,
) -> Result<Group> {
let mut keyfile = keyfile_path.map(|path| File::open(Path::new(path)).unwrap());
File::open(Path::new(file))
.map_err(|e| Error::from(e))
.and_then(|mut db_file| {
let db = Database::open(
&mut db_file,
password.as_ref().map(|s| s.as_str()),
keyfile.as_mut().map(|f| f as &mut dyn Read),
);
db
})
.map(|db: Database| Group::from_keepass(&db.root))
}
/// Sets the foreground color of stdout; `None` resets it to the default.
pub fn set_fg(color: Option<Color>) {
    let mut spec = ColorSpec::new();
    spec.set_fg(color);
    StandardStream::stdout(ColorChoice::Always)
        .set_color(&spec)
        .expect("Setting colors in your console failed. Please use the --no-color flag to disable colors if the error persists.");
}
| 33.344086 | 174 | 0.516446 |
90b67f936369dc33848a332828744363c54eb085 | 323,713 | pub use crate::common::*;
use crate::utils;
use crate::{b_2 as wsnt, soap_envelope as soapenv, validate::Validate, xmlmime as xmime, xop};
use macro_utils::*;
use std::io::{Read, Write};
use std::str::FromStr;
use xsd_types::types as xs;
use yaserde::{YaDeserialize, YaSerialize};
// Base class for physical entities like inputs and outputs.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DeviceEntity {
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for DeviceEntity {}
// User readable name. Length up to 64 characters.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Name(pub String);

impl Validate for Name {
    /// Enforces the schema's 64-character maximum length for a `Name`.
    fn validate(&self) -> Result<(), String> {
        // Use the integer literal directly instead of the generated
        // `"64".parse().unwrap()`, which parsed a constant at runtime and
        // carried a needless panic path. Error text is unchanged.
        if self.0.len() > 64 {
            return Err(format!(
                "MaxLength validation error. \nExpected: 0 length <= 64 \nActual: 0 length == {}",
                self.0.len()
            ));
        }
        Ok(())
    }
}
// Rectangle defined by lower left corner position and size. Units are pixel.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IntRectangle {
#[yaserde(attribute, rename = "x")]
pub x: i32,
#[yaserde(attribute, rename = "y")]
pub y: i32,
#[yaserde(attribute, rename = "width")]
pub width: i32,
#[yaserde(attribute, rename = "height")]
pub height: i32,
}
impl Validate for IntRectangle {}
// Range of a rectangle. The rectangle itself is defined by lower left corner
// position and size. Units are pixel.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IntRectangleRange {
// Range of X-axis.
#[yaserde(prefix = "tt", rename = "XRange")]
pub x_range: IntRange,
// Range of Y-axis.
#[yaserde(prefix = "tt", rename = "YRange")]
pub y_range: IntRange,
// Range of width.
#[yaserde(prefix = "tt", rename = "WidthRange")]
pub width_range: IntRange,
// Range of height.
#[yaserde(prefix = "tt", rename = "HeightRange")]
pub height_range: IntRange,
}
impl Validate for IntRectangleRange {}
// Range of values greater equal Min value and less equal Max value.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FloatRange {
#[yaserde(prefix = "tt", rename = "Min")]
pub min: f64,
#[yaserde(prefix = "tt", rename = "Max")]
pub max: f64,
}
impl Validate for FloatRange {}
// Range of duration greater equal Min duration and less equal Max duration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DurationRange {
#[yaserde(prefix = "tt", rename = "Min")]
pub min: xs::Duration,
#[yaserde(prefix = "tt", rename = "Max")]
pub max: xs::Duration,
}
impl Validate for DurationRange {}
// List of values.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IntList {
#[yaserde(prefix = "tt", rename = "Items")]
pub items: Vec<i32>,
}
impl Validate for IntList {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct IntAttrList(pub Vec<i32>);
impl Validate for IntAttrList {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct FloatAttrList(pub Vec<f64>);
impl Validate for FloatAttrList {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct StringAttrList(pub Vec<String>);
impl Validate for StringAttrList {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct ReferenceTokenList(pub Vec<ReferenceToken>);
impl Validate for ReferenceTokenList {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FloatList {
#[yaserde(prefix = "tt", rename = "Items")]
pub items: Vec<f64>,
}
impl Validate for FloatList {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct StringItems {
#[yaserde(prefix = "tt", rename = "Item")]
pub item: Vec<String>,
}
impl Validate for StringItems {}
// pub type StringList = StringAttrList;
// pub type IntRange = IntRange;
// pub type IntList = IntAttrList;
// pub type FloatRange = FloatRange;
// pub type FloatList = FloatAttrList;
// pub type DurationRange = DurationRange;
// pub type IntRectangleRange = IntRectangleRange;
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnyHolder {}
impl Validate for AnyHolder {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSource {
// Frame rate in frames per second.
#[yaserde(prefix = "tt", rename = "Framerate")]
pub framerate: f64,
// Horizontal and vertical resolution
#[yaserde(prefix = "tt", rename = "Resolution")]
pub resolution: VideoResolution,
// Optional configuration of the image sensor.
#[yaserde(prefix = "tt", rename = "Imaging")]
pub imaging: Option<ImagingSettings>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceExtension>,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoSource {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceExtension {
// Optional configuration of the image sensor. To be used if imaging service
// 2.00 is supported.
#[yaserde(prefix = "tt", rename = "Imaging")]
pub imaging: Option<ImagingSettings20>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceExtension2>,
}
impl Validate for VideoSourceExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceExtension2 {}
impl Validate for VideoSourceExtension2 {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioSource {
// number of available audio channels. (1: mono, 2: stereo)
#[yaserde(prefix = "tt", rename = "Channels")]
pub channels: i32,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for AudioSource {}
// A media profile consists of a set of media configurations. Media profiles are
// used by a client
// to configure properties of a media stream from an NVT.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Profile {
// User readable name of the profile.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Optional configuration of the Video input.
#[yaserde(prefix = "tt", rename = "VideoSourceConfiguration")]
pub video_source_configuration: Option<VideoSourceConfiguration>,
// Optional configuration of the Audio input.
#[yaserde(prefix = "tt", rename = "AudioSourceConfiguration")]
pub audio_source_configuration: Option<AudioSourceConfiguration>,
// Optional configuration of the Video encoder.
#[yaserde(prefix = "tt", rename = "VideoEncoderConfiguration")]
pub video_encoder_configuration: Option<VideoEncoderConfiguration>,
// Optional configuration of the Audio encoder.
#[yaserde(prefix = "tt", rename = "AudioEncoderConfiguration")]
pub audio_encoder_configuration: Option<AudioEncoderConfiguration>,
// Optional configuration of the video analytics module and rule engine.
#[yaserde(prefix = "tt", rename = "VideoAnalyticsConfiguration")]
pub video_analytics_configuration: Option<VideoAnalyticsConfiguration>,
// Optional configuration of the pan tilt zoom unit.
#[yaserde(prefix = "tt", rename = "PTZConfiguration")]
pub ptz_configuration: Option<Ptzconfiguration>,
// Optional configuration of the metadata stream.
#[yaserde(prefix = "tt", rename = "MetadataConfiguration")]
pub metadata_configuration: Option<MetadataConfiguration>,
// Extensions defined in ONVIF 2.0
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ProfileExtension>,
// Unique identifier of the profile.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
// A value of true signals that the profile cannot be deleted. Default is
// false.
#[yaserde(attribute, rename = "fixed")]
pub fixed: Option<bool>,
}
impl Validate for Profile {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ProfileExtension {
// Optional configuration of the Audio output.
#[yaserde(prefix = "tt", rename = "AudioOutputConfiguration")]
pub audio_output_configuration: Option<AudioOutputConfiguration>,
// Optional configuration of the Audio decoder.
#[yaserde(prefix = "tt", rename = "AudioDecoderConfiguration")]
pub audio_decoder_configuration: Option<AudioDecoderConfiguration>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ProfileExtension2>,
}
impl Validate for ProfileExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ProfileExtension2 {}
impl Validate for ProfileExtension2 {}
// pub type VideoSourceConfiguration = VideoSourceConfiguration;
// pub type AudioSourceConfiguration = AudioSourceConfiguration;
// pub type VideoEncoderConfiguration = VideoEncoderConfiguration;
// pub type AudioEncoderConfiguration = AudioEncoderConfiguration;
// pub type VideoAnalyticsConfiguration = VideoAnalyticsConfiguration;
// pub type Ptzconfiguration = Ptzconfiguration;
// pub type MetadataConfiguration = MetadataConfiguration;
// pub type AudioOutputConfiguration = AudioOutputConfiguration;
// pub type AudioDecoderConfiguration = AudioDecoderConfiguration;
// Base type defining the common properties of a configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ConfigurationEntity {
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for ConfigurationEntity {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfiguration {
// Reference to the physical input.
#[yaserde(prefix = "tt", rename = "SourceToken")]
pub source_token: ReferenceToken,
// Rectangle specifying the Video capturing area. The capturing area shall
// not be larger than the whole Video source area.
#[yaserde(prefix = "tt", rename = "Bounds")]
pub bounds: IntRectangle,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceConfigurationExtension>,
// Readonly parameter signalling Source configuration's view mode, for
// devices supporting different view modes as defined in tt:viewModes.
#[yaserde(attribute, rename = "ViewMode")]
pub view_mode: Option<String>,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoSourceConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfigurationExtension {
// Optional element to configure rotation of captured image.
// What resolutions a device supports shall be unaffected by the Rotate
// parameters.
#[yaserde(prefix = "tt", rename = "Rotate")]
pub rotate: Option<Rotate>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceConfigurationExtension2>,
}
impl Validate for VideoSourceConfigurationExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfigurationExtension2 {
// Optional element describing the geometric lens distortion. Multiple
// instances for future variable lens support.
#[yaserde(prefix = "tt", rename = "LensDescription")]
pub lens_description: Vec<LensDescription>,
// Optional element describing the scene orientation in the camera’s field
// of view.
#[yaserde(prefix = "tt", rename = "SceneOrientation")]
pub scene_orientation: SceneOrientation,
}
impl Validate for VideoSourceConfigurationExtension2 {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Rotate {
// Parameter to enable/disable Rotation feature.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: RotateMode,
// Optional parameter to configure how much degree of clockwise rotation of
// image for On mode. Omitting this parameter for On mode means 180 degree
// rotation.
#[yaserde(prefix = "tt", rename = "Degree")]
pub degree: Option<i32>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<RotateExtension>,
}
impl Validate for Rotate {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RotateExtension {}
impl Validate for RotateExtension {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum RotateMode {
// Enable the Rotate feature. Degree of rotation is specified Degree
// parameter.
#[yaserde(rename = "OFF")]
Off,
// Disable the Rotate feature.
#[yaserde(rename = "ON")]
On,
// Rotate feature is automatically activated by the device.
#[yaserde(rename = "AUTO")]
Auto,
__Unknown__(String),
}
impl Default for RotateMode {
fn default() -> RotateMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for RotateMode {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LensProjection {
// Angle of incidence.
#[yaserde(prefix = "tt", rename = "Angle")]
pub angle: f64,
// Mapping radius as a consequence of the emergent angle.
#[yaserde(prefix = "tt", rename = "Radius")]
pub radius: f64,
// Optional ray absorption at the given angle due to vignetting. A value of
// one means no absorption.
#[yaserde(prefix = "tt", rename = "Transmittance")]
pub transmittance: Option<f64>,
}
impl Validate for LensProjection {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LensOffset {
// Optional horizontal offset of the lens center in normalized coordinates.
#[yaserde(attribute, rename = "x")]
pub x: Option<f64>,
// Optional vertical offset of the lens center in normalized coordinates.
#[yaserde(attribute, rename = "y")]
pub y: Option<f64>,
}
impl Validate for LensOffset {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LensDescription {
// Offset of the lens center to the imager center in normalized coordinates.
#[yaserde(prefix = "tt", rename = "Offset")]
pub offset: LensOffset,
// Radial description of the projection characteristics. The resulting curve
// is defined by the B-Spline interpolation
// over the given elements. The element for Radius zero shall not be
// provided. The projection points shall be ordered with ascending Radius.
// Items outside the last projection Radius shall be assumed to be invisible
// (black).
#[yaserde(prefix = "tt", rename = "Projection")]
pub projection: Vec<LensProjection>,
// Compensation of the x coordinate needed for the ONVIF normalized
// coordinate system.
#[yaserde(prefix = "tt", rename = "XFactor")]
pub x_factor: f64,
// Optional focal length of the optical system.
#[yaserde(attribute, rename = "FocalLength")]
pub focal_length: Option<f64>,
}
impl Validate for LensDescription {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfigurationOptions {
// Supported range for the capturing area.
// Device that does not support cropped streaming shall express BoundsRange
// option as mentioned below
// BoundsRange->XRange and BoundsRange->YRange with same Min/Max values
// HeightRange and WidthRange Min/Max values same as VideoSource Height and
// Width Limits.
#[yaserde(prefix = "tt", rename = "BoundsRange")]
pub bounds_range: IntRectangleRange,
// List of physical inputs.
#[yaserde(prefix = "tt", rename = "VideoSourceTokensAvailable")]
pub video_source_tokens_available: Vec<ReferenceToken>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceConfigurationOptionsExtension>,
// Maximum number of profiles.
#[yaserde(attribute, rename = "MaximumNumberOfProfiles")]
pub maximum_number_of_profiles: Option<i32>,
}
impl Validate for VideoSourceConfigurationOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfigurationOptionsExtension {
// Options of parameters for Rotation feature.
#[yaserde(prefix = "tt", rename = "Rotate")]
pub rotate: Option<RotateOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoSourceConfigurationOptionsExtension2>,
}
impl Validate for VideoSourceConfigurationOptionsExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoSourceConfigurationOptionsExtension2 {
// Scene orientation modes supported by the device for this configuration.
#[yaserde(prefix = "tt", rename = "SceneOrientationMode")]
pub scene_orientation_mode: Vec<SceneOrientationMode>,
}
impl Validate for VideoSourceConfigurationOptionsExtension2 {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RotateOptions {
// Supported options of Rotate mode parameter.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<RotateMode>,
// List of supported degree value for rotation.
#[yaserde(prefix = "tt", rename = "DegreeList")]
pub degree_list: Option<IntList>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<RotateOptionsExtension>,
// After setting the rotation, if a device starts to reboot this value is
// true.
// If a device can handle rotation setting without rebooting this value is
// false.
#[yaserde(attribute, rename = "Reboot")]
pub reboot: Option<bool>,
}
impl Validate for RotateOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RotateOptionsExtension {}
impl Validate for RotateOptionsExtension {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum SceneOrientationMode {
#[yaserde(rename = "MANUAL")]
Manual,
#[yaserde(rename = "AUTO")]
Auto,
__Unknown__(String),
}
impl Default for SceneOrientationMode {
fn default() -> SceneOrientationMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for SceneOrientationMode {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum SceneOrientationOption {
Below,
Horizon,
Above,
__Unknown__(String),
}
impl Default for SceneOrientationOption {
fn default() -> SceneOrientationOption {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for SceneOrientationOption {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SceneOrientation {
// Parameter to assign the way the camera determines the scene orientation.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: SceneOrientationMode,
// Assigned or determined scene orientation based on the Mode. When
// assigning the Mode to AUTO, this field
// is optional and will be ignored by the device. When assigning the Mode to
// MANUAL, this field is required
// and the device will return an InvalidArgs fault if missing.
#[yaserde(prefix = "tt", rename = "Orientation")]
pub orientation: Option<String>,
}
impl Validate for SceneOrientation {}
// Source view modes supported by device.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct ViewModes(pub String);
impl Validate for ViewModes {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoderConfiguration {
// Used video codec, either Jpeg, H.264 or Mpeg4
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: VideoEncoding,
// Configured video resolution
#[yaserde(prefix = "tt", rename = "Resolution")]
pub resolution: VideoResolution,
// Relative value for the video quantizers and the quality of the video. A
// high value within supported quality range means higher quality
#[yaserde(prefix = "tt", rename = "Quality")]
pub quality: f64,
// Optional element to configure rate control related parameters.
#[yaserde(prefix = "tt", rename = "RateControl")]
pub rate_control: Option<VideoRateControl>,
// Optional element to configure Mpeg4 related parameters.
#[yaserde(prefix = "tt", rename = "MPEG4")]
pub mpeg4: Option<Mpeg4Configuration>,
// Optional element to configure H.264 related parameters.
#[yaserde(prefix = "tt", rename = "H264")]
pub h264: Option<H264Configuration>,
// Defines the multicast settings that could be used for video streaming.
#[yaserde(prefix = "tt", rename = "Multicast")]
pub multicast: MulticastConfiguration,
// The rtsp session timeout for the related video stream
#[yaserde(prefix = "tt", rename = "SessionTimeout")]
pub session_timeout: xs::Duration,
// A value of true indicates that frame rate is a fixed value rather than an
// upper limit,
// and that the video encoder shall prioritize frame rate over all other
// adaptable
// configuration values such as bitrate. Default is false.
#[yaserde(attribute, rename = "GuaranteedFrameRate")]
pub guaranteed_frame_rate: Option<bool>,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoEncoderConfiguration {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum VideoEncoding {
#[yaserde(rename = "JPEG")]
Jpeg,
#[yaserde(rename = "MPEG4")]
Mpeg4,
H264,
__Unknown__(String),
}
impl Default for VideoEncoding {
fn default() -> VideoEncoding {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for VideoEncoding {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Mpeg4Profile {
#[yaserde(rename = "SP")]
Sp,
#[yaserde(rename = "ASP")]
Asp,
__Unknown__(String),
}
impl Default for Mpeg4Profile {
fn default() -> Mpeg4Profile {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for Mpeg4Profile {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum H264Profile {
Baseline,
Main,
Extended,
High,
__Unknown__(String),
}
impl Default for H264Profile {
fn default() -> H264Profile {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for H264Profile {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoResolution {
// Number of the columns of the Video image.
#[yaserde(prefix = "tt", rename = "Width")]
pub width: i32,
// Number of the lines of the Video image.
#[yaserde(prefix = "tt", rename = "Height")]
pub height: i32,
}
impl Validate for VideoResolution {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoRateControl {
// Maximum output framerate in fps. If an EncodingInterval is provided the
// resulting encoded framerate will be reduced by the given factor.
#[yaserde(prefix = "tt", rename = "FrameRateLimit")]
pub frame_rate_limit: i32,
// Interval at which images are encoded and transmitted. (A value of 1 means
// that every frame is encoded, a value of 2 means that every 2nd frame is
// encoded ...)
#[yaserde(prefix = "tt", rename = "EncodingInterval")]
pub encoding_interval: i32,
// the maximum output bitrate in kbps
#[yaserde(prefix = "tt", rename = "BitrateLimit")]
pub bitrate_limit: i32,
}
impl Validate for VideoRateControl {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Mpeg4Configuration {
// Determines the interval in which the I-Frames will be coded. An entry of
// 1 indicates I-Frames are continuously generated. An entry of 2 indicates
// that every 2nd image is an I-Frame, and 3 only every 3rd frame, etc. The
// frames in between are coded as P or B Frames.
#[yaserde(prefix = "tt", rename = "GovLength")]
pub gov_length: i32,
// the Mpeg4 profile, either simple profile (SP) or advanced simple profile
// (ASP)
#[yaserde(prefix = "tt", rename = "Mpeg4Profile")]
pub mpeg_4_profile: Mpeg4Profile,
}
impl Validate for Mpeg4Configuration {}
// H.264-specific encoder settings: GOV length and profile.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct H264Configuration {
// Group of Video frames length. Determines typically the interval in which
// the I-Frames will be coded. An entry of 1 indicates I-Frames are
// continuously generated. An entry of 2 indicates that every 2nd image is
// an I-Frame, and 3 only every 3rd frame, etc. The frames in between are
// coded as P or B Frames.
#[yaserde(prefix = "tt", rename = "GovLength")]
pub gov_length: i32,
// the H.264 profile, either baseline, main, extended or high
#[yaserde(prefix = "tt", rename = "H264Profile")]
pub h264_profile: H264Profile,
}
impl Validate for H264Configuration {}
// Capability ranges a device advertises for its (Media1) video encoder
// configurations; each codec-specific member is present only if supported.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoderConfigurationOptions {
// Range of the quality values. A high value means higher quality.
#[yaserde(prefix = "tt", rename = "QualityRange")]
pub quality_range: IntRange,
// Optional JPEG encoder settings ranges (See also Extension element).
#[yaserde(prefix = "tt", rename = "JPEG")]
pub jpeg: Option<JpegOptions>,
// Optional MPEG-4 encoder settings ranges (See also Extension element).
#[yaserde(prefix = "tt", rename = "MPEG4")]
pub mpeg4: Option<Mpeg4Options>,
// Optional H.264 encoder settings ranges (See also Extension element).
#[yaserde(prefix = "tt", rename = "H264")]
pub h264: Option<H264Options>,
// Vendor/spec extension point carrying the "2" variants of the codec options.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoEncoderOptionsExtension>,
// Indicates the support for the GuaranteedFrameRate attribute on the
// VideoEncoderConfiguration element.
#[yaserde(attribute, rename = "GuaranteedFrameRateSupported")]
pub guaranteed_frame_rate_supported: Option<bool>,
}
impl Validate for VideoEncoderConfigurationOptions {}
// Extension of VideoEncoderConfigurationOptions; the "Options2" members add
// bitrate ranges to the per-codec option sets.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoderOptionsExtension {
// Optional JPEG encoder settings ranges.
#[yaserde(prefix = "tt", rename = "JPEG")]
pub jpeg: Option<JpegOptions2>,
// Optional MPEG-4 encoder settings ranges.
#[yaserde(prefix = "tt", rename = "MPEG4")]
pub mpeg4: Option<Mpeg4Options2>,
// Optional H.264 encoder settings ranges.
#[yaserde(prefix = "tt", rename = "H264")]
pub h264: Option<H264Options2>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoEncoderOptionsExtension2>,
}
impl Validate for VideoEncoderOptionsExtension {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoderOptionsExtension2 {}
impl Validate for VideoEncoderOptionsExtension2 {}
// Supported setting ranges for the JPEG encoder.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct JpegOptions {
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
}
impl Validate for JpegOptions {}
// Extended JPEG encoder option set: JpegOptions plus a bitrate range.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct JpegOptions2 {
// Supported range of encoded bitrate in kbps.
#[yaserde(prefix = "tt", rename = "BitrateRange")]
pub bitrate_range: IntRange,
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
}
impl Validate for JpegOptions2 {}
// Supported setting ranges for the MPEG-4 encoder.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Mpeg4Options {
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported group of Video frames length. This value typically corresponds
// to the I-Frame distance.
#[yaserde(prefix = "tt", rename = "GovLengthRange")]
pub gov_length_range: IntRange,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
// List of supported MPEG-4 profiles.
#[yaserde(prefix = "tt", rename = "Mpeg4ProfilesSupported")]
pub mpeg_4_profiles_supported: Vec<Mpeg4Profile>,
}
impl Validate for Mpeg4Options {}
// Extended MPEG-4 encoder option set: Mpeg4Options plus a bitrate range.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Mpeg4Options2 {
// Supported range of encoded bitrate in kbps.
#[yaserde(prefix = "tt", rename = "BitrateRange")]
pub bitrate_range: IntRange,
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported group of Video frames length. This value typically corresponds
// to the I-Frame distance.
#[yaserde(prefix = "tt", rename = "GovLengthRange")]
pub gov_length_range: IntRange,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
// List of supported MPEG-4 profiles.
#[yaserde(prefix = "tt", rename = "Mpeg4ProfilesSupported")]
pub mpeg_4_profiles_supported: Vec<Mpeg4Profile>,
}
impl Validate for Mpeg4Options2 {}
// Supported setting ranges for the H.264 encoder.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct H264Options {
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported group of Video frames length. This value typically corresponds
// to the I-Frame distance.
#[yaserde(prefix = "tt", rename = "GovLengthRange")]
pub gov_length_range: IntRange,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
// List of supported H.264 profiles.
#[yaserde(prefix = "tt", rename = "H264ProfilesSupported")]
pub h264_profiles_supported: Vec<H264Profile>,
}
impl Validate for H264Options {}
// Extended H.264 encoder option set: H264Options plus a bitrate range.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct H264Options2 {
// Supported range of encoded bitrate in kbps.
#[yaserde(prefix = "tt", rename = "BitrateRange")]
pub bitrate_range: IntRange,
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported group of Video frames length. This value typically corresponds
// to the I-Frame distance.
#[yaserde(prefix = "tt", rename = "GovLengthRange")]
pub gov_length_range: IntRange,
// Supported frame rate in fps (frames per second).
#[yaserde(prefix = "tt", rename = "FrameRateRange")]
pub frame_rate_range: IntRange,
// Supported encoding interval range. The encoding interval corresponds to
// the number of frames divided by the encoded frames. An encoding interval
// value of "1" means that all frames are encoded.
#[yaserde(prefix = "tt", rename = "EncodingIntervalRange")]
pub encoding_interval_range: IntRange,
// List of supported H.264 profiles.
#[yaserde(prefix = "tt", rename = "H264ProfilesSupported")]
pub h264_profiles_supported: Vec<H264Profile>,
}
impl Validate for H264Options2 {}
// Video Media Subtypes as referenced by IANA (without the leading "video/"
// Video Media Type). See also the IANA media types registry.
// Unrecognized XML values deserialize into __Unknown__(raw string) instead of
// failing, so round-tripping never loses data.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum VideoEncodingMimeNames {
#[yaserde(rename = "JPEG")]
Jpeg,
#[yaserde(rename = "MPV4-ES")]
Mpv4Es,
H264,
H265,
__Unknown__(String),
}
impl Default for VideoEncodingMimeNames {
fn default() -> VideoEncodingMimeNames {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for VideoEncodingMimeNames {}
// Video encoder profile names used by VideoEncoder2Configuration::profile;
// unrecognized XML values are preserved in __Unknown__ rather than rejected.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum VideoEncodingProfiles {
Simple,
AdvancedSimple,
Baseline,
Main,
Main10,
Extended,
High,
__Unknown__(String),
}
impl Default for VideoEncodingProfiles {
fn default() -> VideoEncodingProfiles {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for VideoEncodingProfiles {}
// Media2 video encoder configuration (tt:VideoEncoder2Configuration); the
// codec is identified by a MIME subtype string rather than a fixed enum.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoder2Configuration {
// Video Media Subtype for the video format. For definitions see
// tt:VideoEncodingMimeNames and the IANA media types registry.
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: String,
// Configured video resolution
#[yaserde(prefix = "tt", rename = "Resolution")]
pub resolution: VideoResolution2,
// Optional element to configure rate control related parameters.
#[yaserde(prefix = "tt", rename = "RateControl")]
pub rate_control: Option<VideoRateControl2>,
// Defines the multicast settings that could be used for video streaming.
#[yaserde(prefix = "tt", rename = "Multicast")]
pub multicast: Option<MulticastConfiguration>,
// Relative value for the video quantizers and the quality of the video. A
// high value within supported quality range means higher quality
#[yaserde(prefix = "tt", rename = "Quality")]
pub quality: f64,
// Group of Video frames length. Determines typically the interval in which
// the I-Frames will be coded. An entry of 1 indicates I-Frames are
// continuously generated. An entry of 2 indicates that every 2nd image is
// an I-Frame, and 3 only every 3rd frame, etc. The frames in between are
// coded as P or B Frames.
#[yaserde(attribute, rename = "GovLength")]
pub gov_length: Option<i32>,
// The encoder profile as defined in tt:VideoEncodingProfiles.
#[yaserde(attribute, rename = "Profile")]
pub profile: Option<String>,
// A value of true indicates that frame rate is a fixed value rather than an
// upper limit,
// and that the video encoder shall prioritize frame rate over all other
// adaptable
// configuration values such as bitrate. Default is false.
#[yaserde(attribute, rename = "GuaranteedFrameRate")]
pub guaranteed_frame_rate: Option<bool>,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoEncoder2Configuration {}
// Media2 variant of VideoResolution (same width/height fields, distinct
// schema type).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoResolution2 {
// Number of the columns of the Video image.
#[yaserde(prefix = "tt", rename = "Width")]
pub width: i32,
// Number of the lines of the Video image.
#[yaserde(prefix = "tt", rename = "Height")]
pub height: i32,
}
impl Validate for VideoResolution2 {}
// Media2 rate control: fractional frame rate, bitrate limit, and an optional
// constant-bitrate flag (no EncodingInterval, unlike VideoRateControl).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoRateControl2 {
// Desired frame rate in fps. The actual rate may be lower due to e.g.
// performance limitations.
#[yaserde(prefix = "tt", rename = "FrameRateLimit")]
pub frame_rate_limit: f64,
// the maximum output bitrate in kbps
#[yaserde(prefix = "tt", rename = "BitrateLimit")]
pub bitrate_limit: i32,
// Enforce constant bitrate.
#[yaserde(attribute, rename = "ConstantBitRate")]
pub constant_bit_rate: Option<bool>,
}
impl Validate for VideoRateControl2 {}
// Capability ranges for one supported Media2 video encoding (one instance
// per Encoding value the device supports).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoEncoder2ConfigurationOptions {
// Video Media Subtype for the video format. For definitions see
// tt:VideoEncodingMimeNames and the IANA media types registry.
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: String,
// Range of the quality values. A high value means higher quality.
#[yaserde(prefix = "tt", rename = "QualityRange")]
pub quality_range: FloatRange,
// List of supported image sizes.
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution2>,
// Supported range of encoded bitrate in kbps.
#[yaserde(prefix = "tt", rename = "BitrateRange")]
pub bitrate_range: IntRange,
// Exactly two values, which define the Lower and Upper bounds for the
// supported group of Video frames length. These values typically correspond
// to the I-Frame distance.
#[yaserde(attribute, rename = "GovLengthRange")]
pub gov_length_range: Option<IntAttrList>,
// List of supported target frame rates in fps (frames per second). The list
// shall be sorted with highest values first.
#[yaserde(attribute, rename = "FrameRatesSupported")]
pub frame_rates_supported: Option<FloatAttrList>,
// List of supported encoder profiles as defined in
// tt::VideoEncodingProfiles.
#[yaserde(attribute, rename = "ProfilesSupported")]
pub profiles_supported: Option<StringAttrList>,
// Signal whether enforcing constant bitrate is supported.
#[yaserde(attribute, rename = "ConstantBitRateSupported")]
pub constant_bit_rate_supported: Option<bool>,
// Indicates the support for the GuaranteedFrameRate attribute on the
// VideoEncoder2Configuration element.
#[yaserde(attribute, rename = "GuaranteedFrameRateSupported")]
pub guaranteed_frame_rate_supported: Option<bool>,
}
impl Validate for VideoEncoder2ConfigurationOptions {}
// Binds a media profile to a physical audio source via its token.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioSourceConfiguration {
// Token of the Audio Source the configuration applies to
#[yaserde(prefix = "tt", rename = "SourceToken")]
pub source_token: ReferenceToken,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for AudioSourceConfiguration {}
// Lists which audio sources an AudioSourceConfiguration may be bound to.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioSourceConfigurationOptions {
// Tokens of the audio source the configuration can be used for.
#[yaserde(prefix = "tt", rename = "InputTokensAvailable")]
pub input_tokens_available: Vec<ReferenceToken>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<AudioSourceOptionsExtension>,
}
impl Validate for AudioSourceConfigurationOptions {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioSourceOptionsExtension {}
impl Validate for AudioSourceOptionsExtension {}
// Media1 audio encoder configuration: codec, bitrate, sample rate and
// streaming (multicast/RTSP) parameters.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioEncoderConfiguration {
// Audio codec used for encoding the audio input (either G.711, G.726 or
// AAC)
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: AudioEncoding,
// The output bitrate in kbps.
#[yaserde(prefix = "tt", rename = "Bitrate")]
pub bitrate: i32,
// The output sample rate in kHz.
#[yaserde(prefix = "tt", rename = "SampleRate")]
pub sample_rate: i32,
// Defines the multicast settings that could be used for video streaming.
#[yaserde(prefix = "tt", rename = "Multicast")]
pub multicast: MulticastConfiguration,
// The rtsp session timeout for the related audio stream
#[yaserde(prefix = "tt", rename = "SessionTimeout")]
pub session_timeout: xs::Duration,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for AudioEncoderConfiguration {}
// Media1 audio codec identifiers; unrecognized XML values are preserved in
// __Unknown__ rather than rejected.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum AudioEncoding {
G711,
G726,
#[yaserde(rename = "AAC")]
Aac,
__Unknown__(String),
}
impl Default for AudioEncoding {
fn default() -> AudioEncoding {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for AudioEncoding {}
// Wrapper listing one AudioEncoderConfigurationOption per supported codec.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioEncoderConfigurationOptions {
// list of supported AudioEncoderConfigurations
#[yaserde(prefix = "tt", rename = "Options")]
pub options: Vec<AudioEncoderConfigurationOption>,
}
impl Validate for AudioEncoderConfigurationOptions {}
// Supported bitrates and sample rates for one audio codec.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioEncoderConfigurationOption {
// The encoding used for audio data (either G.711, G.726 or AAC)
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: AudioEncoding,
// List of supported bitrates in kbps for the specified Encoding
#[yaserde(prefix = "tt", rename = "BitrateList")]
pub bitrate_list: IntList,
// List of supported Sample Rates in kHz for the specified Encoding
#[yaserde(prefix = "tt", rename = "SampleRateList")]
pub sample_rate_list: IntList,
}
impl Validate for AudioEncoderConfigurationOption {}
// Audio Media Subtypes as referenced by IANA (without the leading "audio/"
// Audio Media Type). See also the IANA media types registry.
// Unrecognized XML values are preserved in __Unknown__ rather than rejected.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum AudioEncodingMimeNames {
#[yaserde(rename = "PCMU")]
Pcmu,
G726,
#[yaserde(rename = "MP4A-LATM")]
Mp4ALatm,
#[yaserde(rename = "mpeg4-generic")]
Mpeg4Generic,
__Unknown__(String),
}
impl Default for AudioEncodingMimeNames {
fn default() -> AudioEncodingMimeNames {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for AudioEncodingMimeNames {}
// Media2 audio encoder configuration; the codec is a MIME subtype string
// (see AudioEncodingMimeNames) instead of the fixed AudioEncoding enum.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioEncoder2Configuration {
// Audio Media Subtype for the audio format. For definitions see
// tt:AudioEncodingMimeNames and the IANA media types registry.
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: String,
// Optional multicast configuration of the audio stream.
#[yaserde(prefix = "tt", rename = "Multicast")]
pub multicast: Option<MulticastConfiguration>,
// The output bitrate in kbps.
#[yaserde(prefix = "tt", rename = "Bitrate")]
pub bitrate: i32,
// The output sample rate in kHz.
#[yaserde(prefix = "tt", rename = "SampleRate")]
pub sample_rate: i32,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for AudioEncoder2Configuration {}
// Capability ranges for one supported Media2 audio encoding.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioEncoder2ConfigurationOptions {
// Audio Media Subtype for the audio format. For definitions see
// tt:AudioEncodingMimeNames and the IANA media types registry.
#[yaserde(prefix = "tt", rename = "Encoding")]
pub encoding: String,
// List of supported bitrates in kbps for the specified Encoding
#[yaserde(prefix = "tt", rename = "BitrateList")]
pub bitrate_list: IntList,
// List of supported Sample Rates in kHz for the specified Encoding
#[yaserde(prefix = "tt", rename = "SampleRateList")]
pub sample_rate_list: IntList,
}
impl Validate for AudioEncoder2ConfigurationOptions {}
// Pairs an analytics engine configuration with a rule engine configuration
// for a media profile.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoAnalyticsConfiguration {
// Settings for the low-level analytics modules.
#[yaserde(prefix = "tt", rename = "AnalyticsEngineConfiguration")]
pub analytics_engine_configuration: AnalyticsEngineConfiguration,
// Settings for the rules evaluated on top of the analytics output.
#[yaserde(prefix = "tt", rename = "RuleEngineConfiguration")]
pub rule_engine_configuration: RuleEngineConfiguration,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoAnalyticsConfiguration {}
// Controls what goes into the metadata stream of a media profile (PTZ
// status, events, analytics) and how it is transported.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataConfiguration {
// optional element to configure which PTZ related data is to include in the
// metadata stream
#[yaserde(prefix = "tt", rename = "PTZStatus")]
pub ptz_status: Option<Ptzfilter>,
// Optional element to configure the streaming of events. A client might be
// interested in receiving all,
// none or some of the events produced by the device:
#[yaserde(prefix = "tt", rename = "Events")]
pub events: Option<EventSubscription>,
// Defines whether the streamed metadata will include metadata from the
// analytics engines (video, cell motion, audio etc.)
#[yaserde(prefix = "tt", rename = "Analytics")]
pub analytics: Option<bool>,
// Defines the multicast settings that could be used for video streaming.
#[yaserde(prefix = "tt", rename = "Multicast")]
pub multicast: MulticastConfiguration,
// The rtsp session timeout for the related audio stream (when using Media2
// Service, this value is deprecated and ignored)
#[yaserde(prefix = "tt", rename = "SessionTimeout")]
pub session_timeout: xs::Duration,
// Optional analytics settings associated with this metadata stream.
#[yaserde(prefix = "tt", rename = "AnalyticsEngineConfiguration")]
pub analytics_engine_configuration: Option<AnalyticsEngineConfiguration>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<MetadataConfigurationExtension>,
// Optional parameter to configure compression type of Metadata payload. Use
// values from enumeration MetadataCompressionType.
#[yaserde(attribute, rename = "CompressionType")]
pub compression_type: Option<String>,
// Optional parameter to configure if the metadata stream shall contain the
// Geo Location coordinates of each target.
#[yaserde(attribute, rename = "GeoLocation")]
pub geo_location: Option<bool>,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for MetadataConfiguration {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataConfigurationExtension {}
impl Validate for MetadataConfigurationExtension {}
// Selects which PTZ data (status and/or position) the metadata stream
// carries.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzfilter {
// True if the metadata stream shall contain the PTZ status (IDLE, MOVING or
// UNKNOWN)
#[yaserde(prefix = "tt", rename = "Status")]
pub status: bool,
// True if the metadata stream shall contain the PTZ position
#[yaserde(prefix = "tt", rename = "Position")]
pub position: bool,
}
impl Validate for Ptzfilter {}
// Subscription handling in the same way as base notification subscription.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EventSubscription {
// WS-BaseNotification topic/message filter selecting which events to stream.
#[yaserde(prefix = "tt", rename = "Filter")]
pub filter: Option<wsnt::FilterType>,
// Policy element carried through from the subscription request.
#[yaserde(prefix = "tt", rename = "SubscriptionPolicy")]
pub subscription_policy: Option<event_subscription::SubscriptionPolicyType>,
}
impl Validate for EventSubscription {}
// Nested helper types generated from the anonymous inner schema types of
// tt:EventSubscription.
pub mod event_subscription {
use super::*;
// Opaque policy container; the schema defines no members.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SubscriptionPolicyType {}
impl Validate for SubscriptionPolicyType {}
}
// Capabilities a device advertises for metadata configurations.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataConfigurationOptions {
// Which PTZ status/position fields the device can stream.
#[yaserde(prefix = "tt", rename = "PTZStatusFilterOptions")]
pub ptz_status_filter_options: PtzstatusFilterOptions,
// Extension carrying e.g. the supported compression types.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<MetadataConfigurationOptionsExtension>,
// True if the device is able to stream the Geo Located positions of each
// target.
#[yaserde(attribute, rename = "GeoLocation")]
pub geo_location: Option<bool>,
}
impl Validate for MetadataConfigurationOptions {}
// Extension of MetadataConfigurationOptions listing supported payload
// compression types.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataConfigurationOptionsExtension {
// List of supported metadata compression type. Its options shall be chosen
// from tt:MetadataCompressionType.
#[yaserde(prefix = "tt", rename = "CompressionType")]
pub compression_type: Vec<String>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<MetadataConfigurationOptionsExtension2>,
}
impl Validate for MetadataConfigurationOptionsExtension {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataConfigurationOptionsExtension2 {}
impl Validate for MetadataConfigurationOptionsExtension2 {}
// Payload compression schemes for the metadata stream; unrecognized XML
// values are preserved in __Unknown__ rather than rejected.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum MetadataCompressionType {
None,
#[yaserde(rename = "GZIP")]
Gzip,
#[yaserde(rename = "EXI")]
Exi,
__Unknown__(String),
}
impl Default for MetadataCompressionType {
fn default() -> MetadataCompressionType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for MetadataCompressionType {}
// Which PTZ status/position fields the device can include in the metadata
// stream.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzstatusFilterOptions {
// True if the device is able to stream pan or tilt status information.
#[yaserde(prefix = "tt", rename = "PanTiltStatusSupported")]
pub pan_tilt_status_supported: bool,
// True if the device is able to stream zoom status information.
#[yaserde(prefix = "tt", rename = "ZoomStatusSupported")]
pub zoom_status_supported: bool,
// True if the device is able to stream the pan or tilt position.
#[yaserde(prefix = "tt", rename = "PanTiltPositionSupported")]
pub pan_tilt_position_supported: Option<bool>,
// True if the device is able to stream zoom position information.
#[yaserde(prefix = "tt", rename = "ZoomPositionSupported")]
pub zoom_position_supported: Option<bool>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtzstatusFilterOptionsExtension>,
}
impl Validate for PtzstatusFilterOptions {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzstatusFilterOptionsExtension {}
impl Validate for PtzstatusFilterOptionsExtension {}
// Represents a physical video output (display) of the device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoOutput {
// Pane layout currently shown on this output.
#[yaserde(prefix = "tt", rename = "Layout")]
pub layout: Layout,
// Resolution of the display in Pixel.
#[yaserde(prefix = "tt", rename = "Resolution")]
pub resolution: Option<VideoResolution>,
// Refresh rate of the display in Hertz.
#[yaserde(prefix = "tt", rename = "RefreshRate")]
pub refresh_rate: Option<f64>,
// Aspect ratio of the display as physical extent of width divided by
// height.
#[yaserde(prefix = "tt", rename = "AspectRatio")]
pub aspect_ratio: Option<f64>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoOutputExtension>,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoOutput {}
// Empty placeholder kept for schema forward-compatibility.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoOutputExtension {}
impl Validate for VideoOutputExtension {}
// Binds a media profile to a physical video output via its token.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoOutputConfiguration {
// Token of the Video Output the configuration applies to
#[yaserde(prefix = "tt", rename = "OutputToken")]
pub output_token: ReferenceToken,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for VideoOutputConfiguration {}
// The schema defines no options for video output configurations; this type
// exists only so the corresponding response element can be deserialized.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoOutputConfigurationOptions {}
impl Validate for VideoOutputConfigurationOptions {}
// Decoder capabilities per codec; each member is present only if the device
// can decode that format.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoDecoderConfigurationOptions {
// If the device is able to decode Jpeg streams this element describes the
// supported codecs and configurations
#[yaserde(prefix = "tt", rename = "JpegDecOptions")]
pub jpeg_dec_options: Option<JpegDecOptions>,
// If the device is able to decode H.264 streams this element describes the
// supported codecs and configurations
#[yaserde(prefix = "tt", rename = "H264DecOptions")]
pub h264_dec_options: Option<H264DecOptions>,
// If the device is able to decode Mpeg4 streams this element describes the
// supported codecs and configurations
#[yaserde(prefix = "tt", rename = "Mpeg4DecOptions")]
pub mpeg_4_dec_options: Option<Mpeg4DecOptions>,
// Reserved for future schema revisions.
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<VideoDecoderConfigurationOptionsExtension>,
}
impl Validate for VideoDecoderConfigurationOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct H264DecOptions {
// List of supported H.264 Video Resolutions
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// List of supported H264 Profiles (either baseline, main, extended or high)
#[yaserde(prefix = "tt", rename = "SupportedH264Profiles")]
pub supported_h264_profiles: Vec<H264Profile>,
// Supported H.264 bitrate range in kbps
#[yaserde(prefix = "tt", rename = "SupportedInputBitrate")]
pub supported_input_bitrate: IntRange,
// Supported H.264 framerate range in fps
#[yaserde(prefix = "tt", rename = "SupportedFrameRate")]
pub supported_frame_rate: IntRange,
}
impl Validate for H264DecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct JpegDecOptions {
// List of supported Jpeg Video Resolutions
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// Supported Jpeg bitrate range in kbps
#[yaserde(prefix = "tt", rename = "SupportedInputBitrate")]
pub supported_input_bitrate: IntRange,
// Supported Jpeg framerate range in fps
#[yaserde(prefix = "tt", rename = "SupportedFrameRate")]
pub supported_frame_rate: IntRange,
}
impl Validate for JpegDecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Mpeg4DecOptions {
// List of supported Mpeg4 Video Resolutions
#[yaserde(prefix = "tt", rename = "ResolutionsAvailable")]
pub resolutions_available: Vec<VideoResolution>,
// List of supported Mpeg4 Profiles (either SP or ASP)
#[yaserde(prefix = "tt", rename = "SupportedMpeg4Profiles")]
pub supported_mpeg_4_profiles: Vec<Mpeg4Profile>,
// Supported Mpeg4 bitrate range in kbps
#[yaserde(prefix = "tt", rename = "SupportedInputBitrate")]
pub supported_input_bitrate: IntRange,
// Supported Mpeg4 framerate range in fps
#[yaserde(prefix = "tt", rename = "SupportedFrameRate")]
pub supported_frame_rate: IntRange,
}
impl Validate for Mpeg4DecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct VideoDecoderConfigurationOptionsExtension {}
impl Validate for VideoDecoderConfigurationOptionsExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioOutput {
    // Unique identifier referencing the physical entity.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AudioOutput {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioOutputConfiguration {
    // Token of the physical Audio output.
    #[yaserde(prefix = "tt", rename = "OutputToken")]
    pub output_token: ReferenceToken,
    // An audio channel MAY support different types of audio transmission. While
    // for full duplex
    // operation no special handling is required, in half duplex operation the
    // transmission direction
    // needs to be switched.
    // The optional SendPrimacy parameter inside the AudioOutputConfiguration
    // indicates which
    // direction is currently active. An NVC can switch between different modes
    // by setting the
    // AudioOutputConfiguration.
    #[yaserde(prefix = "tt", rename = "SendPrimacy")]
    pub send_primacy: Option<String>,
    // Volume setting of the output. The applicable range is defined via the
    // option AudioOutputOptions.OutputLevelRange.
    #[yaserde(prefix = "tt", rename = "OutputLevel")]
    pub output_level: i32,
    // User readable name. Length up to 64 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Number of internal references currently using this configuration.
    #[yaserde(prefix = "tt", rename = "UseCount")]
    pub use_count: i32,
    // Token that uniquely references this configuration. Length up to 64
    // characters.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AudioOutputConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioOutputConfigurationOptions {
    // Tokens of the physical Audio outputs (typically one).
    #[yaserde(prefix = "tt", rename = "OutputTokensAvailable")]
    pub output_tokens_available: Vec<ReferenceToken>,
    // Supported SendPrimacy options. (The source schema comment is truncated
    // here; see AudioOutputConfiguration::send_primacy for the semantics.)
    #[yaserde(prefix = "tt", rename = "SendPrimacyOptions")]
    pub send_primacy_options: Vec<String>,
    // Minimum and maximum level range supported for this Output.
    #[yaserde(prefix = "tt", rename = "OutputLevelRange")]
    pub output_level_range: IntRange,
}
impl Validate for AudioOutputConfigurationOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioDecoderConfiguration {
    // User readable name. Length up to 64 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Number of internal references currently using this configuration.
    #[yaserde(prefix = "tt", rename = "UseCount")]
    pub use_count: i32,
    // Token that uniquely references this configuration. Length up to 64
    // characters.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AudioDecoderConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioDecoderConfigurationOptions {
    // If the device is able to decode AAC encoded audio this section describes
    // the supported configurations
    #[yaserde(prefix = "tt", rename = "AACDecOptions")]
    pub aac_dec_options: Option<AacdecOptions>,
    // If the device is able to decode G711 encoded audio this section describes
    // the supported configurations
    #[yaserde(prefix = "tt", rename = "G711DecOptions")]
    pub g711_dec_options: Option<G711DecOptions>,
    // If the device is able to decode G726 encoded audio this section describes
    // the supported configurations
    #[yaserde(prefix = "tt", rename = "G726DecOptions")]
    pub g726_dec_options: Option<G726DecOptions>,
    // Optional extension point.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AudioDecoderConfigurationOptionsExtension>,
}
impl Validate for AudioDecoderConfigurationOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct G711DecOptions {
    // List of supported bitrates in kbps
    #[yaserde(prefix = "tt", rename = "Bitrate")]
    pub bitrate: IntList,
    // List of supported sample rates in kHz
    #[yaserde(prefix = "tt", rename = "SampleRateRange")]
    pub sample_rate_range: IntList,
}
impl Validate for G711DecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AacdecOptions {
    // List of supported bitrates in kbps
    #[yaserde(prefix = "tt", rename = "Bitrate")]
    pub bitrate: IntList,
    // List of supported sample rates in kHz
    #[yaserde(prefix = "tt", rename = "SampleRateRange")]
    pub sample_rate_range: IntList,
}
impl Validate for AacdecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct G726DecOptions {
    // List of supported bitrates in kbps
    #[yaserde(prefix = "tt", rename = "Bitrate")]
    pub bitrate: IntList,
    // List of supported sample rates in kHz
    #[yaserde(prefix = "tt", rename = "SampleRateRange")]
    pub sample_rate_range: IntList,
}
impl Validate for G726DecOptions {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct AudioDecoderConfigurationOptionsExtension {}
impl Validate for AudioDecoderConfigurationOptionsExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MulticastConfiguration {
    // The multicast address (if this address is set to 0 no multicast streaming
    // is enabled)
    #[yaserde(prefix = "tt", rename = "Address")]
    pub address: Ipaddress,
    // The RTP multicast destination port. A device may support RTCP. In this
    // case the port value shall be even to allow the corresponding RTCP stream
    // to be mapped to the next higher (odd) destination port number as defined
    // in the RTSP specification.
    #[yaserde(prefix = "tt", rename = "Port")]
    pub port: i32,
    // In case of IPv6 the TTL value is assumed as the hop limit. Note that for
    // IPV6 and administratively scoped IPv4 multicast the primary use for hop
    // limit / TTL is to prevent packets from (endlessly) circulating and not
    // limiting scope. In these cases the address contains the scope.
    #[yaserde(prefix = "tt", rename = "TTL")]
    pub ttl: i32,
    // Read only property signalling that streaming is persistent. Use the
    // methods StartMulticastStreaming and StopMulticastStreaming to switch its
    // state.
    #[yaserde(prefix = "tt", rename = "AutoStart")]
    pub auto_start: bool,
}
impl Validate for MulticastConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct StreamSetup {
    // Defines if a multicast or unicast stream is requested
    #[yaserde(prefix = "tt", rename = "Stream")]
    pub stream: StreamType,
    #[yaserde(prefix = "tt", rename = "Transport")]
    pub transport: Transport,
}
impl Validate for StreamSetup {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
// Requested stream delivery mode; see StreamSetup::stream.
pub enum StreamType {
    #[yaserde(rename = "RTP-Unicast")]
    RtpUnicast,
    #[yaserde(rename = "RTP-Multicast")]
    RtpMulticast,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for StreamType {
fn default() -> StreamType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for StreamType {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Transport {
    // Defines the network protocol for streaming, either UDP=RTP/UDP,
    // RTSP=RTP/RTSP/TCP or HTTP=RTP/RTSP/HTTP/TCP
    #[yaserde(prefix = "tt", rename = "Protocol")]
    pub protocol: TransportProtocol,
    // Optional element to describe further tunnel options. This element is
    // normally not needed
    #[yaserde(prefix = "tt", rename = "Tunnel")]
    pub tunnel: Vec<Transport>,
}
impl Validate for Transport {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum TransportProtocol {
    #[yaserde(rename = "UDP")]
    Udp,
    // This value is deprecated.
    #[yaserde(rename = "TCP")]
    Tcp,
    #[yaserde(rename = "RTSP")]
    Rtsp,
    #[yaserde(rename = "HTTP")]
    Http,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for TransportProtocol {
fn default() -> TransportProtocol {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for TransportProtocol {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MediaUri {
    // Stable Uri to be used for requesting the media stream
    #[yaserde(prefix = "tt", rename = "Uri")]
    pub uri: String,
    // Indicates if the Uri is only valid until the connection is established.
    // The value shall be set to "false".
    #[yaserde(prefix = "tt", rename = "InvalidAfterConnect")]
    pub invalid_after_connect: bool,
    // Indicates if the Uri is invalid after a reboot of the device. The value
    // shall be set to "false".
    #[yaserde(prefix = "tt", rename = "InvalidAfterReboot")]
    pub invalid_after_reboot: bool,
    // Duration how long the Uri is valid. This parameter shall be set to PT0S
    // to indicate that this stream URI is indefinitely valid even if the
    // profile changes
    #[yaserde(prefix = "tt", rename = "Timeout")]
    pub timeout: xs::Duration,
}
impl Validate for MediaUri {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ScopeDefinition {
    Fixed,
    Configurable,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for ScopeDefinition {
    fn default() -> ScopeDefinition {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for ScopeDefinition {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Scope {
    // Indicates if the scope is fixed or configurable.
    #[yaserde(prefix = "tt", rename = "ScopeDef")]
    pub scope_def: ScopeDefinition,
    // Scope item URI.
    #[yaserde(prefix = "tt", rename = "ScopeItem")]
    pub scope_item: String,
}
impl Validate for Scope {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum DiscoveryMode {
    Discoverable,
    NonDiscoverable,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for DiscoveryMode {
    fn default() -> DiscoveryMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for DiscoveryMode {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterface {
    // Indicates whether or not an interface is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: bool,
    // Network interface information
    #[yaserde(prefix = "tt", rename = "Info")]
    pub info: Option<NetworkInterfaceInfo>,
    // Link configuration.
    #[yaserde(prefix = "tt", rename = "Link")]
    pub link: Option<NetworkInterfaceLink>,
    // IPv4 network interface configuration.
    #[yaserde(prefix = "tt", rename = "IPv4")]
    pub i_pv_4: Vec<Ipv4NetworkInterface>,
    // IPv6 network interface configuration.
    #[yaserde(prefix = "tt", rename = "IPv6")]
    pub i_pv_6: Vec<Ipv6NetworkInterface>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkInterfaceExtension>,
    // Unique identifier referencing the physical entity.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for NetworkInterface {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceExtension {
    #[yaserde(prefix = "tt", rename = "InterfaceType")]
    pub interface_type: IanaIfTypes,
    // Extension point prepared for future 802.3 configuration.
    #[yaserde(prefix = "tt", rename = "Dot3")]
    pub dot_3: Vec<Dot3Configuration>,
    #[yaserde(prefix = "tt", rename = "Dot11")]
    pub dot_11: Vec<Dot11Configuration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkInterfaceExtension2>,
}
impl Validate for NetworkInterfaceExtension {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
// Network interface configuration priority; the Validate impl restricts the
// wrapped integer to the inclusive range [0, 31].
pub struct NetworkInterfaceConfigPriority(pub xs::Integer);
impl Validate for NetworkInterfaceConfigPriority {
fn validate(&self) -> Result<(), String> {
if self.0 < "0".parse().unwrap() {
return Err(format!("MinInclusive validation error: invalid value of 0! \nExpected: 0 >= 0.\nActual: 0 == {}", self.0));
}
if self.0 > "31".parse().unwrap() {
return Err(format!("MaxInclusive validation error: invalid value of 0! \nExpected: 0 <= 31.\nActual: 0 == {}", self.0));
}
Ok(())
}
}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this type in this schema version.
pub struct Dot3Configuration {}
impl Validate for Dot3Configuration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct NetworkInterfaceExtension2 {}
impl Validate for NetworkInterfaceExtension2 {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceLink {
    // Configured link settings.
    #[yaserde(prefix = "tt", rename = "AdminSettings")]
    pub admin_settings: NetworkInterfaceConnectionSetting,
    // Current active link settings.
    #[yaserde(prefix = "tt", rename = "OperSettings")]
    pub oper_settings: NetworkInterfaceConnectionSetting,
    // Integer indicating interface type, for example: 6 is ethernet.
    #[yaserde(prefix = "tt", rename = "InterfaceType")]
    pub interface_type: IanaIfTypes,
}
impl Validate for NetworkInterfaceLink {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceConnectionSetting {
    // Auto negotiation on/off.
    #[yaserde(prefix = "tt", rename = "AutoNegotiation")]
    pub auto_negotiation: bool,
    // Speed.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: i32,
    // Duplex type, Half or Full.
    #[yaserde(prefix = "tt", rename = "Duplex")]
    pub duplex: Duplex,
}
impl Validate for NetworkInterfaceConnectionSetting {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Duplex {
    Full,
    Half,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for Duplex {
    fn default() -> Duplex {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Duplex {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
// IANA ifType number (e.g. 6 is ethernet, per the comment on
// NetworkInterfaceLink::interface_type).
pub struct IanaIfTypes(pub i32);
impl Validate for IanaIfTypes {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceInfo {
    // Network interface name, for example eth0.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Option<String>,
    // Network interface MAC address.
    #[yaserde(prefix = "tt", rename = "HwAddress")]
    pub hw_address: HwAddress,
    // Maximum transmission unit.
    #[yaserde(prefix = "tt", rename = "MTU")]
    pub mtu: Option<i32>,
}
impl Validate for NetworkInterfaceInfo {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv6NetworkInterface {
    // Indicates whether or not IPv6 is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: bool,
    // IPv6 configuration.
    #[yaserde(prefix = "tt", rename = "Config")]
    pub config: Option<Ipv6Configuration>,
}
impl Validate for Ipv6NetworkInterface {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv4NetworkInterface {
    // Indicates whether or not IPv4 is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: bool,
    // IPv4 configuration.
    #[yaserde(prefix = "tt", rename = "Config")]
    pub config: Ipv4Configuration,
}
impl Validate for Ipv4NetworkInterface {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv4Configuration {
    // List of manually added IPv4 addresses.
    #[yaserde(prefix = "tt", rename = "Manual")]
    pub manual: Vec<PrefixedIPv4Address>,
    // Link local address.
    #[yaserde(prefix = "tt", rename = "LinkLocal")]
    pub link_local: Option<PrefixedIPv4Address>,
    // IPv4 address configured by using DHCP.
    #[yaserde(prefix = "tt", rename = "FromDHCP")]
    pub from_dhcp: Option<PrefixedIPv4Address>,
    // Indicates whether or not DHCP is used.
    #[yaserde(prefix = "tt", rename = "DHCP")]
    pub dhcp: bool,
}
impl Validate for Ipv4Configuration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv6Configuration {
    // Indicates whether router advertisement is used.
    #[yaserde(prefix = "tt", rename = "AcceptRouterAdvert")]
    pub accept_router_advert: Option<bool>,
    // DHCP configuration.
    #[yaserde(prefix = "tt", rename = "DHCP")]
    pub dhcp: Ipv6DHCPConfiguration,
    // List of manually entered IPv6 addresses.
    #[yaserde(prefix = "tt", rename = "Manual")]
    pub manual: Vec<PrefixedIPv6Address>,
    // List of link local IPv6 addresses.
    #[yaserde(prefix = "tt", rename = "LinkLocal")]
    pub link_local: Vec<PrefixedIPv6Address>,
    // List of IPv6 addresses configured by using DHCP.
    #[yaserde(prefix = "tt", rename = "FromDHCP")]
    pub from_dhcp: Vec<PrefixedIPv6Address>,
    // List of IPv6 addresses configured by using router advertisement.
    #[yaserde(prefix = "tt", rename = "FromRA")]
    pub from_ra: Vec<PrefixedIPv6Address>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<Ipv6ConfigurationExtension>,
}
impl Validate for Ipv6Configuration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct Ipv6ConfigurationExtension {}
impl Validate for Ipv6ConfigurationExtension {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Ipv6DHCPConfiguration {
    Auto,
    Stateful,
    Stateless,
    Off,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for Ipv6DHCPConfiguration {
    fn default() -> Ipv6DHCPConfiguration {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Ipv6DHCPConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkProtocol {
    // Network protocol type string.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: NetworkProtocolType,
    // Indicates if the protocol is enabled or not.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: bool,
    // The port that is used by the protocol.
    #[yaserde(prefix = "tt", rename = "Port")]
    pub port: Vec<i32>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkProtocolExtension>,
}
impl Validate for NetworkProtocol {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct NetworkProtocolExtension {}
impl Validate for NetworkProtocolExtension {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum NetworkProtocolType {
    #[yaserde(rename = "HTTP")]
    Http,
    #[yaserde(rename = "HTTPS")]
    Https,
    #[yaserde(rename = "RTSP")]
    Rtsp,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for NetworkProtocolType {
    fn default() -> NetworkProtocolType {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for NetworkProtocolType {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum NetworkHostType {
    #[yaserde(rename = "IPv4")]
    Ipv4,
    #[yaserde(rename = "IPv6")]
    Ipv6,
    #[yaserde(rename = "DNS")]
    Dns,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for NetworkHostType {
    fn default() -> NetworkHostType {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for NetworkHostType {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkHost {
    // Network host type: IPv4, IPv6 or DNS.
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: NetworkHostType,
    // IPv4 address.
    #[yaserde(prefix = "tt", rename = "IPv4Address")]
    pub i_pv_4_address: Option<Ipv4Address>,
    // IPv6 address.
    #[yaserde(prefix = "tt", rename = "IPv6Address")]
    pub i_pv_6_address: Option<Ipv6Address>,
    // DNS name.
    #[yaserde(prefix = "tt", rename = "DNSname")]
    pub dn_sname: Option<Dnsname>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkHostExtension>,
}
impl Validate for NetworkHost {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
// No fields are defined for this extension type in this schema version.
pub struct NetworkHostExtension {}
impl Validate for NetworkHostExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipaddress {
    // Indicates if the address is an IPv4 or IPv6 address.
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: Iptype,
    // IPv4 address.
    #[yaserde(prefix = "tt", rename = "IPv4Address")]
    pub i_pv_4_address: Option<Ipv4Address>,
    // IPv6 address
    #[yaserde(prefix = "tt", rename = "IPv6Address")]
    pub i_pv_6_address: Option<Ipv6Address>,
}
impl Validate for Ipaddress {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PrefixedIPv4Address {
    // IPv4 address
    #[yaserde(prefix = "tt", rename = "Address")]
    pub address: Ipv4Address,
    // Prefix (subnet mask) length
    #[yaserde(prefix = "tt", rename = "PrefixLength")]
    pub prefix_length: i32,
}
impl Validate for PrefixedIPv4Address {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Ipv4Address(pub String);
impl Validate for Ipv4Address {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PrefixedIPv6Address {
    // IPv6 address
    #[yaserde(prefix = "tt", rename = "Address")]
    pub address: Ipv6Address,
    // Prefix (subnet mask) length
    #[yaserde(prefix = "tt", rename = "PrefixLength")]
    pub prefix_length: i32,
}
impl Validate for PrefixedIPv6Address {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Ipv6Address(pub String);
impl Validate for Ipv6Address {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
// Network interface MAC address string.
pub struct HwAddress(pub String);
impl Validate for HwAddress {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Iptype {
    #[yaserde(rename = "IPv4")]
    Ipv4,
    #[yaserde(rename = "IPv6")]
    Ipv6,
    // Fallback for values not defined by the schema.
    __Unknown__(String),
}
impl Default for Iptype {
fn default() -> Iptype {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for Iptype {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Dnsname(pub String);
impl Validate for Dnsname {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct HostnameInformation {
// Indicates whether the hostname is obtained from DHCP or not.
#[yaserde(prefix = "tt", rename = "FromDHCP")]
pub from_dhcp: bool,
// Indicates the hostname.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Option<String>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<HostnameInformationExtension>,
}
impl Validate for HostnameInformation {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct HostnameInformationExtension {}
impl Validate for HostnameInformationExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dnsinformation {
// Indicates whether or not DNS information is retrieved from DHCP.
#[yaserde(prefix = "tt", rename = "FromDHCP")]
pub from_dhcp: bool,
// Search domain.
#[yaserde(prefix = "tt", rename = "SearchDomain")]
pub search_domain: Vec<String>,
// List of DNS addresses received from DHCP.
#[yaserde(prefix = "tt", rename = "DNSFromDHCP")]
pub dns_from_dhcp: Vec<Ipaddress>,
// List of manually entered DNS addresses.
#[yaserde(prefix = "tt", rename = "DNSManual")]
pub dns_manual: Vec<Ipaddress>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<DnsinformationExtension>,
}
impl Validate for Dnsinformation {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DnsinformationExtension {}
impl Validate for DnsinformationExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ntpinformation {
// Indicates if NTP information is to be retrieved by using DHCP.
#[yaserde(prefix = "tt", rename = "FromDHCP")]
pub from_dhcp: bool,
// List of NTP addresses retrieved by using DHCP.
#[yaserde(prefix = "tt", rename = "NTPFromDHCP")]
pub ntp_from_dhcp: Vec<NetworkHost>,
// List of manually entered NTP addresses.
#[yaserde(prefix = "tt", rename = "NTPManual")]
pub ntp_manual: Vec<NetworkHost>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<NtpinformationExtension>,
}
impl Validate for Ntpinformation {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NtpinformationExtension {}
impl Validate for NtpinformationExtension {}
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Domain(pub String);
impl Validate for Domain {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum IpaddressFilterType {
Allow,
Deny,
__Unknown__(String),
}
impl Default for IpaddressFilterType {
fn default() -> IpaddressFilterType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for IpaddressFilterType {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DynamicDNSInformation {
// Dynamic DNS type.
#[yaserde(prefix = "tt", rename = "Type")]
pub _type: DynamicDNSType,
// DNS name.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Option<Dnsname>,
// Time to live.
#[yaserde(prefix = "tt", rename = "TTL")]
pub ttl: Option<xs::Duration>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<DynamicDNSInformationExtension>,
}
impl Validate for DynamicDNSInformation {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DynamicDNSInformationExtension {}
impl Validate for DynamicDNSInformationExtension {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum DynamicDNSType {
NoUpdate,
ClientUpdates,
ServerUpdates,
__Unknown__(String),
}
impl Default for DynamicDNSType {
fn default() -> DynamicDNSType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for DynamicDNSType {}
// Settable configuration of a network interface (Enabled, Link, MTU, IPv4/IPv6).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceSetConfiguration {
    // Indicates whether or not an interface is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: Option<bool>,
    // Link configuration.
    #[yaserde(prefix = "tt", rename = "Link")]
    pub link: Option<NetworkInterfaceConnectionSetting>,
    // Maximum transmission unit.
    #[yaserde(prefix = "tt", rename = "MTU")]
    pub mtu: Option<i32>,
    // IPv4 network interface configuration.
    #[yaserde(prefix = "tt", rename = "IPv4")]
    pub i_pv_4: Vec<Ipv4NetworkInterfaceSetConfiguration>,
    // IPv6 network interface configuration.
    #[yaserde(prefix = "tt", rename = "IPv6")]
    pub i_pv_6: Vec<Ipv6NetworkInterfaceSetConfiguration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkInterfaceSetConfigurationExtension>,
}
impl Validate for NetworkInterfaceSetConfiguration {}
// Extension carrying wired (Dot3) and wireless (Dot11) interface configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceSetConfigurationExtension {
    #[yaserde(prefix = "tt", rename = "Dot3")]
    pub dot_3: Vec<Dot3Configuration>,
    #[yaserde(prefix = "tt", rename = "Dot11")]
    pub dot_11: Vec<Dot11Configuration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkInterfaceSetConfigurationExtension2>,
}
impl Validate for NetworkInterfaceSetConfigurationExtension {}
// Settable IPv6 configuration of a network interface.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv6NetworkInterfaceSetConfiguration {
    // Indicates whether or not IPv6 is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: Option<bool>,
    // Indicates whether router advertisement is used.
    #[yaserde(prefix = "tt", rename = "AcceptRouterAdvert")]
    pub accept_router_advert: Option<bool>,
    // List of manually added IPv6 addresses.
    #[yaserde(prefix = "tt", rename = "Manual")]
    pub manual: Vec<PrefixedIPv6Address>,
    // DHCP configuration.
    #[yaserde(prefix = "tt", rename = "DHCP")]
    pub dhcp: Option<Ipv6DHCPConfiguration>,
}
impl Validate for Ipv6NetworkInterfaceSetConfiguration {}
// Settable IPv4 configuration of a network interface.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ipv4NetworkInterfaceSetConfiguration {
    // Indicates whether or not IPv4 is enabled.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: Option<bool>,
    // List of manually added IPv4 addresses.
    #[yaserde(prefix = "tt", rename = "Manual")]
    pub manual: Vec<PrefixedIPv4Address>,
    // Indicates whether or not DHCP is used.
    #[yaserde(prefix = "tt", rename = "DHCP")]
    pub dhcp: Option<bool>,
}
impl Validate for Ipv4NetworkInterfaceSetConfiguration {}
// Default gateway addresses (IPv4 and/or IPv6).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkGateway {
    // IPv4 address string.
    #[yaserde(prefix = "tt", rename = "IPv4Address")]
    pub i_pv_4_address: Vec<Ipv4Address>,
    // IPv6 address string.
    #[yaserde(prefix = "tt", rename = "IPv6Address")]
    pub i_pv_6_address: Vec<Ipv6Address>,
}
impl Validate for NetworkGateway {}
// Zero-configuration (link-local addressing) state of one network interface.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkZeroConfiguration {
    // Unique identifier of network interface.
    #[yaserde(prefix = "tt", rename = "InterfaceToken")]
    pub interface_token: ReferenceToken,
    // Indicates whether the zero-configuration is enabled or not.
    #[yaserde(prefix = "tt", rename = "Enabled")]
    pub enabled: bool,
    // The zero-configuration IPv4 address(es)
    #[yaserde(prefix = "tt", rename = "Addresses")]
    pub addresses: Vec<Ipv4Address>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkZeroConfigurationExtension>,
}
impl Validate for NetworkZeroConfiguration {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkZeroConfigurationExtension {
    // Optional array holding the configuration for the second and possibly
    // further interfaces.
    #[yaserde(prefix = "tt", rename = "Additional")]
    pub additional: Vec<NetworkZeroConfiguration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkZeroConfigurationExtension2>,
}
impl Validate for NetworkZeroConfigurationExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkZeroConfigurationExtension2 {}
impl Validate for NetworkZeroConfigurationExtension2 {}
// IP address filter: allow/deny type plus the filtered IPv4/IPv6 prefixes.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IpaddressFilter {
    // `_type` because `type` is a Rust keyword; serialized as "Type".
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: IpaddressFilterType,
    #[yaserde(prefix = "tt", rename = "IPv4Address")]
    pub i_pv_4_address: Vec<PrefixedIPv4Address>,
    #[yaserde(prefix = "tt", rename = "IPv6Address")]
    pub i_pv_6_address: Vec<PrefixedIPv6Address>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<IpaddressFilterExtension>,
}
impl Validate for IpaddressFilter {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IpaddressFilterExtension {}
impl Validate for IpaddressFilterExtension {}
// IEEE 802.11 (Wi-Fi) configuration for a network interface.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11Configuration {
    #[yaserde(prefix = "tt", rename = "SSID")]
    pub ssid: Dot11SSIDType,
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Dot11StationMode,
    #[yaserde(prefix = "tt", rename = "Alias")]
    pub alias: Name,
    #[yaserde(prefix = "tt", rename = "Priority")]
    pub priority: NetworkInterfaceConfigPriority,
    #[yaserde(prefix = "tt", rename = "Security")]
    pub security: Dot11SecurityConfiguration,
}
impl Validate for Dot11Configuration {}
// IEEE 802.11 SSID; the schema restricts it to 1..=32 bytes.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Dot11SSIDType(pub String);
impl Validate for Dot11SSIDType {
    // Enforces the schema length facets: minLength = 1, maxLength = 32.
    // Compares against integer literals directly; the generated form parsed
    // the string literals "1"/"32" at run time with `.parse().unwrap()`,
    // which was needless work and an avoidable panic path.
    fn validate(&self) -> Result<(), String> {
        let len = self.0.len();
        if len < 1 {
            return Err(format!(
                "MinLength validation error. \nExpected: 0 length >= 1 \nActual: 0 length == {}",
                len
            ));
        }
        if len > 32 {
            return Err(format!(
                "MaxLength validation error. \nExpected: 0 length <= 32 \nActual: 0 length == {}",
                len
            ));
        }
        Ok(())
    }
}
// IEEE 802.11 station operating mode.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Dot11StationMode {
    // Serialized as "Ad-hoc"; the hyphen is not valid in a Rust identifier.
    #[yaserde(rename = "Ad-hoc")]
    AdHoc,
    Infrastructure,
    Extended,
    // Catch-all for values outside the schema enumeration.
    __Unknown__(String),
}
impl Default for Dot11StationMode {
    fn default() -> Dot11StationMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Dot11StationMode {}
// IEEE 802.11 security settings: mode, cipher, optional PSK or 802.1X reference.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11SecurityConfiguration {
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Dot11SecurityMode,
    #[yaserde(prefix = "tt", rename = "Algorithm")]
    pub algorithm: Option<Dot11Cipher>,
    #[yaserde(prefix = "tt", rename = "PSK")]
    pub psk: Option<Dot11PSKSet>,
    // Reference to an 802.1X configuration, used when mode is Dot1X.
    #[yaserde(prefix = "tt", rename = "Dot1X")]
    pub dot_1x: Option<ReferenceToken>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<Dot11SecurityConfigurationExtension>,
}
impl Validate for Dot11SecurityConfiguration {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11SecurityConfigurationExtension {}
impl Validate for Dot11SecurityConfigurationExtension {}
// IEEE 802.11 security mode.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Dot11SecurityMode {
    None,
    #[yaserde(rename = "WEP")]
    Wep,
    #[yaserde(rename = "PSK")]
    Psk,
    Dot1X,
    Extended,
    // Catch-all for values outside the schema enumeration.
    __Unknown__(String),
}
impl Default for Dot11SecurityMode {
    fn default() -> Dot11SecurityMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Dot11SecurityMode {}
// IEEE 802.11 cipher suite.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Dot11Cipher {
    #[yaserde(rename = "CCMP")]
    Ccmp,
    #[yaserde(rename = "TKIP")]
    Tkip,
    Any,
    Extended,
    // Catch-all for values outside the schema enumeration.
    __Unknown__(String),
}
impl Default for Dot11Cipher {
    fn default() -> Dot11Cipher {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Dot11Cipher {}
// Pre-shared key; the schema restricts it to exactly 32 bytes.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Dot11PSK(pub String);
impl Validate for Dot11PSK {
    // Enforces the schema length facet: length = 32.
    // Compares against an integer literal directly; the generated form parsed
    // the string literal "32" at run time with `.parse().unwrap()`, which was
    // needless work and an avoidable panic path.
    fn validate(&self) -> Result<(), String> {
        let len = self.0.len();
        if len != 32 {
            return Err(format!(
                "Length validation error. \nExpected: 0 length == 32 \nActual: 0 length == {}",
                len
            ));
        }
        Ok(())
    }
}
// WPA pass-phrase; no length facets are enforced here (see Dot11PSKSet notes).
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Dot11PSKPassphrase(pub String);
impl Validate for Dot11PSKPassphrase {}
// Pre-shared key material: either the raw key or a pass-phrase.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11PSKSet {
    // According to IEEE802.11-2007 H.4.1 the RSNA PSK consists of 256 bits, or
    // 64 octets when represented in hex
    #[yaserde(prefix = "tt", rename = "Key")]
    pub key: Option<Dot11PSK>,
    // According to IEEE802.11-2007 H.4.1 a pass-phrase is a sequence of between
    // 8 and 63 ASCII-encoded characters and
    // each character in the pass-phrase must have an encoding in the range of
    // 32 to 126 (decimal),inclusive.
    #[yaserde(prefix = "tt", rename = "Passphrase")]
    pub passphrase: Option<Dot11PSKPassphrase>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<Dot11PSKSetExtension>,
}
impl Validate for Dot11PSKSet {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11PSKSetExtension {}
impl Validate for Dot11PSKSetExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkInterfaceSetConfigurationExtension2 {}
impl Validate for NetworkInterfaceSetConfigurationExtension2 {}
// Capability flags of the device's IEEE 802.11 support.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11Capabilities {
    #[yaserde(prefix = "tt", rename = "TKIP")]
    pub tkip: bool,
    #[yaserde(prefix = "tt", rename = "ScanAvailableNetworks")]
    pub scan_available_networks: bool,
    #[yaserde(prefix = "tt", rename = "MultipleConfiguration")]
    pub multiple_configuration: bool,
    #[yaserde(prefix = "tt", rename = "AdHocStationMode")]
    pub ad_hoc_station_mode: bool,
    #[yaserde(prefix = "tt", rename = "WEP")]
    pub wep: bool,
}
impl Validate for Dot11Capabilities {}
// Signal strength value; carried as a string per the schema, no validation.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Dot11SignalStrength(pub String);
impl Validate for Dot11SignalStrength {}
// Current status of an IEEE 802.11 connection.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11Status {
    #[yaserde(prefix = "tt", rename = "SSID")]
    pub ssid: Dot11SSIDType,
    #[yaserde(prefix = "tt", rename = "BSSID")]
    pub bssid: Option<String>,
    #[yaserde(prefix = "tt", rename = "PairCipher")]
    pub pair_cipher: Option<Dot11Cipher>,
    #[yaserde(prefix = "tt", rename = "GroupCipher")]
    pub group_cipher: Option<Dot11Cipher>,
    #[yaserde(prefix = "tt", rename = "SignalStrength")]
    pub signal_strength: Option<Dot11SignalStrength>,
    #[yaserde(prefix = "tt", rename = "ActiveConfigAlias")]
    pub active_config_alias: ReferenceToken,
}
impl Validate for Dot11Status {}
// Authentication and key management suite.
// NOTE: the "Mangement" misspelling mirrors the schema type name this code was
// generated from; it is part of the public API and must not be "fixed" here.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Dot11AuthAndMangementSuite {
    None,
    Dot1X,
    #[yaserde(rename = "PSK")]
    Psk,
    Extended,
    // Catch-all for values outside the schema enumeration.
    __Unknown__(String),
}
impl Default for Dot11AuthAndMangementSuite {
    fn default() -> Dot11AuthAndMangementSuite {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Dot11AuthAndMangementSuite {}
// One wireless network discovered by a scan.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11AvailableNetworks {
    #[yaserde(prefix = "tt", rename = "SSID")]
    pub ssid: Dot11SSIDType,
    #[yaserde(prefix = "tt", rename = "BSSID")]
    pub bssid: Option<String>,
    // See IEEE802.11 7.3.2.25.2 for details.
    #[yaserde(prefix = "tt", rename = "AuthAndMangementSuite")]
    pub auth_and_mangement_suite: Vec<Dot11AuthAndMangementSuite>,
    #[yaserde(prefix = "tt", rename = "PairCipher")]
    pub pair_cipher: Vec<Dot11Cipher>,
    #[yaserde(prefix = "tt", rename = "GroupCipher")]
    pub group_cipher: Vec<Dot11Cipher>,
    #[yaserde(prefix = "tt", rename = "SignalStrength")]
    pub signal_strength: Option<Dot11SignalStrength>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<Dot11AvailableNetworksExtension>,
}
impl Validate for Dot11AvailableNetworks {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot11AvailableNetworksExtension {}
impl Validate for Dot11AvailableNetworksExtension {}
// Service category selector used when requesting capabilities.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum CapabilityCategory {
    All,
    Analytics,
    Device,
    Events,
    Imaging,
    Media,
    #[yaserde(rename = "PTZ")]
    Ptz,
    // Catch-all for values outside the schema enumeration.
    __Unknown__(String),
}
impl Default for CapabilityCategory {
    fn default() -> CapabilityCategory {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for CapabilityCategory {}
// Aggregate capability set of a device, one entry per advertised service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Capabilities {
    // Analytics capabilities
    #[yaserde(prefix = "tt", rename = "Analytics")]
    pub analytics: Vec<AnalyticsCapabilities>,
    // Device capabilities
    #[yaserde(prefix = "tt", rename = "Device")]
    pub device: Vec<DeviceCapabilities>,
    // Event capabilities
    #[yaserde(prefix = "tt", rename = "Events")]
    pub events: Vec<EventCapabilities>,
    // Imaging capabilities
    #[yaserde(prefix = "tt", rename = "Imaging")]
    pub imaging: Vec<ImagingCapabilities>,
    // Media capabilities
    #[yaserde(prefix = "tt", rename = "Media")]
    pub media: Vec<MediaCapabilities>,
    // PTZ capabilities
    #[yaserde(prefix = "tt", rename = "PTZ")]
    pub ptz: Vec<Ptzcapabilities>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<CapabilitiesExtension>,
}
impl Validate for Capabilities {}
// Capabilities for additional (extension) services of the device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "DeviceIO")]
    pub device_io: Option<DeviceIOCapabilities>,
    #[yaserde(prefix = "tt", rename = "Display")]
    pub display: Option<DisplayCapabilities>,
    #[yaserde(prefix = "tt", rename = "Recording")]
    pub recording: Option<RecordingCapabilities>,
    #[yaserde(prefix = "tt", rename = "Search")]
    pub search: Option<SearchCapabilities>,
    #[yaserde(prefix = "tt", rename = "Replay")]
    pub replay: Option<ReplayCapabilities>,
    #[yaserde(prefix = "tt", rename = "Receiver")]
    pub receiver: Option<ReceiverCapabilities>,
    #[yaserde(prefix = "tt", rename = "AnalyticsDevice")]
    pub analytics_device: Option<AnalyticsDeviceCapabilities>,
    #[yaserde(prefix = "tt", rename = "Extensions")]
    pub extensions: Option<CapabilitiesExtension2>,
}
impl Validate for CapabilitiesExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CapabilitiesExtension2 {}
impl Validate for CapabilitiesExtension2 {}
// Capabilities of the analytics service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsCapabilities {
    // Analytics service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Indicates whether or not rules are supported.
    #[yaserde(prefix = "tt", rename = "RuleSupport")]
    pub rule_support: bool,
    // Indicates whether or not modules are supported.
    #[yaserde(prefix = "tt", rename = "AnalyticsModuleSupport")]
    pub analytics_module_support: bool,
}
impl Validate for AnalyticsCapabilities {}
// Capabilities of the device (management) service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DeviceCapabilities {
    // Device service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Network capabilities.
    #[yaserde(prefix = "tt", rename = "Network")]
    pub network: Option<NetworkCapabilities>,
    // System capabilities.
    #[yaserde(prefix = "tt", rename = "System")]
    pub system: Option<SystemCapabilities>,
    // I/O capabilities.
    #[yaserde(prefix = "tt", rename = "IO")]
    pub io: Option<Iocapabilities>,
    // Security capabilities.
    #[yaserde(prefix = "tt", rename = "Security")]
    pub security: Option<SecurityCapabilities>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<DeviceCapabilitiesExtension>,
}
impl Validate for DeviceCapabilities {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DeviceCapabilitiesExtension {}
impl Validate for DeviceCapabilitiesExtension {}
// Capabilities of the event service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EventCapabilities {
    // Event service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Indicates whether or not WS Subscription policy is supported.
    #[yaserde(prefix = "tt", rename = "WSSubscriptionPolicySupport")]
    pub ws_subscription_policy_support: bool,
    // Indicates whether or not WS Pull Point is supported.
    #[yaserde(prefix = "tt", rename = "WSPullPointSupport")]
    pub ws_pull_point_support: bool,
    // Indicates whether or not WS Pausable Subscription Manager Interface is
    // supported.
    #[yaserde(
        prefix = "tt",
        rename = "WSPausableSubscriptionManagerInterfaceSupport"
    )]
    pub ws_pausable_subscription_manager_interface_support: bool,
}
impl Validate for EventCapabilities {}
// Input/output capabilities of the device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Iocapabilities {
    // Number of input connectors.
    #[yaserde(prefix = "tt", rename = "InputConnectors")]
    pub input_connectors: Option<i32>,
    // Number of relay outputs.
    #[yaserde(prefix = "tt", rename = "RelayOutputs")]
    pub relay_outputs: Option<i32>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<IocapabilitiesExtension>,
}
impl Validate for Iocapabilities {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IocapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "Auxiliary")]
    pub auxiliary: Option<bool>,
    #[yaserde(prefix = "tt", rename = "AuxiliaryCommands")]
    pub auxiliary_commands: Vec<AuxiliaryData>,
    // NOTE(review): unlike sibling types, Extension is not wrapped in Option
    // here — presumably mirroring the schema; confirm against the XSD.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: IocapabilitiesExtension2,
}
impl Validate for IocapabilitiesExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IocapabilitiesExtension2 {}
impl Validate for IocapabilitiesExtension2 {}
// Capabilities of the media service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MediaCapabilities {
    // Media service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Streaming capabilities.
    #[yaserde(prefix = "tt", rename = "StreamingCapabilities")]
    pub streaming_capabilities: RealTimeStreamingCapabilities,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<MediaCapabilitiesExtension>,
}
impl Validate for MediaCapabilities {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MediaCapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "ProfileCapabilities")]
    pub profile_capabilities: ProfileCapabilities,
}
impl Validate for MediaCapabilitiesExtension {}
// Supported real-time streaming transports.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RealTimeStreamingCapabilities {
    // Indicates whether or not RTP multicast is supported.
    #[yaserde(prefix = "tt", rename = "RTPMulticast")]
    pub rtp_multicast: Option<bool>,
    // Indicates whether or not RTP over TCP is supported.
    #[yaserde(prefix = "tt", rename = "RTP_TCP")]
    pub rtp_tcp: Option<bool>,
    // Indicates whether or not RTP/RTSP/TCP is supported.
    #[yaserde(prefix = "tt", rename = "RTP_RTSP_TCP")]
    pub rtp_rtsp_tcp: Option<bool>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<RealTimeStreamingCapabilitiesExtension>,
}
impl Validate for RealTimeStreamingCapabilities {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RealTimeStreamingCapabilitiesExtension {}
impl Validate for RealTimeStreamingCapabilitiesExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ProfileCapabilities {
    // Maximum number of profiles.
    #[yaserde(prefix = "tt", rename = "MaximumNumberOfProfiles")]
    pub maximum_number_of_profiles: i32,
}
impl Validate for ProfileCapabilities {}
// Network-related capability flags.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkCapabilities {
    // Indicates whether or not IP filtering is supported.
    #[yaserde(prefix = "tt", rename = "IPFilter")]
    pub ip_filter: Option<bool>,
    // Indicates whether or not zeroconf is supported.
    #[yaserde(prefix = "tt", rename = "ZeroConfiguration")]
    pub zero_configuration: Option<bool>,
    // Indicates whether or not IPv6 is supported.
    #[yaserde(prefix = "tt", rename = "IPVersion6")]
    pub ip_version_6: Option<bool>,
    // Indicates whether or not dynamic DNS (DynDNS) is supported.
    #[yaserde(prefix = "tt", rename = "DynDNS")]
    pub dyn_dns: Option<bool>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkCapabilitiesExtension>,
}
impl Validate for NetworkCapabilities {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkCapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "Dot11Configuration")]
    pub dot_11_configuration: Option<bool>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<NetworkCapabilitiesExtension2>,
}
impl Validate for NetworkCapabilitiesExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NetworkCapabilitiesExtension2 {}
impl Validate for NetworkCapabilitiesExtension2 {}
// Security-related capability flags.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SecurityCapabilities {
    // Indicates whether or not TLS 1.1 is supported.
    #[yaserde(prefix = "tt", rename = "TLS1.1")]
    pub tls1_1: bool,
    // Indicates whether or not TLS 1.2 is supported.
    #[yaserde(prefix = "tt", rename = "TLS1.2")]
    pub tls1_2: bool,
    // Indicates whether or not onboard key generation is supported.
    #[yaserde(prefix = "tt", rename = "OnboardKeyGeneration")]
    pub onboard_key_generation: bool,
    // Indicates whether or not access policy configuration is supported.
    #[yaserde(prefix = "tt", rename = "AccessPolicyConfig")]
    pub access_policy_config: bool,
    // Indicates whether or not WS-Security X.509 token is supported.
    #[yaserde(prefix = "tt", rename = "X.509Token")]
    pub x_509_token: bool,
    // Indicates whether or not WS-Security SAML token is supported.
    #[yaserde(prefix = "tt", rename = "SAMLToken")]
    pub saml_token: bool,
    // Indicates whether or not WS-Security Kerberos token is supported.
    #[yaserde(prefix = "tt", rename = "KerberosToken")]
    pub kerberos_token: bool,
    // Indicates whether or not WS-Security REL token is supported.
    #[yaserde(prefix = "tt", rename = "RELToken")]
    pub rel_token: bool,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SecurityCapabilitiesExtension>,
}
impl Validate for SecurityCapabilities {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SecurityCapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "TLS1.0")]
    pub tls1_0: bool,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SecurityCapabilitiesExtension2>,
}
impl Validate for SecurityCapabilitiesExtension {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SecurityCapabilitiesExtension2 {
    #[yaserde(prefix = "tt", rename = "Dot1X")]
    pub dot_1x: bool,
    // EAP Methods supported by the device. The int values refer to the
    // EAP method type numbers (presumably the IANA EAP registry — TODO
    // confirm against the ONVIF schema documentation).
    #[yaserde(prefix = "tt", rename = "SupportedEAPMethod")]
    pub supported_eap_method: Vec<i32>,
    #[yaserde(prefix = "tt", rename = "RemoteUserHandling")]
    pub remote_user_handling: bool,
}
impl Validate for SecurityCapabilitiesExtension2 {}
// System-related capability flags.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemCapabilities {
    // Indicates whether or not WS Discovery resolve requests are supported.
    #[yaserde(prefix = "tt", rename = "DiscoveryResolve")]
    pub discovery_resolve: bool,
    // Indicates whether or not WS-Discovery Bye is supported.
    #[yaserde(prefix = "tt", rename = "DiscoveryBye")]
    pub discovery_bye: bool,
    // Indicates whether or not remote discovery is supported.
    #[yaserde(prefix = "tt", rename = "RemoteDiscovery")]
    pub remote_discovery: bool,
    // Indicates whether or not system backup is supported.
    #[yaserde(prefix = "tt", rename = "SystemBackup")]
    pub system_backup: bool,
    // Indicates whether or not system logging is supported.
    #[yaserde(prefix = "tt", rename = "SystemLogging")]
    pub system_logging: bool,
    // Indicates whether or not firmware upgrade is supported.
    #[yaserde(prefix = "tt", rename = "FirmwareUpgrade")]
    pub firmware_upgrade: bool,
    // Indicates supported ONVIF version(s).
    #[yaserde(prefix = "tt", rename = "SupportedVersions")]
    pub supported_versions: Vec<OnvifVersion>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SystemCapabilitiesExtension>,
}
impl Validate for SystemCapabilities {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemCapabilitiesExtension {
    #[yaserde(prefix = "tt", rename = "HttpFirmwareUpgrade")]
    pub http_firmware_upgrade: Option<bool>,
    #[yaserde(prefix = "tt", rename = "HttpSystemBackup")]
    pub http_system_backup: Option<bool>,
    #[yaserde(prefix = "tt", rename = "HttpSystemLogging")]
    pub http_system_logging: Option<bool>,
    #[yaserde(prefix = "tt", rename = "HttpSupportInformation")]
    pub http_support_information: Option<bool>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SystemCapabilitiesExtension2>,
}
impl Validate for SystemCapabilitiesExtension {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemCapabilitiesExtension2 {}
impl Validate for SystemCapabilitiesExtension2 {}
// ONVIF specification version (major.minor).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OnvifVersion {
    // Major version number.
    #[yaserde(prefix = "tt", rename = "Major")]
    pub major: i32,
    // Two digit minor version number.
    // If major version number is less than "16", X.0.1 maps to "01" and X.2.1
    // maps to "21" where X stands for Major version number.
    // Otherwise, minor number is month of release, such as "06" for June.
    #[yaserde(prefix = "tt", rename = "Minor")]
    pub minor: i32,
}
impl Validate for OnvifVersion {}
// Capabilities of the imaging service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingCapabilities {
    // Imaging service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
}
impl Validate for ImagingCapabilities {}
// Capabilities of the PTZ service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzcapabilities {
    // PTZ service URI.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
}
impl Validate for Ptzcapabilities {}
// Capabilities of the Device I/O service: service URI plus channel counts.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DeviceIOCapabilities {
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    #[yaserde(prefix = "tt", rename = "VideoSources")]
    pub video_sources: i32,
    #[yaserde(prefix = "tt", rename = "VideoOutputs")]
    pub video_outputs: i32,
    #[yaserde(prefix = "tt", rename = "AudioSources")]
    pub audio_sources: i32,
    #[yaserde(prefix = "tt", rename = "AudioOutputs")]
    pub audio_outputs: i32,
    #[yaserde(prefix = "tt", rename = "RelayOutputs")]
    pub relay_outputs: i32,
}
impl Validate for DeviceIOCapabilities {}
// Capabilities of the display service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DisplayCapabilities {
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Indication that the SetLayout command supports only predefined layouts.
    #[yaserde(prefix = "tt", rename = "FixedLayout")]
    pub fixed_layout: bool,
}
impl Validate for DisplayCapabilities {}
// Capabilities of the recording service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingCapabilities {
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    #[yaserde(prefix = "tt", rename = "ReceiverSource")]
    pub receiver_source: bool,
    #[yaserde(prefix = "tt", rename = "MediaProfileSource")]
    pub media_profile_source: bool,
    #[yaserde(prefix = "tt", rename = "DynamicRecordings")]
    pub dynamic_recordings: bool,
    #[yaserde(prefix = "tt", rename = "DynamicTracks")]
    pub dynamic_tracks: bool,
    #[yaserde(prefix = "tt", rename = "MaxStringLength")]
    pub max_string_length: i32,
}
impl Validate for RecordingCapabilities {}
// Capabilities of the search service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SearchCapabilities {
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    #[yaserde(prefix = "tt", rename = "MetadataSearch")]
    pub metadata_search: bool,
}
impl Validate for SearchCapabilities {}
// Capabilities of the replay service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReplayCapabilities {
    // The address of the replay service.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
}
impl Validate for ReplayCapabilities {}
// Capabilities of the receiver service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReceiverCapabilities {
    // The address of the receiver service.
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Indicates whether the device can receive RTP multicast streams.
    #[yaserde(prefix = "tt", rename = "RTP_Multicast")]
    pub rtp_multicast: bool,
    // Indicates whether the device can receive RTP/TCP streams
    #[yaserde(prefix = "tt", rename = "RTP_TCP")]
    pub rtp_tcp: bool,
    // Indicates whether the device can receive RTP/RTSP/TCP streams.
    #[yaserde(prefix = "tt", rename = "RTP_RTSP_TCP")]
    pub rtp_rtsp_tcp: bool,
    // The maximum number of receivers supported by the device.
    #[yaserde(prefix = "tt", rename = "SupportedReceivers")]
    pub supported_receivers: i32,
    // The maximum allowed length for RTSP URIs.
    #[yaserde(prefix = "tt", rename = "MaximumRTSPURILength")]
    pub maximum_rtspuri_length: i32,
}
impl Validate for ReceiverCapabilities {}
// Capabilities of the analytics device service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsDeviceCapabilities {
    #[yaserde(prefix = "tt", rename = "XAddr")]
    pub x_addr: String,
    // Obsolete property.
    #[yaserde(prefix = "tt", rename = "RuleSupport")]
    pub rule_support: Option<bool>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AnalyticsDeviceExtension>,
}
impl Validate for AnalyticsDeviceCapabilities {}
// Empty placeholder for forward-compatible extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsDeviceExtension {}
impl Validate for AnalyticsDeviceExtension {}
// Enumeration describing the available system log modes.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum SystemLogType {
// Indicates that a system log is requested.
System,
// Indicates that an access log is requested.
Access,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for SystemLogType {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
SystemLogType::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for SystemLogType {}
// A system log returned by the device, either as an attachment or as text
// (tt:SystemLog).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemLog {
// The log information as attachment data.
#[yaserde(prefix = "tt", rename = "Binary")]
pub binary: Option<AttachmentData>,
// The log information as character data.
#[yaserde(prefix = "tt", rename = "String")]
pub string: Option<String>,
}
impl Validate for SystemLog {}
// Device support information, either as an attachment or as text
// (tt:SupportInformation).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SupportInformation {
// The support information as attachment data.
#[yaserde(prefix = "tt", rename = "Binary")]
pub binary: Option<AttachmentData>,
// The support information as character data.
#[yaserde(prefix = "tt", rename = "String")]
pub string: Option<String>,
}
impl Validate for SupportInformation {}
// Binary payload carried inline as base64 text (tt:BinaryData).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BinaryData {
// base64 encoded binary data.
#[yaserde(prefix = "tt", rename = "Data")]
pub data: String,
// Fix: a comma was missing between `prefix` and `rename`, which is
// malformed yaserde attribute syntax (every other attribute in this
// file separates the key-value pairs with commas).
#[yaserde(attribute, prefix = "xmime", rename = "contentType")]
pub content_type: Option<xmime::ContentType>,
}
impl Validate for BinaryData {}
// Binary payload carried as an XOP attachment reference (tt:AttachmentData).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AttachmentData {
#[yaserde(prefix = "xop", rename = "Include")]
pub include: xop::Include,
// Fix: a comma was missing between `prefix` and `rename`, which is
// malformed yaserde attribute syntax (every other attribute in this
// file separates the key-value pairs with commas).
#[yaserde(attribute, prefix = "xmime", rename = "contentType")]
pub content_type: Option<xmime::ContentType>,
}
impl Validate for AttachmentData {}
// A named configuration backup file and its attachment payload
// (tt:BackupFile).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BackupFile {
// Name of the backup file.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: String,
// File contents as attachment data.
#[yaserde(prefix = "tt", rename = "Data")]
pub data: AttachmentData,
}
impl Validate for BackupFile {}
// List of system log URI entries (tt:SystemLogUriList).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemLogUriList {
// Zero or more SystemLog entries.
#[yaserde(prefix = "tt", rename = "SystemLog")]
pub system_log: Vec<SystemLogUri>,
}
impl Validate for SystemLogUriList {}
// A URI from which a particular type of system log can be retrieved
// (tt:SystemLogUri).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemLogUri {
// Which log this URI refers to (leading underscore avoids the `type` keyword).
#[yaserde(prefix = "tt", rename = "Type")]
pub _type: SystemLogType,
// Location of the log.
#[yaserde(prefix = "tt", rename = "Uri")]
pub uri: String,
}
impl Validate for SystemLogUri {}
// Enumeration describing the available factory default modes.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum FactoryDefaultType {
// Indicates that a hard factory default is requested.
Hard,
// Indicates that a soft factory default is requested.
Soft,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for FactoryDefaultType {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
FactoryDefaultType::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for FactoryDefaultType {}
// How the device's date and time are configured.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum SetDateTimeType {
// Indicates that the date and time are set manually.
Manual,
// Indicates that the date and time are set through NTP.
#[yaserde(rename = "NTP")]
Ntp,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for SetDateTimeType {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
SetDateTimeType::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for SetDateTimeType {}
// General date time information returned by the GetSystemDateTime method.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemDateTime {
// Indicates if the time is set manually or through NTP.
#[yaserde(prefix = "tt", rename = "DateTimeType")]
pub date_time_type: SetDateTimeType,
// Informative indicator whether daylight savings is currently on/off.
#[yaserde(prefix = "tt", rename = "DaylightSavings")]
pub daylight_savings: bool,
// Timezone information in Posix format.
#[yaserde(prefix = "tt", rename = "TimeZone")]
pub time_zone: Option<TimeZone>,
// Current system date and time in UTC format. This field is mandatory since
// version 2.0 (still Option here because older devices may omit it).
#[yaserde(prefix = "tt", rename = "UTCDateTime")]
pub utc_date_time: Option<DateTime>,
// Date and time in local format.
#[yaserde(prefix = "tt", rename = "LocalDateTime")]
pub local_date_time: Option<DateTime>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<SystemDateTimeExtension>,
}
impl Validate for SystemDateTime {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SystemDateTimeExtension {}
impl Validate for SystemDateTimeExtension {}
// Combined wall-clock value; note the schema serializes Time before Date
// (tt:DateTime).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DateTime {
#[yaserde(prefix = "tt", rename = "Time")]
pub time: Time,
#[yaserde(prefix = "tt", rename = "Date")]
pub date: Date,
}
impl Validate for DateTime {}
// Calendar date (tt:Date).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Date {
#[yaserde(prefix = "tt", rename = "Year")]
pub year: i32,
// Range is 1 to 12.
#[yaserde(prefix = "tt", rename = "Month")]
pub month: i32,
// Range is 1 to 31.
#[yaserde(prefix = "tt", rename = "Day")]
pub day: i32,
}
impl Validate for Date {}
// Time of day (tt:Time).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Time {
// Range is 0 to 23.
#[yaserde(prefix = "tt", rename = "Hour")]
pub hour: i32,
// Range is 0 to 59.
#[yaserde(prefix = "tt", rename = "Minute")]
pub minute: i32,
// Range is 0 to 61 (typically 59); values above 59 allow for leap seconds.
#[yaserde(prefix = "tt", rename = "Second")]
pub second: i32,
}
impl Validate for Time {}
// The TZ format is specified by POSIX, please refer to POSIX 1003.1 section 8.3.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct TimeZone {
// Posix timezone string.
#[yaserde(prefix = "tt", rename = "TZ")]
pub tz: String,
}
impl Validate for TimeZone {}
// Credentials used by the device when acting as a client towards a remote
// device (tt:RemoteUser).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RemoteUser {
#[yaserde(prefix = "tt", rename = "Username")]
pub username: String,
#[yaserde(prefix = "tt", rename = "Password")]
pub password: Option<String>,
#[yaserde(prefix = "tt", rename = "UseDerivedPassword")]
pub use_derived_password: bool,
}
impl Validate for RemoteUser {}
// Access level assigned to a device user account.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum UserLevel {
Administrator,
Operator,
User,
Anonymous,
Extended,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for UserLevel {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
UserLevel::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for UserLevel {}
// A device user account (tt:User).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct User {
// Username string.
#[yaserde(prefix = "tt", rename = "Username")]
pub username: String,
// Password string.
#[yaserde(prefix = "tt", rename = "Password")]
pub password: Option<String>,
// User level string.
#[yaserde(prefix = "tt", rename = "UserLevel")]
pub user_level: UserLevel,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<UserExtension>,
}
impl Validate for User {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct UserExtension {}
impl Validate for UserExtension {}
// Parameters for on-device generation of a self-signed certificate
// (tt:CertificateGenerationParameters).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateGenerationParameters {
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: Option<String>,
#[yaserde(prefix = "tt", rename = "Subject")]
pub subject: Option<String>,
#[yaserde(prefix = "tt", rename = "ValidNotBefore")]
pub valid_not_before: Option<String>,
#[yaserde(prefix = "tt", rename = "ValidNotAfter")]
pub valid_not_after: Option<String>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<CertificateGenerationParametersExtension>,
}
impl Validate for CertificateGenerationParameters {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateGenerationParametersExtension {}
impl Validate for CertificateGenerationParametersExtension {}
// An X.509 certificate identified by a device-scoped id (tt:Certificate).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Certificate {
// Certificate id.
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: String,
// base64 encoded DER representation of certificate.
#[yaserde(prefix = "tt", rename = "Certificate")]
pub certificate: BinaryData,
}
impl Validate for Certificate {}
// Whether a stored certificate is currently in use (tt:CertificateStatus).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateStatus {
// Certificate id.
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: String,
// Indicates whether or not a certificate is used in a HTTPS configuration.
#[yaserde(prefix = "tt", rename = "Status")]
pub status: bool,
}
impl Validate for CertificateStatus {}
// A certificate paired with its private key, used when uploading key
// material to the device (tt:CertificateWithPrivateKey).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateWithPrivateKey {
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: Option<String>,
#[yaserde(prefix = "tt", rename = "Certificate")]
pub certificate: BinaryData,
#[yaserde(prefix = "tt", rename = "PrivateKey")]
pub private_key: BinaryData,
}
impl Validate for CertificateWithPrivateKey {}
// Parsed metadata of a stored certificate (tt:CertificateInformation).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateInformation {
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: String,
// Issuer distinguished name.
#[yaserde(prefix = "tt", rename = "IssuerDN")]
pub issuer_dn: Option<String>,
// Subject distinguished name.
#[yaserde(prefix = "tt", rename = "SubjectDN")]
pub subject_dn: Option<String>,
#[yaserde(prefix = "tt", rename = "KeyUsage")]
pub key_usage: Option<CertificateUsage>,
#[yaserde(prefix = "tt", rename = "ExtendedKeyUsage")]
pub extended_key_usage: Option<CertificateUsage>,
#[yaserde(prefix = "tt", rename = "KeyLength")]
pub key_length: Option<i32>,
#[yaserde(prefix = "tt", rename = "Version")]
pub version: Option<String>,
#[yaserde(prefix = "tt", rename = "SerialNum")]
pub serial_num: Option<String>,
// Validity Range is from "NotBefore" to "NotAfter"; the corresponding
// DateTimeRange is from "From" to "Until"
#[yaserde(prefix = "tt", rename = "SignatureAlgorithm")]
pub signature_algorithm: Option<String>,
#[yaserde(prefix = "tt", rename = "Validity")]
pub validity: Option<DateTimeRange>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<CertificateInformationExtension>,
}
impl Validate for CertificateInformation {}
// Key-usage entry of a certificate; `critical` maps to the X.509 extension
// criticality flag (tt:CertificateUsage).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateUsage {
#[yaserde(attribute, rename = "Critical")]
pub critical: bool,
}
impl Validate for CertificateUsage {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CertificateInformationExtension {}
impl Validate for CertificateInformationExtension {}
// IEEE 802.1X network-access configuration (tt:Dot1XConfiguration).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot1XConfiguration {
#[yaserde(prefix = "tt", rename = "Dot1XConfigurationToken")]
pub dot_1x_configuration_token: ReferenceToken,
#[yaserde(prefix = "tt", rename = "Identity")]
pub identity: String,
#[yaserde(prefix = "tt", rename = "AnonymousID")]
pub anonymous_id: Option<String>,
// EAP Method type as defined in IANA EAP method type numbers.
#[yaserde(prefix = "tt", rename = "EAPMethod")]
pub eap_method: i32,
#[yaserde(prefix = "tt", rename = "CACertificateID")]
pub ca_certificate_id: Vec<String>,
#[yaserde(prefix = "tt", rename = "EAPMethodConfiguration")]
pub eap_method_configuration: Option<EapmethodConfiguration>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<Dot1XConfigurationExtension>,
}
impl Validate for Dot1XConfiguration {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Dot1XConfigurationExtension {}
impl Validate for Dot1XConfigurationExtension {}
// Per-EAP-method settings used inside Dot1XConfiguration
// (tt:EAPMethodConfiguration).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EapmethodConfiguration {
// Configuration information for TLS Method.
#[yaserde(prefix = "tt", rename = "TLSConfiguration")]
pub tls_configuration: Option<Tlsconfiguration>,
// Password for those EAP Methods that require a password. The password
// shall never be returned on a get method.
#[yaserde(prefix = "tt", rename = "Password")]
pub password: Option<String>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<EapMethodExtension>,
}
impl Validate for EapmethodConfiguration {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EapMethodExtension {}
impl Validate for EapMethodExtension {}
// TLS settings for EAP-TLS: the certificate the supplicant presents
// (tt:TLSConfiguration).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Tlsconfiguration {
#[yaserde(prefix = "tt", rename = "CertificateID")]
pub certificate_id: String,
}
impl Validate for Tlsconfiguration {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct GenericEapPwdConfigurationExtension {}
impl Validate for GenericEapPwdConfigurationExtension {}
// Logical state of a relay output.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum RelayLogicalState {
#[yaserde(rename = "active")]
Active,
#[yaserde(rename = "inactive")]
Inactive,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for RelayLogicalState {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
RelayLogicalState::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for RelayLogicalState {}
// Physical contact state a relay returns to when idle.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum RelayIdleState {
#[yaserde(rename = "closed")]
Closed,
#[yaserde(rename = "open")]
Open,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for RelayIdleState {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
RelayIdleState::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for RelayIdleState {}
// Behavioral settings for a relay output (tt:RelayOutputSettings).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RelayOutputSettings {
// 'Bistable' or 'Monostable'
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: RelayMode,
// Time after which the relay returns to its idle state if it is in
// monostable mode. If the Mode field is set to bistable mode the value of
// the parameter can be ignored.
#[yaserde(prefix = "tt", rename = "DelayTime")]
pub delay_time: xs::Duration,
// 'open' or 'closed'
#[yaserde(prefix = "tt", rename = "IdleState")]
pub idle_state: RelayIdleState,
}
impl Validate for RelayOutputSettings {}
// Operating mode of a relay output: self-resetting or latching.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum RelayMode {
Monostable,
Bistable,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for RelayMode {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
RelayMode::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for RelayMode {}
// A relay output entity together with its current settings (tt:RelayOutput).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RelayOutput {
#[yaserde(prefix = "tt", rename = "Properties")]
pub properties: RelayOutputSettings,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for RelayOutput {}
// Idle contact state of a digital input.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum DigitalIdleState {
#[yaserde(rename = "closed")]
Closed,
#[yaserde(rename = "open")]
Open,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for DigitalIdleState {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
DigitalIdleState::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for DigitalIdleState {}
// A digital input entity (tt:DigitalInput).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DigitalInput {
// Indicate the Digital IdleState status.
#[yaserde(attribute, rename = "IdleState")]
pub idle_state: Option<DigitalIdleState>,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for DigitalInput {}
// Description of a PTZ node: its coordinate spaces, preset capacity and
// feature flags (tt:PTZNode).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptznode {
// A unique identifier that is used to reference PTZ Nodes.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Option<Name>,
// A list of Coordinate Systems available for the PTZ Node. For each
// Coordinate System, the PTZ Node MUST specify its allowed range.
#[yaserde(prefix = "tt", rename = "SupportedPTZSpaces")]
pub supported_ptz_spaces: Ptzspaces,
// All preset operations MUST be available for this PTZ Node if one preset
// is supported.
#[yaserde(prefix = "tt", rename = "MaximumNumberOfPresets")]
pub maximum_number_of_presets: i32,
// A boolean operator specifying the availability of a home position. If set
// to true, the Home Position Operations MUST be available for this PTZ
// Node.
#[yaserde(prefix = "tt", rename = "HomeSupported")]
pub home_supported: bool,
// A list of supported Auxiliary commands. If the list is not empty, the
// Auxiliary Operations MUST be available for this PTZ Node.
#[yaserde(prefix = "tt", rename = "AuxiliaryCommands")]
pub auxiliary_commands: Vec<AuxiliaryData>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtznodeExtension>,
// Indication whether the HomePosition of a Node is fixed or it can be
// changed via the SetHomePosition command.
#[yaserde(attribute, rename = "FixedHomePosition")]
pub fixed_home_position: Option<bool>,
// Indication whether the Node supports the geo-referenced move command.
#[yaserde(attribute, rename = "GeoMove")]
pub geo_move: Option<bool>,
// Unique identifier referencing the physical entity.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for Ptznode {}
// Extension slot of Ptznode carrying preset-tour capability details.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtznodeExtension {
// Detail of supported Preset Tour feature.
#[yaserde(prefix = "tt", rename = "SupportedPresetTour")]
pub supported_preset_tour: Option<PtzpresetTourSupported>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtznodeExtension2>,
}
impl Validate for PtznodeExtension {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtznodeExtension2 {}
impl Validate for PtznodeExtension2 {}
// Capability description of the PTZ preset tour feature
// (tt:PTZPresetTourSupported).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourSupported {
// Indicates number of preset tours that can be created. Required preset
// tour operations shall be available for this PTZ Node if one or more
// preset tour is supported.
#[yaserde(prefix = "tt", rename = "MaximumNumberOfPresetTours")]
pub maximum_number_of_preset_tours: i32,
// Indicates which preset tour operations are available for this PTZ Node.
#[yaserde(prefix = "tt", rename = "PTZPresetTourOperation")]
pub ptz_preset_tour_operation: Vec<PtzpresetTourOperation>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtzpresetTourSupportedExtension>,
}
impl Validate for PtzpresetTourSupported {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourSupportedExtension {}
impl Validate for PtzpresetTourSupportedExtension {}
// Configuration of a PTZ node: default spaces, speeds, timeouts, limits and
// acceleration ramps (tt:PTZConfiguration). Note: "PantTilt" in
// DefaultAbsolutePantTiltPositionSpace reproduces the spelling used by the
// official ONVIF schema and must not be "corrected".
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzconfiguration {
// A mandatory reference to the PTZ Node that the PTZ Configuration belongs
// to.
#[yaserde(prefix = "tt", rename = "NodeToken")]
pub node_token: ReferenceToken,
// If the PTZ Node supports absolute Pan/Tilt movements, it shall specify
// one Absolute Pan/Tilt Position Space as default.
#[yaserde(prefix = "tt", rename = "DefaultAbsolutePantTiltPositionSpace")]
pub default_absolute_pant_tilt_position_space: Option<String>,
// If the PTZ Node supports absolute zoom movements, it shall specify one
// Absolute Zoom Position Space as default.
#[yaserde(prefix = "tt", rename = "DefaultAbsoluteZoomPositionSpace")]
pub default_absolute_zoom_position_space: Option<String>,
// If the PTZ Node supports relative Pan/Tilt movements, it shall specify
// one RelativePan/Tilt Translation Space as default.
#[yaserde(prefix = "tt", rename = "DefaultRelativePanTiltTranslationSpace")]
pub default_relative_pan_tilt_translation_space: Option<String>,
// If the PTZ Node supports relative zoom movements, it shall specify one
// Relative Zoom Translation Space as default.
#[yaserde(prefix = "tt", rename = "DefaultRelativeZoomTranslationSpace")]
pub default_relative_zoom_translation_space: Option<String>,
// If the PTZ Node supports continuous Pan/Tilt movements, it shall specify
// one Continuous Pan/Tilt Velocity Space as default.
#[yaserde(prefix = "tt", rename = "DefaultContinuousPanTiltVelocitySpace")]
pub default_continuous_pan_tilt_velocity_space: Option<String>,
// If the PTZ Node supports continuous zoom movements, it shall specify one
// Continuous Zoom Velocity Space as default.
#[yaserde(prefix = "tt", rename = "DefaultContinuousZoomVelocitySpace")]
pub default_continuous_zoom_velocity_space: Option<String>,
// If the PTZ Node supports absolute or relative PTZ movements, it shall
// specify corresponding default Pan/Tilt and Zoom speeds.
#[yaserde(prefix = "tt", rename = "DefaultPTZSpeed")]
pub default_ptz_speed: Option<Ptzspeed>,
// If the PTZ Node supports continuous movements, it shall specify a default
// timeout, after which the movement stops.
#[yaserde(prefix = "tt", rename = "DefaultPTZTimeout")]
pub default_ptz_timeout: Option<xs::Duration>,
// The Pan/Tilt limits element should be present for a PTZ Node that
// supports an absolute Pan/Tilt. If the element is present it signals the
// support for configurable Pan/Tilt limits. If limits are enabled, the
// Pan/Tilt movements shall always stay within the specified range. The
// Pan/Tilt limits are disabled by setting the limits to –INF or +INF.
#[yaserde(prefix = "tt", rename = "PanTiltLimits")]
pub pan_tilt_limits: Option<PanTiltLimits>,
// The Zoom limits element should be present for a PTZ Node that supports
// absolute zoom. If the element is present it signals the supports for
// configurable Zoom limits. If limits are enabled the zoom movements shall
// always stay within the specified range. The Zoom limits are disabled by
// setting the limits to -INF and +INF.
#[yaserde(prefix = "tt", rename = "ZoomLimits")]
pub zoom_limits: Option<ZoomLimits>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtzconfigurationExtension>,
// The optional acceleration ramp used by the device when moving.
#[yaserde(attribute, rename = "MoveRamp")]
pub move_ramp: Option<i32>,
// The optional acceleration ramp used by the device when recalling presets.
#[yaserde(attribute, rename = "PresetRamp")]
pub preset_ramp: Option<i32>,
// The optional acceleration ramp used by the device when executing
// PresetTours.
#[yaserde(attribute, rename = "PresetTourRamp")]
pub preset_tour_ramp: Option<i32>,
// User readable name. Length up to 64 characters.
#[yaserde(prefix = "tt", rename = "Name")]
pub name: Name,
// Number of internal references currently using this configuration.
#[yaserde(prefix = "tt", rename = "UseCount")]
pub use_count: i32,
// Token that uniquely references this configuration. Length up to 64
// characters.
#[yaserde(attribute, rename = "token")]
pub token: ReferenceToken,
}
impl Validate for Ptzconfiguration {}
// Extension slot of Ptzconfiguration carrying PT control direction settings.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzconfigurationExtension {
// Optional element to configure PT Control Direction related features.
#[yaserde(prefix = "tt", rename = "PTControlDirection")]
pub pt_control_direction: Option<PtcontrolDirection>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtzconfigurationExtension2>,
}
impl Validate for PtzconfigurationExtension {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzconfigurationExtension2 {}
impl Validate for PtzconfigurationExtension2 {}
// Pan/Tilt control direction settings: E-Flip and Reverse
// (tt:PTControlDirection).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtcontrolDirection {
// Optional element to configure related parameters for E-Flip.
#[yaserde(prefix = "tt", rename = "EFlip")]
pub e_flip: Option<Eflip>,
// Optional element to configure related parameters for reversing of PT
// Control Direction.
#[yaserde(prefix = "tt", rename = "Reverse")]
pub reverse: Option<Reverse>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtcontrolDirectionExtension>,
}
impl Validate for PtcontrolDirection {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtcontrolDirectionExtension {}
impl Validate for PtcontrolDirectionExtension {}
// E-Flip feature configuration (tt:EFlip).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Eflip {
// Parameter to enable/disable E-Flip feature.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: EflipMode,
}
impl Validate for Eflip {}
// Reverse feature configuration (tt:Reverse).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Reverse {
// Parameter to enable/disable Reverse feature.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: ReverseMode,
}
impl Validate for Reverse {}
// Operating mode of the E-Flip feature.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum EflipMode {
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "ON")]
On,
Extended,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for EflipMode {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
EflipMode::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for EflipMode {}
// Operating mode of the Reverse feature.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ReverseMode {
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "ON")]
On,
#[yaserde(rename = "AUTO")]
Auto,
Extended,
// Catch-all for values not covered by the schema enumeration.
__Unknown__(String),
}
impl Default for ReverseMode {
// Unrecognized until explicitly set; mirrors the other schema enums.
fn default() -> Self {
ReverseMode::__Unknown__(String::from("No valid variants"))
}
}
impl Validate for ReverseMode {}
// Valid ranges/choices for the fields of Ptzconfiguration
// (tt:PTZConfigurationOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzconfigurationOptions {
// A list of supported coordinate systems including their range limitations.
#[yaserde(prefix = "tt", rename = "Spaces")]
pub spaces: Ptzspaces,
// A timeout Range within which Timeouts are accepted by the PTZ Node.
#[yaserde(prefix = "tt", rename = "PTZTimeout")]
pub ptz_timeout: DurationRange,
// Supported options for PT Direction Control.
#[yaserde(prefix = "tt", rename = "PTControlDirection")]
pub pt_control_direction: Option<PtcontrolDirectionOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtzconfigurationOptions2>,
// The list of acceleration ramps supported by the device. The
// smallest acceleration value corresponds to the minimal index, the
// highest acceleration corresponds to the maximum index.
#[yaserde(attribute, rename = "PTZRamps")]
pub ptz_ramps: Option<IntAttrList>,
}
impl Validate for PtzconfigurationOptions {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzconfigurationOptions2 {}
impl Validate for PtzconfigurationOptions2 {}
// Supported options for PT control direction features
// (tt:PTControlDirectionOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtcontrolDirectionOptions {
// Supported options for EFlip feature.
#[yaserde(prefix = "tt", rename = "EFlip")]
pub e_flip: Option<EflipOptions>,
// Supported options for Reverse feature.
#[yaserde(prefix = "tt", rename = "Reverse")]
pub reverse: Option<ReverseOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<PtcontrolDirectionOptionsExtension>,
}
impl Validate for PtcontrolDirectionOptions {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtcontrolDirectionOptionsExtension {}
impl Validate for PtcontrolDirectionOptionsExtension {}
// Supported E-Flip modes (tt:EFlipOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EflipOptions {
// Options of EFlip mode parameter.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<EflipMode>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<EflipOptionsExtension>,
}
impl Validate for EflipOptions {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EflipOptionsExtension {}
impl Validate for EflipOptionsExtension {}
// Supported Reverse modes (tt:ReverseOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReverseOptions {
// Options of Reverse mode parameter.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<ReverseMode>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ReverseOptionsExtension>,
}
impl Validate for ReverseOptions {}
// Empty placeholder reserved by the schema for future extensions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReverseOptionsExtension {}
impl Validate for ReverseOptionsExtension {}
// Configurable movement limits for Pan/Tilt (tt:PanTiltLimits).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PanTiltLimits {
// A range of pan tilt limits.
#[yaserde(prefix = "tt", rename = "Range")]
pub range: Space2DDescription,
}
impl Validate for PanTiltLimits {}
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ZoomLimits {
// A range of zoom limit
#[yaserde(prefix = "tt", rename = "Range")]
pub range: Space1DDescription,
}
impl Validate for ZoomLimits {}
// The coordinate spaces (absolute position, relative translation,
// continuous velocity and speed) supported by a PTZ node.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzspaces {
    // The Generic Pan/Tilt Position space is provided by every PTZ node that
    // supports absolute Pan/Tilt, since it does not relate to a specific
    // physical range.
    // Instead, the range should be defined as the full range of the PTZ unit
    // normalized to the range -1 to 1 resulting in the following space
    // description.
    #[yaserde(prefix = "tt", rename = "AbsolutePanTiltPositionSpace")]
    pub absolute_pan_tilt_position_space: Vec<Space2DDescription>,
    // The Generic Zoom Position Space is provided by every PTZ node that
    // supports absolute Zoom, since it does not relate to a specific physical
    // range.
    // Instead, the range should be defined as the full range of the Zoom
    // normalized to the range 0 (wide) to 1 (tele).
    // There is no assumption about how the generic zoom range is mapped to
    // magnification, FOV or other physical zoom dimension.
    #[yaserde(prefix = "tt", rename = "AbsoluteZoomPositionSpace")]
    pub absolute_zoom_position_space: Vec<Space1DDescription>,
    // The Generic Pan/Tilt translation space is provided by every PTZ node that
    // supports relative Pan/Tilt, since it does not relate to a specific
    // physical range.
    // Instead, the range should be defined as the full positive and negative
    // translation range of the PTZ unit normalized to the range -1 to 1,
    // where positive translation would mean clockwise rotation or movement in
    // right/up direction resulting in the following space description.
    #[yaserde(prefix = "tt", rename = "RelativePanTiltTranslationSpace")]
    pub relative_pan_tilt_translation_space: Vec<Space2DDescription>,
    // The Generic Zoom Translation Space is provided by every PTZ node that
    // supports relative Zoom, since it does not relate to a specific physical
    // range.
    // Instead, the corresponding absolute range should be defined as the full
    // positive and negative translation range of the Zoom normalized to the
    // range -1 to1,
    // where a positive translation maps to a movement in TELE direction. The
    // translation is signed to indicate direction (negative is to wide,
    // positive is to tele).
    // There is no assumption about how the generic zoom range is mapped to
    // magnification, FOV or other physical zoom dimension. This results in the
    // following space description.
    #[yaserde(prefix = "tt", rename = "RelativeZoomTranslationSpace")]
    pub relative_zoom_translation_space: Vec<Space1DDescription>,
    // The generic Pan/Tilt velocity space shall be provided by every PTZ node,
    // since it does not relate to a specific physical range.
    // Instead, the range should be defined as a range of the PTZ unit’s speed
    // normalized to the range -1 to 1, where a positive velocity would map to
    // clockwise
    // rotation or movement in the right/up direction. A signed speed can be
    // independently specified for the pan and tilt component resulting in the
    // following space description.
    #[yaserde(prefix = "tt", rename = "ContinuousPanTiltVelocitySpace")]
    pub continuous_pan_tilt_velocity_space: Vec<Space2DDescription>,
    // The generic zoom velocity space specifies a zoom factor velocity without
    // knowing the underlying physical model. The range should be normalized
    // from -1 to 1,
    // where a positive velocity would map to TELE direction. A generic zoom
    // velocity space description resembles the following.
    #[yaserde(prefix = "tt", rename = "ContinuousZoomVelocitySpace")]
    pub continuous_zoom_velocity_space: Vec<Space1DDescription>,
    // The speed space specifies the speed for a Pan/Tilt movement when moving
    // to an absolute position or to a relative translation.
    // In contrast to the velocity spaces, speed spaces do not contain any
    // directional information. The speed of a combined Pan/Tilt
    // movement is represented by a single non-negative scalar value.
    #[yaserde(prefix = "tt", rename = "PanTiltSpeedSpace")]
    pub pan_tilt_speed_space: Vec<Space1DDescription>,
    // The speed space specifies the speed for a Zoom movement when moving to an
    // absolute position or to a relative translation.
    // In contrast to the velocity spaces, speed spaces do not contain any
    // directional information.
    #[yaserde(prefix = "tt", rename = "ZoomSpeedSpace")]
    pub zoom_speed_space: Vec<Space1DDescription>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzspacesExtension>,
}
impl Validate for Ptzspaces {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzspacesExtension {}
impl Validate for PtzspacesExtension {}
// Description of a two-dimensional coordinate space (e.g. pan/tilt),
// identified by a URI with a range per axis.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Space2DDescription {
    // A URI of coordinate systems.
    #[yaserde(prefix = "tt", rename = "URI")]
    pub uri: String,
    // A range of x-axis.
    #[yaserde(prefix = "tt", rename = "XRange")]
    pub x_range: FloatRange,
    // A range of y-axis.
    #[yaserde(prefix = "tt", rename = "YRange")]
    pub y_range: FloatRange,
}
impl Validate for Space2DDescription {}
// Description of a one-dimensional coordinate space (e.g. zoom),
// identified by a URI with a single-axis range.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Space1DDescription {
    // A URI of coordinate systems.
    #[yaserde(prefix = "tt", rename = "URI")]
    pub uri: String,
    // A range of x-axis.
    #[yaserde(prefix = "tt", rename = "XRange")]
    pub x_range: FloatRange,
}
impl Validate for Space1DDescription {}
// Speed vector for a combined pan/tilt and zoom movement; each component
// is optional so omitted parts leave the current movement unaffected.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzspeed {
    // Pan and tilt speed. The x component corresponds to pan and the y
    // component to tilt. If omitted in a request, the current (if any) PanTilt
    // movement should not be affected.
    #[yaserde(prefix = "tt", rename = "PanTilt")]
    pub pan_tilt: Option<Vector2D>,
    // A zoom speed. If omitted in a request, the current (if any) Zoom movement
    // should not be affected.
    #[yaserde(prefix = "tt", rename = "Zoom")]
    pub zoom: Option<Vector1D>,
}
impl Validate for Ptzspeed {}
// A stored preset position on the device, addressed by its token.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Ptzpreset {
    // A list of preset position name.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Option<Name>,
    // A list of preset position.
    #[yaserde(prefix = "tt", rename = "PTZPosition")]
    pub ptz_position: Option<Ptzvector>,
    // XML attribute uniquely identifying this preset on the device.
    #[yaserde(attribute, rename = "token")]
    pub token: Option<ReferenceToken>,
}
impl Validate for Ptzpreset {}
// AuxiliaryData: string payload for auxiliary device commands/responses
// (tt:AuxiliaryData). The schema restricts it to a maximum length of 128.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct AuxiliaryData(pub String);
impl Validate for AuxiliaryData {
    // Enforces the schema's maxLength facet (128).
    //
    // Errors: returns a MaxLength validation message when the inner string
    // exceeds 128.
    fn validate(&self) -> Result<(), String> {
        // Compare against the limit directly; the previous
        // `"128".parse().unwrap()` was a needless (and fallible) runtime
        // parse of a compile-time constant.
        // NOTE(review): `len()` counts bytes, while XSD maxLength counts
        // characters — equivalent only for ASCII payloads; confirm whether
        // non-ASCII auxiliary commands are possible.
        const MAX_LENGTH: usize = 128;
        if self.0.len() > MAX_LENGTH {
            return Err(format!(
                "MaxLength validation error. \nExpected: 0 length <= 128 \nActual: 0 length == {}",
                self.0.len()
            ));
        }
        Ok(())
    }
}
// State of a preset tour (Idle/Touring/Paused, plus schema extension).
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum PtzpresetTourState {
    Idle,
    Touring,
    Paused,
    Extended,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for PtzpresetTourState {
    // Generated default: the unknown-variant sentinel, since the schema
    // defines no default value for this enumeration.
    fn default() -> PtzpresetTourState {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for PtzpresetTourState {}
// Direction in which a preset tour traverses its tour spots.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum PtzpresetTourDirection {
    Forward,
    Backward,
    Extended,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for PtzpresetTourDirection {
    fn default() -> PtzpresetTourDirection {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for PtzpresetTourDirection {}
// Operation that can be applied to a preset tour.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum PtzpresetTourOperation {
    Start,
    Stop,
    Pause,
    Extended,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for PtzpresetTourOperation {
    fn default() -> PtzpresetTourOperation {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for PtzpresetTourOperation {}
// Definition of a PTZ preset tour: an ordered set of tour spots the device
// visits, with status, auto-start flag and starting conditions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PresetTour {
    // Readable name of the preset tour.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Option<Name>,
    // Read only parameters to indicate the status of the preset tour.
    #[yaserde(prefix = "tt", rename = "Status")]
    pub status: PtzpresetTourStatus,
    // Auto Start flag of the preset tour. True allows the preset tour to be
    // activated always.
    #[yaserde(prefix = "tt", rename = "AutoStart")]
    pub auto_start: bool,
    // Parameters to specify the detail behavior of the preset tour.
    #[yaserde(prefix = "tt", rename = "StartingCondition")]
    pub starting_condition: PtzpresetTourStartingCondition,
    // A list of detail of touring spots including preset positions.
    #[yaserde(prefix = "tt", rename = "TourSpot")]
    pub tour_spot: Vec<PtzpresetTourSpot>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourExtension>,
    // Unique identifier of this preset tour.
    #[yaserde(attribute, rename = "token")]
    pub token: Option<ReferenceToken>,
}
impl Validate for PresetTour {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourExtension {}
impl Validate for PtzpresetTourExtension {}
// A single stop on a preset tour: where to go, how fast, and how long to stay.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourSpot {
    // Detail definition of preset position of the tour spot.
    #[yaserde(prefix = "tt", rename = "PresetDetail")]
    pub preset_detail: PtzpresetTourPresetDetail,
    // Optional parameter to specify Pan/Tilt and Zoom speed on moving toward
    // this tour spot.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: Option<Ptzspeed>,
    // Optional parameter to specify time duration of staying on this tour
    // sport.
    #[yaserde(prefix = "tt", rename = "StayTime")]
    pub stay_time: Option<xs::Duration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourSpotExtension>,
}
impl Validate for PtzpresetTourSpot {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourSpotExtension {}
impl Validate for PtzpresetTourSpotExtension {}
// Wrapper around the schema choice describing how a tour-spot position is
// specified (preset token, home position, or explicit PTZ vector).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourPresetDetail {
    #[yaserde(prefix = "tt", rename = "PTZPresetTourPresetDetailChoice")]
    pub ptz_preset_tour_preset_detail_choice:
        ptz_preset_tour_preset_detail::PtzpresetTourPresetDetailChoice,
}
impl Validate for PtzpresetTourPresetDetail {}
// Module generated for the xs:choice inside PTZPresetTourPresetDetail.
pub mod ptz_preset_tour_preset_detail {
    use super::*;
    #[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
    pub enum PtzpresetTourPresetDetailChoice {
        // Option to specify the preset position with Preset Token defined in
        // advance.
        PresetToken(ReferenceToken),
        // Option to specify the preset position with the home position of this PTZ
        // Node. "False" to this parameter shall be treated as an invalid argument.
        Home(bool),
        // Option to specify the preset position with vector of PTZ node directly.
        #[yaserde(rename = "PTZPosition")]
        Ptzposition(Ptzvector),
        TypeExtension(PtzpresetTourTypeExtension),
        // Fallback variant holding the raw text of an unrecognized XML value.
        __Unknown__(String),
    }
    impl Default for PtzpresetTourPresetDetailChoice {
        fn default() -> PtzpresetTourPresetDetailChoice {
            Self::__Unknown__("No valid variants".into())
        }
    }
    impl Validate for PtzpresetTourPresetDetailChoice {}
}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourTypeExtension {}
impl Validate for PtzpresetTourTypeExtension {}
// Read-only status of a preset tour (state and current spot).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStatus {
    // Indicates state of this preset tour by Idle/Touring/Paused.
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: PtzpresetTourState,
    // Indicates a tour spot currently staying.
    #[yaserde(prefix = "tt", rename = "CurrentTourSpot")]
    pub current_tour_spot: Option<PtzpresetTourSpot>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourStatusExtension>,
}
impl Validate for PtzpresetTourStatus {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStatusExtension {}
impl Validate for PtzpresetTourStatusExtension {}
// Conditions controlling how and when a preset tour runs (recurrence,
// duration, direction, ordering).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStartingCondition {
    // Optional parameter to specify how many times the preset tour is recurred.
    #[yaserde(prefix = "tt", rename = "RecurringTime")]
    pub recurring_time: Option<i32>,
    // Optional parameter to specify how long time duration the preset tour is
    // recurred.
    #[yaserde(prefix = "tt", rename = "RecurringDuration")]
    pub recurring_duration: Option<xs::Duration>,
    // Optional parameter to choose which direction the preset tour goes.
    // Forward shall be chosen in case it is omitted.
    #[yaserde(prefix = "tt", rename = "Direction")]
    pub direction: Option<PtzpresetTourDirection>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourStartingConditionExtension>,
    // Execute presets in random order. If set to true and Direction is also
    // present, Direction will be ignored and presets of the Tour will be
    // recalled randomly.
    #[yaserde(attribute, rename = "RandomPresetOrder")]
    pub random_preset_order: Option<bool>,
}
impl Validate for PtzpresetTourStartingCondition {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStartingConditionExtension {}
impl Validate for PtzpresetTourStartingConditionExtension {}
// Capabilities a device advertises for configuring preset tours.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourOptions {
    // Indicates whether or not the AutoStart is supported.
    #[yaserde(prefix = "tt", rename = "AutoStart")]
    pub auto_start: bool,
    // Supported options for Preset Tour Starting Condition.
    #[yaserde(prefix = "tt", rename = "StartingCondition")]
    pub starting_condition: PtzpresetTourStartingConditionOptions,
    // Supported options for Preset Tour Spot.
    #[yaserde(prefix = "tt", rename = "TourSpot")]
    pub tour_spot: PtzpresetTourSpotOptions,
}
impl Validate for PtzpresetTourOptions {}
// Capabilities for an individual tour spot (position detail and stay time).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourSpotOptions {
    // Supported options for detail definition of preset position of the tour
    // spot.
    #[yaserde(prefix = "tt", rename = "PresetDetail")]
    pub preset_detail: PtzpresetTourPresetDetailOptions,
    // Supported range of stay time for a tour spot.
    #[yaserde(prefix = "tt", rename = "StayTime")]
    pub stay_time: DurationRange,
}
impl Validate for PtzpresetTourSpotOptions {}
// Capabilities for specifying a tour-spot position (tokens, home, spaces).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourPresetDetailOptions {
    // A list of available Preset Tokens for tour spots.
    #[yaserde(prefix = "tt", rename = "PresetToken")]
    pub preset_token: Vec<ReferenceToken>,
    // An option to indicate Home postion for tour spots.
    #[yaserde(prefix = "tt", rename = "Home")]
    pub home: Option<bool>,
    // Supported range of Pan and Tilt for tour spots.
    #[yaserde(prefix = "tt", rename = "PanTiltPositionSpace")]
    pub pan_tilt_position_space: Option<Space2DDescription>,
    // Supported range of Zoom for a tour spot.
    #[yaserde(prefix = "tt", rename = "ZoomPositionSpace")]
    pub zoom_position_space: Option<Space1DDescription>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourPresetDetailOptionsExtension>,
}
impl Validate for PtzpresetTourPresetDetailOptions {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourPresetDetailOptionsExtension {}
impl Validate for PtzpresetTourPresetDetailOptionsExtension {}
// Capabilities for tour starting conditions (recurrence ranges, directions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStartingConditionOptions {
    // Supported range of Recurring Time.
    #[yaserde(prefix = "tt", rename = "RecurringTime")]
    pub recurring_time: Option<IntRange>,
    // Supported range of Recurring Duration.
    #[yaserde(prefix = "tt", rename = "RecurringDuration")]
    pub recurring_duration: Option<DurationRange>,
    // Supported options for Direction of Preset Tour.
    #[yaserde(prefix = "tt", rename = "Direction")]
    pub direction: Vec<PtzpresetTourDirection>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PtzpresetTourStartingConditionOptionsExtension>,
}
impl Validate for PtzpresetTourStartingConditionOptions {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpresetTourStartingConditionOptionsExtension {}
impl Validate for PtzpresetTourStartingConditionOptionsExtension {}
// Current imaging status; in the ver10 schema this carries focus status only.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingStatus {
    #[yaserde(prefix = "tt", rename = "FocusStatus")]
    pub focus_status: FocusStatus,
}
impl Validate for ImagingStatus {}
// Status of the lens focus: position, movement state and error text.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusStatus {
    // Status of focus position.
    #[yaserde(prefix = "tt", rename = "Position")]
    pub position: f64,
    // Status of focus MoveStatus.
    #[yaserde(prefix = "tt", rename = "MoveStatus")]
    pub move_status: MoveStatus,
    // Error status of focus.
    #[yaserde(prefix = "tt", rename = "Error")]
    pub error: String,
}
impl Validate for FocusStatus {}
// Focus settings: autofocus mode, default speed and near/far limits.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusConfiguration {
    #[yaserde(prefix = "tt", rename = "AutoFocusMode")]
    pub auto_focus_mode: AutoFocusMode,
    #[yaserde(prefix = "tt", rename = "DefaultSpeed")]
    pub default_speed: f64,
    // Parameter to set autofocus near limit (unit: meter).
    #[yaserde(prefix = "tt", rename = "NearLimit")]
    pub near_limit: f64,
    // Parameter to set autofocus far limit (unit: meter).
    // If set to 0.0, infinity will be used.
    #[yaserde(prefix = "tt", rename = "FarLimit")]
    pub far_limit: f64,
}
impl Validate for FocusConfiguration {}
// Autofocus mode; serialized as the schema's upper-case AUTO/MANUAL tokens.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum AutoFocusMode {
    #[yaserde(rename = "AUTO")]
    Auto,
    #[yaserde(rename = "MANUAL")]
    Manual,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for AutoFocusMode {
    fn default() -> AutoFocusMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for AutoFocusMode {}
// Additional autofocus behavior modes.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Afmodes {
    // Focus of a moving camera is updated only once after stopping a pan, tilt
    // or zoom movement.
    OnceAfterMove,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for Afmodes {
    fn default() -> Afmodes {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for Afmodes {}
// Imaging settings of a video source; every element is optional so a request
// can update a subset of parameters.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettings {
    // Enabled/disabled BLC mode (on/off).
    #[yaserde(prefix = "tt", rename = "BacklightCompensation")]
    pub backlight_compensation: Option<BacklightCompensation>,
    // Image brightness (unit unspecified).
    #[yaserde(prefix = "tt", rename = "Brightness")]
    pub brightness: Option<f64>,
    // Color saturation of the image (unit unspecified).
    #[yaserde(prefix = "tt", rename = "ColorSaturation")]
    pub color_saturation: Option<f64>,
    // Contrast of the image (unit unspecified).
    #[yaserde(prefix = "tt", rename = "Contrast")]
    pub contrast: Option<f64>,
    // Exposure mode of the device.
    #[yaserde(prefix = "tt", rename = "Exposure")]
    pub exposure: Option<Exposure>,
    // Focus configuration.
    #[yaserde(prefix = "tt", rename = "Focus")]
    pub focus: Option<FocusConfiguration>,
    // Infrared Cutoff Filter settings.
    #[yaserde(prefix = "tt", rename = "IrCutFilter")]
    pub ir_cut_filter: Option<IrCutFilterMode>,
    // Sharpness of the Video image.
    #[yaserde(prefix = "tt", rename = "Sharpness")]
    pub sharpness: Option<f64>,
    // WDR settings.
    #[yaserde(prefix = "tt", rename = "WideDynamicRange")]
    pub wide_dynamic_range: Option<WideDynamicRange>,
    // White balance settings.
    #[yaserde(prefix = "tt", rename = "WhiteBalance")]
    pub white_balance: Option<WhiteBalance>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ImagingSettingsExtension>,
}
impl Validate for ImagingSettings {}
// Empty extension point reserved by the ONVIF ver10 schema.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettingsExtension {}
impl Validate for ImagingSettingsExtension {}
// Exposure configuration: auto-algorithm limits plus fixed manual values.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Exposure {
    // Exposure Mode
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: ExposureMode,
    // The exposure priority mode (low noise/framerate).
    #[yaserde(prefix = "tt", rename = "Priority")]
    pub priority: ExposurePriority,
    // Rectangular exposure mask.
    #[yaserde(prefix = "tt", rename = "Window")]
    pub window: Rectangle,
    // Minimum value of exposure time range allowed to be used by the algorithm.
    #[yaserde(prefix = "tt", rename = "MinExposureTime")]
    pub min_exposure_time: f64,
    // Maximum value of exposure time range allowed to be used by the algorithm.
    #[yaserde(prefix = "tt", rename = "MaxExposureTime")]
    pub max_exposure_time: f64,
    // Minimum value of the sensor gain range that is allowed to be used by the
    // algorithm.
    #[yaserde(prefix = "tt", rename = "MinGain")]
    pub min_gain: f64,
    // Maximum value of the sensor gain range that is allowed to be used by the
    // algorithm.
    #[yaserde(prefix = "tt", rename = "MaxGain")]
    pub max_gain: f64,
    // Minimum value of the iris range allowed to be used by the algorithm.
    #[yaserde(prefix = "tt", rename = "MinIris")]
    pub min_iris: f64,
    // Maximum value of the iris range allowed to be used by the algorithm.
    #[yaserde(prefix = "tt", rename = "MaxIris")]
    pub max_iris: f64,
    // The fixed exposure time used by the image sensor (μs).
    #[yaserde(prefix = "tt", rename = "ExposureTime")]
    pub exposure_time: f64,
    // The fixed gain used by the image sensor (dB).
    #[yaserde(prefix = "tt", rename = "Gain")]
    pub gain: f64,
    // The fixed attenuation of input light affected by the iris (dB). 0dB maps
    // to a fully opened iris.
    #[yaserde(prefix = "tt", rename = "Iris")]
    pub iris: f64,
}
impl Validate for Exposure {}
// On/off switch for wide dynamic range; serialized as OFF/ON tokens.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum WideDynamicMode {
    #[yaserde(rename = "OFF")]
    Off,
    #[yaserde(rename = "ON")]
    On,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for WideDynamicMode {
    fn default() -> WideDynamicMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for WideDynamicMode {}
// Wide dynamic range settings (mode plus optional level).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WideDynamicRange {
    // White dynamic range (on/off)
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: WideDynamicMode,
    // Optional level parameter (unitless)
    #[yaserde(prefix = "tt", rename = "Level")]
    pub level: f64,
}
impl Validate for WideDynamicRange {}
// Enumeration describing the available backlight compenstation modes.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum BacklightCompensationMode {
    // Backlight compensation is disabled.
    #[yaserde(rename = "OFF")]
    Off,
    // Backlight compensation is enabled.
    #[yaserde(rename = "ON")]
    On,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for BacklightCompensationMode {
    fn default() -> BacklightCompensationMode {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for BacklightCompensationMode {}
// Backlight compensation settings (mode plus optional level).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BacklightCompensation {
    // Backlight compensation mode (on/off).
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: BacklightCompensationMode,
    // Optional level parameter (unit unspecified).
    #[yaserde(prefix = "tt", rename = "Level")]
    pub level: f64,
}
impl Validate for BacklightCompensation {}
// Trade-off preference for automatic exposure: low noise vs. frame rate.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ExposurePriority {
    LowNoise,
    FrameRate,
    // Fallback variant holding the raw text of an unrecognized XML value.
    __Unknown__(String),
}
impl Default for ExposurePriority {
    fn default() -> ExposurePriority {
        Self::__Unknown__("No valid variants".into())
    }
}
impl Validate for ExposurePriority {}
// Valid ranges/choices a device advertises for each imaging setting.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions {
    #[yaserde(prefix = "tt", rename = "BacklightCompensation")]
    pub backlight_compensation: BacklightCompensationOptions,
    #[yaserde(prefix = "tt", rename = "Brightness")]
    pub brightness: FloatRange,
    #[yaserde(prefix = "tt", rename = "ColorSaturation")]
    pub color_saturation: FloatRange,
    #[yaserde(prefix = "tt", rename = "Contrast")]
    pub contrast: FloatRange,
    #[yaserde(prefix = "tt", rename = "Exposure")]
    pub exposure: ExposureOptions,
    #[yaserde(prefix = "tt", rename = "Focus")]
    pub focus: FocusOptions,
    #[yaserde(prefix = "tt", rename = "IrCutFilterModes")]
    pub ir_cut_filter_modes: Vec<IrCutFilterMode>,
    #[yaserde(prefix = "tt", rename = "Sharpness")]
    pub sharpness: FloatRange,
    #[yaserde(prefix = "tt", rename = "WideDynamicRange")]
    pub wide_dynamic_range: WideDynamicRangeOptions,
    #[yaserde(prefix = "tt", rename = "WhiteBalance")]
    pub white_balance: WhiteBalanceOptions,
}
impl Validate for ImagingOptions {}
// Supported modes and level range for wide dynamic range.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WideDynamicRangeOptions {
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Vec<WideDynamicMode>,
    #[yaserde(prefix = "tt", rename = "Level")]
    pub level: FloatRange,
}
impl Validate for WideDynamicRangeOptions {}
// Supported modes and level range for backlight compensation.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BacklightCompensationOptions {
    // NOTE(review): the element type here is Vec<WideDynamicMode> (not
    // BacklightCompensationMode); this mirrors the generator's source schema —
    // both enumerations are ON/OFF — but confirm against the ver10 schema
    // before relying on the distinction.
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Vec<WideDynamicMode>,
    #[yaserde(prefix = "tt", rename = "Level")]
    pub level: FloatRange,
}
impl Validate for BacklightCompensationOptions {}
// Supported autofocus modes and valid ranges for focus parameters.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusOptions {
    #[yaserde(prefix = "tt", rename = "AutoFocusModes")]
    pub auto_focus_modes: Vec<AutoFocusMode>,
    #[yaserde(prefix = "tt", rename = "DefaultSpeed")]
    pub default_speed: FloatRange,
    #[yaserde(prefix = "tt", rename = "NearLimit")]
    pub near_limit: FloatRange,
    #[yaserde(prefix = "tt", rename = "FarLimit")]
    pub far_limit: FloatRange,
}
impl Validate for FocusOptions {}
// Supported modes, priorities and value ranges for exposure parameters.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ExposureOptions {
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Vec<ExposureMode>,
    #[yaserde(prefix = "tt", rename = "Priority")]
    pub priority: Vec<ExposurePriority>,
    #[yaserde(prefix = "tt", rename = "MinExposureTime")]
    pub min_exposure_time: FloatRange,
    #[yaserde(prefix = "tt", rename = "MaxExposureTime")]
    pub max_exposure_time: FloatRange,
    #[yaserde(prefix = "tt", rename = "MinGain")]
    pub min_gain: FloatRange,
    #[yaserde(prefix = "tt", rename = "MaxGain")]
    pub max_gain: FloatRange,
    #[yaserde(prefix = "tt", rename = "MinIris")]
    pub min_iris: FloatRange,
    #[yaserde(prefix = "tt", rename = "MaxIris")]
    pub max_iris: FloatRange,
    #[yaserde(prefix = "tt", rename = "ExposureTime")]
    pub exposure_time: FloatRange,
    #[yaserde(prefix = "tt", rename = "Gain")]
    pub gain: FloatRange,
    #[yaserde(prefix = "tt", rename = "Iris")]
    pub iris: FloatRange,
}
impl Validate for ExposureOptions {}
// Supported modes and gain ranges for white balance.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalanceOptions {
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: Vec<WhiteBalanceMode>,
    #[yaserde(prefix = "tt", rename = "YrGain")]
    pub yr_gain: FloatRange,
    #[yaserde(prefix = "tt", rename = "YbGain")]
    pub yb_gain: FloatRange,
}
impl Validate for WhiteBalanceOptions {}
// A focus move request; exactly one of the three control styles is expected
// to be supplied (absolute, relative or continuous).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusMove {
    // Parameters for the absolute focus control.
    #[yaserde(prefix = "tt", rename = "Absolute")]
    pub absolute: Option<AbsoluteFocus>,
    // Parameters for the relative focus control.
    #[yaserde(prefix = "tt", rename = "Relative")]
    pub relative: Option<RelativeFocus>,
    // Parameter for the continuous focus control.
    #[yaserde(prefix = "tt", rename = "Continuous")]
    pub continuous: Option<ContinuousFocus>,
}
impl Validate for FocusMove {}
// Absolute focus move: target position with optional speed.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AbsoluteFocus {
    // Position parameter for the absolute focus control.
    #[yaserde(prefix = "tt", rename = "Position")]
    pub position: f64,
    // Speed parameter for the absolute focus control.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: Option<f64>,
}
impl Validate for AbsoluteFocus {}
// Relative focus move: signed distance with optional speed.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RelativeFocus {
    // Distance parameter for the relative focus control.
    #[yaserde(prefix = "tt", rename = "Distance")]
    pub distance: f64,
    // Speed parameter for the relative focus control.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: Option<f64>,
}
impl Validate for RelativeFocus {}
// Continuous focus move: speed only; motion continues until stopped.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ContinuousFocus {
    // Speed parameter for the Continuous focus control.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: f64,
}
impl Validate for ContinuousFocus {}
// Valid ranges for each supported focus-move control style.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MoveOptions {
    #[yaserde(prefix = "tt", rename = "Absolute")]
    pub absolute: Option<AbsoluteFocusOptions>,
    #[yaserde(prefix = "tt", rename = "Relative")]
    pub relative: Option<RelativeFocusOptions>,
    #[yaserde(prefix = "tt", rename = "Continuous")]
    pub continuous: Option<ContinuousFocusOptions>,
}
impl Validate for MoveOptions {}
// Valid ranges for absolute focus moves.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AbsoluteFocusOptions {
    // Valid ranges of the position.
    #[yaserde(prefix = "tt", rename = "Position")]
    pub position: FloatRange,
    // Valid ranges of the speed.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: Option<FloatRange>,
}
impl Validate for AbsoluteFocusOptions {}
// Valid ranges for relative focus moves.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RelativeFocusOptions {
    // Valid ranges of the distance.
    #[yaserde(prefix = "tt", rename = "Distance")]
    pub distance: FloatRange,
    // Valid ranges of the speed.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: FloatRange,
}
impl Validate for RelativeFocusOptions {}
// Valid speed range for continuous focus moves.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ContinuousFocusOptions {
    // Valid ranges of the speed.
    #[yaserde(prefix = "tt", rename = "Speed")]
    pub speed: FloatRange,
}
impl Validate for ContinuousFocusOptions {}
// Exposure control mode (tt:ExposureMode): automatic or manual.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ExposureMode {
#[yaserde(rename = "AUTO")]
Auto,
#[yaserde(rename = "MANUAL")]
Manual,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for ExposureMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> ExposureMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ExposureMode {}
// Generic enabled/disabled switch (tt:Enabled).
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Enabled {
#[yaserde(rename = "ENABLED")]
Enabled,
#[yaserde(rename = "DISABLED")]
Disabled,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for Enabled {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> Enabled {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for Enabled {}
// White balance mode (tt:WhiteBalanceMode): automatic or manual.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum WhiteBalanceMode {
#[yaserde(rename = "AUTO")]
Auto,
#[yaserde(rename = "MANUAL")]
Manual,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for WhiteBalanceMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> WhiteBalanceMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for WhiteBalanceMode {}
// Infrared cut-off filter mode (tt:IrCutFilterMode).
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum IrCutFilterMode {
#[yaserde(rename = "ON")]
On,
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "AUTO")]
Auto,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for IrCutFilterMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> IrCutFilterMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for IrCutFilterMode {}
// White balance settings (tt:WhiteBalance); the gain fields apply in
// manual mode.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalance {
// Auto whitebalancing mode (auto/manual).
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: WhiteBalanceMode,
// Rgain (unitless).
#[yaserde(prefix = "tt", rename = "CrGain")]
pub cr_gain: f64,
// Bgain (unitless).
#[yaserde(prefix = "tt", rename = "CbGain")]
pub cb_gain: f64,
}
impl Validate for WhiteBalance {}
// Current imaging status report (tt:ImagingStatus20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingStatus20 {
// Status of focus.
#[yaserde(prefix = "tt", rename = "FocusStatus20")]
pub focus_status_20: Option<FocusStatus20>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingStatus20Extension>,
}
impl Validate for ImagingStatus20 {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingStatus20Extension {}
impl Validate for ImagingStatus20Extension {}
// Focus status report (tt:FocusStatus20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusStatus20 {
// Status of focus position.
#[yaserde(prefix = "tt", rename = "Position")]
pub position: f64,
// Status of focus MoveStatus.
#[yaserde(prefix = "tt", rename = "MoveStatus")]
pub move_status: MoveStatus,
// Error status of focus.
#[yaserde(prefix = "tt", rename = "Error")]
pub error: Option<String>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<FocusStatus20Extension>,
}
impl Validate for FocusStatus20 {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusStatus20Extension {}
impl Validate for FocusStatus20Extension {}
// Type describing the ImagingSettings of a VideoSource. The supported options
// and ranges can be obtained via the GetOptions command.
// All members are optional: only the settings present in the XML are carried.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettings20 {
// Enabled/disabled BLC mode (on/off).
#[yaserde(prefix = "tt", rename = "BacklightCompensation")]
pub backlight_compensation: Option<BacklightCompensation20>,
// Image brightness (unit unspecified).
#[yaserde(prefix = "tt", rename = "Brightness")]
pub brightness: Option<f64>,
// Color saturation of the image (unit unspecified).
#[yaserde(prefix = "tt", rename = "ColorSaturation")]
pub color_saturation: Option<f64>,
// Contrast of the image (unit unspecified).
#[yaserde(prefix = "tt", rename = "Contrast")]
pub contrast: Option<f64>,
// Exposure mode of the device.
#[yaserde(prefix = "tt", rename = "Exposure")]
pub exposure: Option<Exposure20>,
// Focus configuration.
#[yaserde(prefix = "tt", rename = "Focus")]
pub focus: Option<FocusConfiguration20>,
// Infrared Cutoff Filter settings.
#[yaserde(prefix = "tt", rename = "IrCutFilter")]
pub ir_cut_filter: Option<IrCutFilterMode>,
// Sharpness of the Video image.
#[yaserde(prefix = "tt", rename = "Sharpness")]
pub sharpness: Option<f64>,
// WDR settings.
#[yaserde(prefix = "tt", rename = "WideDynamicRange")]
pub wide_dynamic_range: Option<WideDynamicRange20>,
// White balance settings.
#[yaserde(prefix = "tt", rename = "WhiteBalance")]
pub white_balance: Option<WhiteBalance20>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingSettingsExtension20>,
}
impl Validate for ImagingSettings20 {}
// First-level extension of ImagingSettings20 (nested extension chain).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettingsExtension20 {
// Optional element to configure Image Stabilization feature.
#[yaserde(prefix = "tt", rename = "ImageStabilization")]
pub image_stabilization: Option<ImageStabilization>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingSettingsExtension202>,
}
impl Validate for ImagingSettingsExtension20 {}
// Second-level extension of ImagingSettings20.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettingsExtension202 {
// An optional parameter applied to only auto mode to adjust timing of
// toggling Ir cut filter. (Vec: the schema allows repeated elements.)
#[yaserde(prefix = "tt", rename = "IrCutFilterAutoAdjustment")]
pub ir_cut_filter_auto_adjustment: Vec<IrCutFilterAutoAdjustment>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingSettingsExtension203>,
}
impl Validate for ImagingSettingsExtension202 {}
// Third-level extension of ImagingSettings20.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettingsExtension203 {
// Optional element to configure Image Contrast Compensation.
#[yaserde(prefix = "tt", rename = "ToneCompensation")]
pub tone_compensation: Option<ToneCompensation>,
// Optional element to configure Image Defogging.
#[yaserde(prefix = "tt", rename = "Defogging")]
pub defogging: Option<Defogging>,
// Optional element to configure Image Noise Reduction.
#[yaserde(prefix = "tt", rename = "NoiseReduction")]
pub noise_reduction: Option<NoiseReduction>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingSettingsExtension204>,
}
impl Validate for ImagingSettingsExtension203 {}
// Fourth-level extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingSettingsExtension204 {}
impl Validate for ImagingSettingsExtension204 {}
// Image Stabilization settings (tt:ImageStabilization).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImageStabilization {
// Parameter to enable/disable Image Stabilization feature.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: ImageStabilizationMode,
// Optional level parameter (unit unspecified)
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<f64>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImageStabilizationExtension>,
}
impl Validate for ImageStabilization {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImageStabilizationExtension {}
impl Validate for ImageStabilizationExtension {}
// Image Stabilization mode (tt:ImageStabilizationMode).
// Extended has no rename attribute, so it maps to the literal "Extended".
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ImageStabilizationMode {
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "ON")]
On,
#[yaserde(rename = "AUTO")]
Auto,
Extended,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for ImageStabilizationMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> ImageStabilizationMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ImageStabilizationMode {}
// Adjustment parameters for the Ir cut filter auto mode
// (tt:IrCutFilterAutoAdjustment).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IrCutFilterAutoAdjustment {
// Specifies which boundaries to automatically toggle Ir cut filter
// following parameters are applied to. Its options shall be chosen from
// tt:IrCutFilterAutoBoundaryType.
// (Stringly-typed in the schema rather than the enum below.)
#[yaserde(prefix = "tt", rename = "BoundaryType")]
pub boundary_type: String,
// Adjusts boundary exposure level for toggling Ir cut filter to on/off
// specified with unitless normalized value from +1.0 to -1.0. Zero is
// default and -1.0 is the darkest adjustment (Unitless).
#[yaserde(prefix = "tt", rename = "BoundaryOffset")]
pub boundary_offset: Option<f64>,
// Delay time of toggling Ir cut filter to on/off after crossing of the
// boundary exposure levels.
#[yaserde(prefix = "tt", rename = "ResponseTime")]
pub response_time: Option<xs::Duration>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<IrCutFilterAutoAdjustmentExtension>,
}
impl Validate for IrCutFilterAutoAdjustment {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IrCutFilterAutoAdjustmentExtension {}
impl Validate for IrCutFilterAutoAdjustmentExtension {}
// Boundary type for Ir cut filter auto adjustment
// (tt:IrCutFilterAutoBoundaryType). Variants serialize as their literal names.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum IrCutFilterAutoBoundaryType {
Common,
ToOn,
ToOff,
Extended,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for IrCutFilterAutoBoundaryType {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> IrCutFilterAutoBoundaryType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for IrCutFilterAutoBoundaryType {}
// Type describing whether WDR mode is enabled or disabled (on/off).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WideDynamicRange20 {
// Wide dynamic range mode (on/off).
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: WideDynamicMode,
// Optional level parameter (unit unspecified).
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<f64>,
}
impl Validate for WideDynamicRange20 {}
// Type describing whether BLC mode is enabled or disabled (on/off).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BacklightCompensation20 {
// Backlight compensation mode (on/off).
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: BacklightCompensationMode,
// Optional level parameter (unit unspecified).
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<f64>,
}
impl Validate for BacklightCompensation20 {}
// Type describing the exposure settings.
// The Min*/Max* range fields apply in auto mode; the fixed ExposureTime /
// Gain / Iris fields apply in manual mode.
// NOTE(review): mode split inferred from the field comments — confirm
// against the ONVIF Imaging Service specification.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Exposure20 {
// Exposure Mode
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: ExposureMode,
// The exposure priority mode (low noise/framerate).
#[yaserde(prefix = "tt", rename = "Priority")]
pub priority: Option<ExposurePriority>,
// Rectangular exposure mask.
#[yaserde(prefix = "tt", rename = "Window")]
pub window: Option<Rectangle>,
// Minimum value of exposure time range allowed to be used by the algorithm.
#[yaserde(prefix = "tt", rename = "MinExposureTime")]
pub min_exposure_time: Option<f64>,
// Maximum value of exposure time range allowed to be used by the algorithm.
#[yaserde(prefix = "tt", rename = "MaxExposureTime")]
pub max_exposure_time: Option<f64>,
// Minimum value of the sensor gain range that is allowed to be used by the
// algorithm.
#[yaserde(prefix = "tt", rename = "MinGain")]
pub min_gain: Option<f64>,
// Maximum value of the sensor gain range that is allowed to be used by the
// algorithm.
#[yaserde(prefix = "tt", rename = "MaxGain")]
pub max_gain: Option<f64>,
// Minimum value of the iris range allowed to be used by the algorithm. 0dB
// maps to a fully opened iris and positive values map to higher
// attenuation.
#[yaserde(prefix = "tt", rename = "MinIris")]
pub min_iris: Option<f64>,
// Maximum value of the iris range allowed to be used by the algorithm. 0dB
// maps to a fully opened iris and positive values map to higher
// attenuation.
#[yaserde(prefix = "tt", rename = "MaxIris")]
pub max_iris: Option<f64>,
// The fixed exposure time used by the image sensor (μs).
#[yaserde(prefix = "tt", rename = "ExposureTime")]
pub exposure_time: Option<f64>,
// The fixed gain used by the image sensor (dB).
#[yaserde(prefix = "tt", rename = "Gain")]
pub gain: Option<f64>,
// The fixed attenuation of input light affected by the iris (dB). 0dB maps
// to a fully opened iris and positive values map to higher attenuation.
#[yaserde(prefix = "tt", rename = "Iris")]
pub iris: Option<f64>,
}
impl Validate for Exposure20 {}
// Tone (contrast) compensation settings (tt:ToneCompensation).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ToneCompensation {
// Parameter to enable/disable or automatic ToneCompensation feature. Its
// options shall be chosen from tt:ToneCompensationMode Type.
// (Stringly-typed in the schema rather than the enum below.)
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: String,
// Optional level parameter specified with unitless normalized value from
// 0.0 to +1.0.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<f64>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ToneCompensationExtension>,
}
impl Validate for ToneCompensation {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ToneCompensationExtension {}
impl Validate for ToneCompensationExtension {}
// Tone compensation mode (tt:ToneCompensationMode).
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ToneCompensationMode {
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "ON")]
On,
#[yaserde(rename = "AUTO")]
Auto,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for ToneCompensationMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> ToneCompensationMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ToneCompensationMode {}
// Image defogging settings (tt:Defogging).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Defogging {
// Parameter to enable/disable or automatic Defogging feature. Its options
// shall be chosen from tt:DefoggingMode Type.
// (Stringly-typed in the schema rather than the enum below.)
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: String,
// Optional level parameter specified with unitless normalized value from
// 0.0 to +1.0.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<f64>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<DefoggingExtension>,
}
impl Validate for Defogging {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DefoggingExtension {}
impl Validate for DefoggingExtension {}
// Defogging mode (tt:DefoggingMode).
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum DefoggingMode {
#[yaserde(rename = "OFF")]
Off,
#[yaserde(rename = "ON")]
On,
#[yaserde(rename = "AUTO")]
Auto,
// Fallback carrying any XML value that matches no variant above.
__Unknown__(String),
}
impl Default for DefoggingMode {
// Default is the invalid sentinel; meaningful values come from deserialization.
fn default() -> DefoggingMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for DefoggingMode {}
// Noise reduction settings (tt:NoiseReduction).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NoiseReduction {
// Level parameter specified with unitless normalized value from 0.0 to
// +1.0. Level=0 means no noise reduction or minimal noise reduction.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: f64,
}
impl Validate for NoiseReduction {}
// Valid ranges/choices for ImagingSettings20 (tt:ImagingOptions20); a
// member is absent when the device does not support the corresponding
// setting.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions20 {
// Valid range of Backlight Compensation.
#[yaserde(prefix = "tt", rename = "BacklightCompensation")]
pub backlight_compensation: Option<BacklightCompensationOptions20>,
// Valid range of Brightness.
#[yaserde(prefix = "tt", rename = "Brightness")]
pub brightness: Option<FloatRange>,
// Valid range of Color Saturation.
#[yaserde(prefix = "tt", rename = "ColorSaturation")]
pub color_saturation: Option<FloatRange>,
// Valid range of Contrast.
#[yaserde(prefix = "tt", rename = "Contrast")]
pub contrast: Option<FloatRange>,
// Valid range of Exposure.
#[yaserde(prefix = "tt", rename = "Exposure")]
pub exposure: Option<ExposureOptions20>,
// Valid range of Focus.
#[yaserde(prefix = "tt", rename = "Focus")]
pub focus: Option<FocusOptions20>,
// Valid range of IrCutFilterModes.
#[yaserde(prefix = "tt", rename = "IrCutFilterModes")]
pub ir_cut_filter_modes: Vec<IrCutFilterMode>,
// Valid range of Sharpness.
#[yaserde(prefix = "tt", rename = "Sharpness")]
pub sharpness: Option<FloatRange>,
// Valid range of WideDynamicRange.
#[yaserde(prefix = "tt", rename = "WideDynamicRange")]
pub wide_dynamic_range: Option<WideDynamicRangeOptions20>,
// Valid range of WhiteBalance.
#[yaserde(prefix = "tt", rename = "WhiteBalance")]
pub white_balance: Option<WhiteBalanceOptions20>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingOptions20Extension>,
}
impl Validate for ImagingOptions20 {}
// First-level extension of ImagingOptions20 (nested extension chain).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions20Extension {
// Options of parameters for Image Stabilization feature.
#[yaserde(prefix = "tt", rename = "ImageStabilization")]
pub image_stabilization: Option<ImageStabilizationOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingOptions20Extension2>,
}
impl Validate for ImagingOptions20Extension {}
// Second-level extension of ImagingOptions20.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions20Extension2 {
// Options of parameters for adjustment of Ir cut filter auto mode.
#[yaserde(prefix = "tt", rename = "IrCutFilterAutoAdjustment")]
pub ir_cut_filter_auto_adjustment: Option<IrCutFilterAutoAdjustmentOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingOptions20Extension3>,
}
impl Validate for ImagingOptions20Extension2 {}
// Third-level extension of ImagingOptions20.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions20Extension3 {
// Options of parameters for Tone Compensation feature.
#[yaserde(prefix = "tt", rename = "ToneCompensationOptions")]
pub tone_compensation_options: Option<ToneCompensationOptions>,
// Options of parameters for Defogging feature.
#[yaserde(prefix = "tt", rename = "DefoggingOptions")]
pub defogging_options: Option<DefoggingOptions>,
// Options of parameter for Noise Reduction feature.
#[yaserde(prefix = "tt", rename = "NoiseReductionOptions")]
pub noise_reduction_options: Option<NoiseReductionOptions>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImagingOptions20Extension4>,
}
impl Validate for ImagingOptions20Extension3 {}
// Fourth-level extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImagingOptions20Extension4 {}
impl Validate for ImagingOptions20Extension4 {}
// Supported options for Image Stabilization (tt:ImageStabilizationOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImageStabilizationOptions {
// Supported options of Image Stabilization mode parameter.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<ImageStabilizationMode>,
// Valid range of the Image Stabilization.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<FloatRange>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<ImageStabilizationOptionsExtension>,
}
impl Validate for ImageStabilizationOptions {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ImageStabilizationOptionsExtension {}
impl Validate for ImageStabilizationOptionsExtension {}
// Supported options for Ir cut filter auto adjustment
// (tt:IrCutFilterAutoAdjustmentOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IrCutFilterAutoAdjustmentOptions {
// Supported options of boundary types for adjustment of Ir cut filter auto
// mode. The options shall be chosen from tt:IrCutFilterAutoBoundaryType.
#[yaserde(prefix = "tt", rename = "BoundaryType")]
pub boundary_type: Vec<String>,
// Indicates whether or not boundary offset for toggling Ir cut filter is
// supported.
#[yaserde(prefix = "tt", rename = "BoundaryOffset")]
pub boundary_offset: Option<bool>,
// Supported range of delay time for toggling Ir cut filter.
#[yaserde(prefix = "tt", rename = "ResponseTimeRange")]
pub response_time_range: Option<DurationRange>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<IrCutFilterAutoAdjustmentOptionsExtension>,
}
impl Validate for IrCutFilterAutoAdjustmentOptions {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct IrCutFilterAutoAdjustmentOptionsExtension {}
impl Validate for IrCutFilterAutoAdjustmentOptionsExtension {}
// Supported options for WideDynamicRange20 (tt:WideDynamicRangeOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WideDynamicRangeOptions20 {
// Supported WDR modes.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<WideDynamicMode>,
// Valid range of the level parameter, when supported.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<FloatRange>,
}
impl Validate for WideDynamicRangeOptions20 {}
// Supported options for BacklightCompensation20
// (tt:BacklightCompensationOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct BacklightCompensationOptions20 {
// 'ON' or 'OFF'
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<BacklightCompensationMode>,
// Level range of BacklightCompensation.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: Option<FloatRange>,
}
impl Validate for BacklightCompensationOptions20 {}
// Supported options and value ranges for Exposure20 (tt:ExposureOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ExposureOptions20 {
// Exposure Mode
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<ExposureMode>,
// The exposure priority mode (low noise/framerate).
#[yaserde(prefix = "tt", rename = "Priority")]
pub priority: Vec<ExposurePriority>,
// Valid range of the Minimum ExposureTime.
#[yaserde(prefix = "tt", rename = "MinExposureTime")]
pub min_exposure_time: Option<FloatRange>,
// Valid range of the Maximum ExposureTime.
#[yaserde(prefix = "tt", rename = "MaxExposureTime")]
pub max_exposure_time: Option<FloatRange>,
// Valid range of the Minimum Gain.
#[yaserde(prefix = "tt", rename = "MinGain")]
pub min_gain: Option<FloatRange>,
// Valid range of the Maximum Gain.
#[yaserde(prefix = "tt", rename = "MaxGain")]
pub max_gain: Option<FloatRange>,
// Valid range of the Minimum Iris.
#[yaserde(prefix = "tt", rename = "MinIris")]
pub min_iris: Option<FloatRange>,
// Valid range of the Maximum Iris.
#[yaserde(prefix = "tt", rename = "MaxIris")]
pub max_iris: Option<FloatRange>,
// Valid range of the ExposureTime.
#[yaserde(prefix = "tt", rename = "ExposureTime")]
pub exposure_time: Option<FloatRange>,
// Valid range of the Gain.
#[yaserde(prefix = "tt", rename = "Gain")]
pub gain: Option<FloatRange>,
// Valid range of the Iris.
#[yaserde(prefix = "tt", rename = "Iris")]
pub iris: Option<FloatRange>,
}
impl Validate for ExposureOptions20 {}
// Supported focus move option ranges, ver20 variant (tt:MoveOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MoveOptions20 {
// Valid ranges for the absolute control.
#[yaserde(prefix = "tt", rename = "Absolute")]
pub absolute: Option<AbsoluteFocusOptions>,
// Valid ranges for the relative control.
#[yaserde(prefix = "tt", rename = "Relative")]
pub relative: Option<RelativeFocusOptions20>,
// Valid ranges for the continuous control.
#[yaserde(prefix = "tt", rename = "Continuous")]
pub continuous: Option<ContinuousFocusOptions>,
}
impl Validate for MoveOptions20 {}
// Relative focus option ranges, ver20 variant: unlike RelativeFocusOptions,
// Speed is optional here.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RelativeFocusOptions20 {
// Valid ranges of the distance.
#[yaserde(prefix = "tt", rename = "Distance")]
pub distance: FloatRange,
// Valid ranges of the speed.
#[yaserde(prefix = "tt", rename = "Speed")]
pub speed: Option<FloatRange>,
}
impl Validate for RelativeFocusOptions20 {}
// White balance settings, ver20 variant (tt:WhiteBalance20): gains are
// optional here, unlike WhiteBalance.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalance20 {
// 'AUTO' or 'MANUAL'
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: WhiteBalanceMode,
// Rgain (unitless).
#[yaserde(prefix = "tt", rename = "CrGain")]
pub cr_gain: Option<f64>,
// Bgain (unitless).
#[yaserde(prefix = "tt", rename = "CbGain")]
pub cb_gain: Option<f64>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<WhiteBalance20Extension>,
}
impl Validate for WhiteBalance20 {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalance20Extension {}
impl Validate for WhiteBalance20Extension {}
// Focus configuration, ver20 variant (tt:FocusConfiguration20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusConfiguration20 {
// Mode of auto focus.
#[yaserde(prefix = "tt", rename = "AutoFocusMode")]
pub auto_focus_mode: AutoFocusMode,
// Default speed for focus moves (unit unspecified in the schema here).
#[yaserde(prefix = "tt", rename = "DefaultSpeed")]
pub default_speed: Option<f64>,
// Parameter to set autofocus near limit (unit: meter).
#[yaserde(prefix = "tt", rename = "NearLimit")]
pub near_limit: Option<f64>,
// Parameter to set autofocus far limit (unit: meter).
#[yaserde(prefix = "tt", rename = "FarLimit")]
pub far_limit: Option<f64>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<FocusConfiguration20Extension>,
// Zero or more modes as defined in enumeration tt:AFModes.
// (XML attribute, not a child element.)
#[yaserde(attribute, rename = "AFMode")]
pub af_mode: Option<StringAttrList>,
}
impl Validate for FocusConfiguration20 {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusConfiguration20Extension {}
impl Validate for FocusConfiguration20Extension {}
// Supported options for WhiteBalance20 (tt:WhiteBalanceOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalanceOptions20 {
// Mode of WhiteBalance.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<WhiteBalanceMode>,
// Valid range of the YrGain, when supported.
#[yaserde(prefix = "tt", rename = "YrGain")]
pub yr_gain: Option<FloatRange>,
// Valid range of the YbGain, when supported.
#[yaserde(prefix = "tt", rename = "YbGain")]
pub yb_gain: Option<FloatRange>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<WhiteBalanceOptions20Extension>,
}
impl Validate for WhiteBalanceOptions20 {}
// Schema extension point — currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct WhiteBalanceOptions20Extension {}
impl Validate for WhiteBalanceOptions20Extension {}
// Supported options for FocusConfiguration20 (tt:FocusOptions20).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusOptions20 {
// Supported modes for auto focus.
#[yaserde(prefix = "tt", rename = "AutoFocusModes")]
pub auto_focus_modes: Vec<AutoFocusMode>,
// Valid range of DefaultSpeed.
#[yaserde(prefix = "tt", rename = "DefaultSpeed")]
pub default_speed: Option<FloatRange>,
// Valid range of NearLimit.
#[yaserde(prefix = "tt", rename = "NearLimit")]
pub near_limit: Option<FloatRange>,
// Valid range of FarLimit.
#[yaserde(prefix = "tt", rename = "FarLimit")]
pub far_limit: Option<FloatRange>,
#[yaserde(prefix = "tt", rename = "Extension")]
pub extension: Option<FocusOptions20Extension>,
}
impl Validate for FocusOptions20 {}
// Extension carrying the supported AF modes.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FocusOptions20Extension {
// Supported options for auto focus. Options shall be chosen from
// tt:AFModes.
#[yaserde(prefix = "tt", rename = "AFModes")]
pub af_modes: Option<StringAttrList>,
}
impl Validate for FocusOptions20Extension {}
// Supported options for ToneCompensation (tt:ToneCompensationOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ToneCompensationOptions {
// Supported options for Tone Compensation mode. Its options shall be chosen
// from tt:ToneCompensationMode Type.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<String>,
// Indicates whether or not support Level parameter for Tone Compensation.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: bool,
}
impl Validate for ToneCompensationOptions {}
// Supported options for Defogging (tt:DefoggingOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DefoggingOptions {
// Supported options for Defogging mode. Its options shall be chosen from
// tt:DefoggingMode Type.
#[yaserde(prefix = "tt", rename = "Mode")]
pub mode: Vec<String>,
// Indicates whether or not support Level parameter for Defogging.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: bool,
}
impl Validate for DefoggingOptions {}
// Supported options for NoiseReduction (tt:NoiseReductionOptions).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct NoiseReductionOptions {
// Indicates whether or not support Level parameter for NoiseReduction.
#[yaserde(prefix = "tt", rename = "Level")]
pub level: bool,
}
impl Validate for NoiseReductionOptions {}
// Newtype over the schema's topic-namespace location value (a plain string);
// serialized as the bare inner string via the tuple-io helper derives
// (UtilsTupleIo / UtilsDefaultSerde, defined elsewhere in this crate).
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct TopicNamespaceLocation(pub String);
impl Validate for TopicNamespaceLocation {}
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum PropertyOperation {
Initialized,
Deleted,
Changed,
__Unknown__(String),
}
impl Default for PropertyOperation {
fn default() -> PropertyOperation {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for PropertyOperation {}
// An event payload as defined by the ONVIF ver10 schema: Source/Key/Data
// item lists plus a UTC timestamp and an optional property operation.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Message {
    // Token value pairs that triggered this message. Typically only one item is
    // present.
    #[yaserde(prefix = "tt", rename = "Source")]
    pub source: Option<ItemList>,
    // Optional payload parameters usable as a key (see MessageDescription).
    #[yaserde(prefix = "tt", rename = "Key")]
    pub key: Option<ItemList>,
    // The payload of the message (see MessageDescription).
    #[yaserde(prefix = "tt", rename = "Data")]
    pub data: Option<ItemList>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<MessageExtension>,
    // Event time, carried as an XML attribute.
    #[yaserde(attribute, rename = "UtcTime")]
    pub utc_time: xs::DateTime,
    // Whether the related property was initialized, deleted or changed.
    #[yaserde(attribute, rename = "PropertyOperation")]
    pub property_operation: Option<PropertyOperation>,
}
impl Validate for Message {}
// Reserved extension point for Message.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MessageExtension {}
impl Validate for MessageExtension {}
// List of parameters according to the corresponding ItemListDescription.
// Each item in the list shall have a unique name.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ItemList {
    // Value name pair as defined by the corresponding description.
    #[yaserde(prefix = "tt", rename = "SimpleItem")]
    pub simple_item: Vec<item_list::SimpleItemType>,
    // Complex value structure.
    #[yaserde(prefix = "tt", rename = "ElementItem")]
    pub element_item: Vec<item_list::ElementItemType>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ItemListExtension>,
}
impl Validate for ItemList {}
// Inner item types referenced by ItemList.
pub mod item_list {
    use super::*;
    // Name/value pair carried as XML attributes.
    #[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
    #[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
    pub struct SimpleItemType {
        // Item name.
        #[yaserde(attribute, rename = "Name")]
        pub name: String,
        // Item value. The type is defined in the corresponding description.
        #[yaserde(attribute, rename = "Value")]
        pub value: String,
    }
    impl Validate for SimpleItemType {}
    // Named complex item; only the name is modeled here.
    #[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
    #[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
    pub struct ElementItemType {
        // Item name.
        #[yaserde(attribute, rename = "Name")]
        pub name: String,
    }
    impl Validate for ElementItemType {}
}
// Reserved extension point for ItemList.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ItemListExtension {}
impl Validate for ItemListExtension {}
// Describes the structure (not the values) of a Message: which items appear
// in Source, Key and Data.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MessageDescription {
    // Set of tokens producing this message. The list may only contain
    // SimpleItemDescription items.
    // The set of tokens identify the component within the WS-Endpoint, which is
    // responsible for producing the message.
    #[yaserde(prefix = "tt", rename = "Source")]
    pub source: Option<ItemListDescription>,
    // Describes optional message payload parameters that may be used as key.
    // E.g. object IDs of tracked objects are conveyed as key.
    #[yaserde(prefix = "tt", rename = "Key")]
    pub key: Option<ItemListDescription>,
    // Describes the payload of the message.
    #[yaserde(prefix = "tt", rename = "Data")]
    pub data: Option<ItemListDescription>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<MessageDescriptionExtension>,
    // Must be set to true when the described Message relates to a property. An
    // alternative term of "property" is a "state" in contrast to a pure event,
    // which contains relevant information for only a single point in time.
    #[yaserde(attribute, rename = "IsProperty")]
    pub is_property: Option<bool>,
}
impl Validate for MessageDescription {}
// Reserved extension point for MessageDescription.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MessageDescriptionExtension {}
impl Validate for MessageDescriptionExtension {}
// Describes a list of items. Each item in the list shall have a unique name.
// The list is designed as linear structure without optional or unbounded
// elements.
// Use ElementItems only when complex structures are inevitable.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ItemListDescription {
    // Description of a simple item. The type must be of category simpleType
    // (xs:string, xs:integer, xs:float, ...).
    #[yaserde(prefix = "tt", rename = "SimpleItemDescription")]
    pub simple_item_description: Vec<item_list_description::SimpleItemDescriptionType>,
    // Description of a complex type. The Type must reference a defined type.
    #[yaserde(prefix = "tt", rename = "ElementItemDescription")]
    pub element_item_description: Vec<item_list_description::ElementItemDescriptionType>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ItemListDescriptionExtension>,
}
impl Validate for ItemListDescription {}
// Inner description types referenced by ItemListDescription.
pub mod item_list_description {
    use super::*;
    #[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
    #[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
    pub struct SimpleItemDescriptionType {
        // Item name. Must be unique within a list.
        #[yaserde(attribute, rename = "Name")]
        pub name: String,
        // The item's simple type (field is `_type` since `type` is a keyword).
        #[yaserde(attribute, rename = "Type")]
        pub _type: String,
    }
    impl Validate for SimpleItemDescriptionType {}
    #[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
    #[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
    pub struct ElementItemDescriptionType {
        // Item name. Must be unique within a list.
        #[yaserde(attribute, rename = "Name")]
        pub name: String,
        // The type of the item. The Type must reference a defined type.
        #[yaserde(attribute, rename = "Type")]
        pub _type: String,
    }
    impl Validate for ElementItemDescriptionType {}
}
// Reserved extension point for ItemListDescription.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ItemListDescriptionExtension {}
impl Validate for ItemListDescriptionExtension {}
// A polyline defined by an ordered sequence of points.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Polyline {
    #[yaserde(prefix = "tt", rename = "Point")]
    pub point: Vec<Vector>,
}
impl Validate for Polyline {}
// A crossing direction (Left/Right/Any).
// NOTE(review): exact semantics inferred from variant names — confirm
// against the ONVIF analytics rule definitions that use this type.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Direction {
    Left,
    Right,
    Any,
    // Catch-all for XML values that match no known variant.
    __Unknown__(String),
}
impl Default for Direction {
fn default() -> Direction {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for Direction {}
// Set of analytics module configurations of a video analytics engine.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineConfiguration {
    #[yaserde(prefix = "tt", rename = "AnalyticsModule")]
    pub analytics_module: Vec<Config>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AnalyticsEngineConfigurationExtension>,
}
impl Validate for AnalyticsEngineConfiguration {}
// Reserved extension point for AnalyticsEngineConfiguration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineConfigurationExtension {}
impl Validate for AnalyticsEngineConfigurationExtension {}
// Set of rule configurations of a rule engine.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RuleEngineConfiguration {
    #[yaserde(prefix = "tt", rename = "Rule")]
    pub rule: Vec<Config>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<RuleEngineConfigurationExtension>,
}
impl Validate for RuleEngineConfiguration {}
// Reserved extension point for RuleEngineConfiguration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RuleEngineConfigurationExtension {}
impl Validate for RuleEngineConfigurationExtension {}
// A single named, typed configuration (used for both rules and analytics
// modules above).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Config {
    // List of configuration parameters as defined in the corresponding
    // description.
    #[yaserde(prefix = "tt", rename = "Parameters")]
    pub parameters: ItemList,
    // Name of the configuration.
    #[yaserde(attribute, rename = "Name")]
    pub name: String,
    // The Type attribute specifies the type of rule and shall be equal to value
    // of one of Name attributes of ConfigDescription elements returned by
    // GetSupportedRules and GetSupportedAnalyticsModules command.
    #[yaserde(attribute, rename = "Type")]
    pub _type: String,
}
impl Validate for Config {}
// Describes one type of rule or analytics module: its parameters and the
// messages (events) it can produce.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ConfigDescription {
    // List describing the configuration parameters. The names of the parameters
    // must be unique. If possible SimpleItems
    // should be used to transport the information to ease parsing of
    // dynamically defined messages by a client
    // application.
    #[yaserde(prefix = "tt", rename = "Parameters")]
    pub parameters: ItemListDescription,
    // The analytics modules and rule engine produce Events, which must be
    // listed within the Analytics Module Description. In order to do so
    // the structure of the Message is defined and consists of three groups:
    // Source, Key, and Data. It is recommended to use SimpleItemDescriptions
    // wherever applicable.
    // The name of all Items must be unique within all Items contained in any
    // group of this Message.
    // Depending on the component multiple parameters or none may be needed to
    // identify the component uniquely.
    #[yaserde(prefix = "tt", rename = "Messages")]
    pub messages: Vec<config_description::MessagesType>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ConfigDescriptionExtension>,
    // The Name attribute (e.g. "tt::LineDetector") uniquely identifies the type
    // of rule, not a type definition in a schema.
    #[yaserde(attribute, rename = "Name")]
    pub name: String,
    // The fixed attribute signals that it is not allowed to add or remove this
    // type of configuration.
    #[yaserde(attribute, rename = "fixed")]
    pub fixed: Option<bool>,
    // The maxInstances attribute signals the maximum number of instances per
    // configuration.
    #[yaserde(attribute, rename = "maxInstances")]
    pub max_instances: Option<xs::Integer>,
}
impl Validate for ConfigDescription {}
// Inner message-description type referenced by ConfigDescription.
pub mod config_description {
    use super::*;
    // A MessageDescription extended with the ParentTopic that labels it.
    #[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
    #[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
    pub struct MessagesType {
        // The ParentTopic labels the message (e.g. "nn:RuleEngine/LineCrossing").
        // The real message can extend the ParentTopic
        // by for example the name of the instantiated rule (e.g.
        // "nn:RuleEngine/LineCrossing/crossMyFirstLine").
        // Even without knowing the complete topic name, the subscriber will be able
        // to distinguish the
        // messages produced by different rule instances of the same type via the
        // Source fields of the message.
        // There the name of the rule instance, which produced the message, must be
        // listed.
        #[yaserde(prefix = "tt", rename = "ParentTopic")]
        pub parent_topic: String,
        // Set of tokens producing this message. The list may only contain
        // SimpleItemDescription items.
        // The set of tokens identify the component within the WS-Endpoint, which is
        // responsible for producing the message.
        #[yaserde(prefix = "tt", rename = "Source")]
        pub source: Option<ItemListDescription>,
        // Describes optional message payload parameters that may be used as key.
        // E.g. object IDs of tracked objects are conveyed as key.
        #[yaserde(prefix = "tt", rename = "Key")]
        pub key: Option<ItemListDescription>,
        // Describes the payload of the message.
        #[yaserde(prefix = "tt", rename = "Data")]
        pub data: Option<ItemListDescription>,
        #[yaserde(prefix = "tt", rename = "Extension")]
        pub extension: Option<MessageDescriptionExtension>,
        // Must be set to true when the described Message relates to a property. An
        // alternative term of "property" is a "state" in contrast to a pure event,
        // which contains relevant information for only a single point in time.
        #[yaserde(attribute, rename = "IsProperty")]
        pub is_property: Option<bool>,
    }
    impl Validate for MessagesType {}
}
// Reserved extension point for ConfigDescription.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ConfigDescriptionExtension {}
impl Validate for ConfigDescriptionExtension {}
// The set of rule types supported by a Video Analytics configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SupportedRules {
    // Lists the location of all schemas that are referenced in the rules.
    #[yaserde(prefix = "tt", rename = "RuleContentSchemaLocation")]
    pub rule_content_schema_location: Vec<String>,
    // List of rules supported by the Video Analytics configuration.
    #[yaserde(prefix = "tt", rename = "RuleDescription")]
    pub rule_description: Vec<ConfigDescription>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SupportedRulesExtension>,
}
impl Validate for SupportedRules {}
// Reserved extension point for SupportedRules.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SupportedRulesExtension {}
impl Validate for SupportedRulesExtension {}
// The set of analytics module types supported by a device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SupportedAnalyticsModules {
    // It optionally contains a list of URLs that provide the location of schema
    // files.
    // These schema files describe the types and elements used in the analytics
    // module descriptions.
    // Analytics module descriptions that reference types or elements imported
    // from any ONVIF defined schema files
    // need not explicitly list those schema files.
    #[yaserde(prefix = "tt", rename = "AnalyticsModuleContentSchemaLocation")]
    pub analytics_module_content_schema_location: Vec<String>,
    #[yaserde(prefix = "tt", rename = "AnalyticsModuleDescription")]
    pub analytics_module_description: Vec<ConfigDescription>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SupportedAnalyticsModulesExtension>,
}
impl Validate for SupportedAnalyticsModules {}
// Reserved extension point for SupportedAnalyticsModules.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SupportedAnalyticsModulesExtension {}
impl Validate for SupportedAnalyticsModulesExtension {}
// Wrapper carrying a Polygon as a rule parameter.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PolygonConfiguration {
    // Contains Polygon configuration for rule parameters
    #[yaserde(prefix = "tt", rename = "Polygon")]
    pub polygon: Polygon,
}
impl Validate for PolygonConfiguration {}
// An array of polyline segments.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PolylineArray {
    // Contains array of Polyline
    #[yaserde(prefix = "tt", rename = "Segment")]
    pub segment: Vec<Polyline>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PolylineArrayExtension>,
}
impl Validate for PolylineArray {}
// Reserved extension point for PolylineArray.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PolylineArrayExtension {}
impl Validate for PolylineArrayExtension {}
// Wrapper carrying a PolylineArray as a rule parameter.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PolylineArrayConfiguration {
    // Contains PolylineArray configuration data
    #[yaserde(prefix = "tt", rename = "PolylineArray")]
    pub polyline_array: PolylineArray,
}
impl Validate for PolylineArrayConfiguration {}
// A motion expression with an optional dialect type.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MotionExpression {
    // Motion Expression data structure contains motion expression which is
    // based on Scene Descriptor schema with XPATH syntax. The Type argument
    // could allow introduction of different dialects
    #[yaserde(prefix = "tt", rename = "Expression")]
    pub expression: String,
    #[yaserde(attribute, rename = "Type")]
    pub _type: Option<String>,
}
impl Validate for MotionExpression {}
// Wrapper carrying a MotionExpression as a rule parameter.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MotionExpressionConfiguration {
    // Contains Rule MotionExpression configuration
    #[yaserde(prefix = "tt", rename = "MotionExpression")]
    pub motion_expression: MotionExpression,
}
impl Validate for MotionExpressionConfiguration {}
// Maps a cell grid onto the video frame.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CellLayout {
    // Mapping of the cell grid to the Video frame. The cell grid is starting
    // from the upper left corner and x dimension is going from left to right
    // and the y dimension from up to down.
    #[yaserde(prefix = "tt", rename = "Transformation")]
    pub transformation: Transformation,
    // Number of columns of the cell grid (x dimension)
    #[yaserde(attribute, rename = "Columns")]
    pub columns: xs::Integer,
    // Number of rows of the cell grid (y dimension)
    #[yaserde(attribute, rename = "Rows")]
    pub rows: xs::Integer,
}
impl Validate for CellLayout {}
// Configuration of the streaming and coding settings of a Video window.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PaneConfiguration {
    // Optional name of the pane configuration.
    #[yaserde(prefix = "tt", rename = "PaneName")]
    pub pane_name: Option<String>,
    // If the device has audio outputs, this element contains a pointer to the
    // audio output that is associated with the pane. A client
    // can retrieve the available audio outputs of a device using the
    // GetAudioOutputs command of the DeviceIO service.
    #[yaserde(prefix = "tt", rename = "AudioOutputToken")]
    pub audio_output_token: Option<ReferenceToken>,
    // If the device has audio sources, this element contains a pointer to the
    // audio source that is associated with this pane.
    // The audio connection from a decoder device to the NVT is established
    // using the backchannel mechanism. A client can retrieve the available
    // audio sources of a device using the GetAudioSources command of the
    // DeviceIO service.
    #[yaserde(prefix = "tt", rename = "AudioSourceToken")]
    pub audio_source_token: Option<ReferenceToken>,
    // The configuration of the audio encoder including codec, bitrate
    // and sample rate.
    #[yaserde(prefix = "tt", rename = "AudioEncoderConfiguration")]
    pub audio_encoder_configuration: Option<AudioEncoderConfiguration>,
    // A pointer to a Receiver that has the necessary information to receive
    // data from a Transmitter. This Receiver can be connected and the network
    // video decoder displays the received data on the specified outputs. A
    // client can retrieve the available Receivers using the
    // GetReceivers command of the Receiver Service.
    #[yaserde(prefix = "tt", rename = "ReceiverToken")]
    pub receiver_token: Option<ReferenceToken>,
    // A unique identifier in the display device.
    #[yaserde(prefix = "tt", rename = "Token")]
    pub token: ReferenceToken,
}
impl Validate for PaneConfiguration {}
// A pane layout describes one Video window of a display. It links a pane
// configuration to a region of the screen.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PaneLayout {
    // Reference to the configuration of the streaming and coding parameters.
    #[yaserde(prefix = "tt", rename = "Pane")]
    pub pane: ReferenceToken,
    // Describes the location and size of the area on the monitor. The area
    // coordinate values are expressed in normalized units [-1.0, 1.0].
    #[yaserde(prefix = "tt", rename = "Area")]
    pub area: Rectangle,
}
impl Validate for PaneLayout {}
// A layout describes a set of Video windows that are displayed simultaneously
// on a display.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Layout {
    // List of panes assembling the display layout.
    #[yaserde(prefix = "tt", rename = "PaneLayout")]
    pub pane_layout: Vec<PaneLayout>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<LayoutExtension>,
}
impl Validate for Layout {}
// Reserved extension point for Layout.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LayoutExtension {}
impl Validate for LayoutExtension {}
// This type contains the Audio and Video coding capabilities of a display
// service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct CodingCapabilities {
    // If the device supports audio encoding this section describes the
    // supported codecs and their configuration.
    #[yaserde(prefix = "tt", rename = "AudioEncodingCapabilities")]
    pub audio_encoding_capabilities: Option<AudioEncoderConfigurationOptions>,
    // If the device supports audio decoding this section describes the
    // supported codecs and their settings.
    #[yaserde(prefix = "tt", rename = "AudioDecodingCapabilities")]
    pub audio_decoding_capabilities: Option<AudioDecoderConfigurationOptions>,
    // This section describes the supported video codecs and their
    // configuration.
    #[yaserde(prefix = "tt", rename = "VideoDecodingCapabilities")]
    pub video_decoding_capabilities: VideoDecoderConfigurationOptions,
}
impl Validate for CodingCapabilities {}
// The options supported for a display layout.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LayoutOptions {
    // Lists the possible Pane Layouts of the Video Output
    #[yaserde(prefix = "tt", rename = "PaneLayoutOptions")]
    pub pane_layout_options: Vec<PaneLayoutOptions>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<LayoutOptionsExtension>,
}
impl Validate for LayoutOptions {}
// Reserved extension point for LayoutOptions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct LayoutOptionsExtension {}
impl Validate for LayoutOptionsExtension {}
// Description of a pane layout describing a complete display layout.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PaneLayoutOptions {
    // List of areas assembling a layout. Coordinate values are in the range
    // [-1.0, 1.0].
    #[yaserde(prefix = "tt", rename = "Area")]
    pub area: Vec<Rectangle>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<PaneOptionExtension>,
}
impl Validate for PaneLayoutOptions {}
// Reserved extension point for PaneLayoutOptions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PaneOptionExtension {}
impl Validate for PaneOptionExtension {}
// Description of a receiver, including its token and configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Receiver {
    // Unique identifier of the receiver.
    #[yaserde(prefix = "tt", rename = "Token")]
    pub token: ReferenceToken,
    // Describes the configuration of the receiver.
    #[yaserde(prefix = "tt", rename = "Configuration")]
    pub configuration: ReceiverConfiguration,
}
impl Validate for Receiver {}
// Describes the configuration of a receiver.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReceiverConfiguration {
    // The connection mode; see ReceiverMode for the defined modes.
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: ReceiverMode,
    // Details of the URI to which the receiver should connect.
    #[yaserde(prefix = "tt", rename = "MediaUri")]
    pub media_uri: String,
    // Stream connection parameters.
    #[yaserde(prefix = "tt", rename = "StreamSetup")]
    pub stream_setup: StreamSetup,
}
impl Validate for ReceiverConfiguration {}
// Specifies a receiver connection mode.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ReceiverMode {
    // The receiver connects on demand, as required by consumers of the media
    // streams.
    AutoConnect,
    // The receiver attempts to maintain a persistent connection to the
    // configured endpoint.
    AlwaysConnect,
    // The receiver does not attempt to connect.
    NeverConnect,
    // This case should never happen.
    Unknown,
    // Catch-all for XML values that match no known variant.
    __Unknown__(String),
}
impl Default for ReceiverMode {
fn default() -> ReceiverMode {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ReceiverMode {}
// Specifies the current connection state of the receiver.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ReceiverState {
    // The receiver is not connected.
    NotConnected,
    // The receiver is attempting to connect.
    Connecting,
    // The receiver is connected.
    Connected,
    // This case should never happen.
    Unknown,
    // Catch-all for XML values that match no known variant.
    __Unknown__(String),
}
impl Default for ReceiverState {
fn default() -> ReceiverState {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ReceiverState {}
// Contains information about a receiver's current state.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReceiverStateInformation {
    // The current connection state; see ReceiverState for the possible
    // values.
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: ReceiverState,
    // Indicates whether or not the receiver was created automatically.
    #[yaserde(prefix = "tt", rename = "AutoCreated")]
    pub auto_created: bool,
}
impl Validate for ReceiverStateInformation {}
// Token newtype referring to a Receiver.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct ReceiverReference(pub ReferenceToken);
impl Validate for ReceiverReference {}
// Token newtype referring to a Recording.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct RecordingReference(pub ReferenceToken);
impl Validate for RecordingReference {}
// Reference to a source, with an optional type discriminator attribute.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SourceReference {
    #[yaserde(prefix = "tt", rename = "Token")]
    pub token: ReferenceToken,
    #[yaserde(attribute, rename = "Type")]
    pub _type: Option<String>,
}
impl Validate for SourceReference {}
// Token newtype referring to a Track.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct TrackReference(pub ReferenceToken);
impl Validate for TrackReference {}
// Free-form description string newtype.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct Description(pub String);
impl Validate for Description {}
// A half-open time interval given by From/Until timestamps.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct DateTimeRange {
    #[yaserde(prefix = "tt", rename = "From")]
    pub from: xs::DateTime,
    #[yaserde(prefix = "tt", rename = "Until")]
    pub until: xs::DateTime,
}
impl Validate for DateTimeRange {}
// Summary of the recorded data available on a device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingSummary {
    // The earliest point in time where there is recorded data on the device.
    #[yaserde(prefix = "tt", rename = "DataFrom")]
    pub data_from: xs::DateTime,
    // The most recent point in time where there is recorded data on the device.
    #[yaserde(prefix = "tt", rename = "DataUntil")]
    pub data_until: xs::DateTime,
    // The device contains this many recordings.
    #[yaserde(prefix = "tt", rename = "NumberRecordings")]
    pub number_recordings: i32,
}
impl Validate for RecordingSummary {}
// A structure for defining a limited scope when searching in recorded data.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SearchScope {
    // A list of sources that are included in the scope. If this list is
    // included, only data from one of these sources shall be searched.
    #[yaserde(prefix = "tt", rename = "IncludedSources")]
    pub included_sources: Vec<SourceReference>,
    // A list of recordings that are included in the scope. If this list is
    // included, only data from one of these recordings shall be searched.
    #[yaserde(prefix = "tt", rename = "IncludedRecordings")]
    pub included_recordings: Vec<RecordingReference>,
    // An xpath expression used to specify what recordings to search. Only those
    // recordings with a RecordingInformation structure that matches the filter
    // shall be searched.
    #[yaserde(prefix = "tt", rename = "RecordingInformationFilter")]
    pub recording_information_filter: Option<XpathExpression>,
    // Extension point
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SearchScopeExtension>,
}
impl Validate for SearchScope {}
// Reserved extension point for SearchScope.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SearchScopeExtension {}
impl Validate for SearchScopeExtension {}
// Empty marker type for event search filters (no fields in this schema
// version).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EventFilter {}
impl Validate for EventFilter {}
// Filter selecting recorded data by a PTZ position volume.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct PtzpositionFilter {
    // The lower boundary of the PTZ volume to look for.
    #[yaserde(prefix = "tt", rename = "MinPosition")]
    pub min_position: Ptzvector,
    // The upper boundary of the PTZ volume to look for.
    #[yaserde(prefix = "tt", rename = "MaxPosition")]
    pub max_position: Ptzvector,
    // If true, search for when entering the specified PTZ volume.
    #[yaserde(prefix = "tt", rename = "EnterOrExit")]
    pub enter_or_exit: bool,
}
impl Validate for PtzpositionFilter {}
// Filter selecting recorded metadata via an XPath expression.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataFilter {
    #[yaserde(prefix = "tt", rename = "MetadataStreamFilter")]
    pub metadata_stream_filter: XpathExpression,
}
impl Validate for MetadataFilter {}
// Newtype over a string holding an XPath expression.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct XpathExpression(pub String);
impl Validate for XpathExpression {}
// Result list for a recording search session.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindRecordingResultList {
    // The state of the search when the result is returned. Indicates if there
    // can be more results, or if the search is completed.
    #[yaserde(prefix = "tt", rename = "SearchState")]
    pub search_state: SearchState,
    // A RecordingInformation structure for each found recording matching the
    // search.
    #[yaserde(prefix = "tt", rename = "RecordingInformation")]
    pub recording_information: Vec<RecordingInformation>,
}
impl Validate for FindRecordingResultList {}
// Result list for an event search session.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindEventResultList {
    // The state of the search when the result is returned. Indicates if there
    // can be more results, or if the search is completed.
    #[yaserde(prefix = "tt", rename = "SearchState")]
    pub search_state: SearchState,
    // A FindEventResult structure for each found event matching the search.
    #[yaserde(prefix = "tt", rename = "Result")]
    pub result: Vec<FindEventResult>,
}
impl Validate for FindEventResultList {}
// A single event match produced by an event search.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindEventResult {
    // The recording where this event was found. Empty string if no recording is
    // associated with this event.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // A reference to the track where this event was found. Empty string if no
    // track is associated with this event.
    #[yaserde(prefix = "tt", rename = "TrackToken")]
    pub track_token: TrackReference,
    // The time when the event occurred.
    #[yaserde(prefix = "tt", rename = "Time")]
    pub time: xs::DateTime,
    // The description of the event.
    #[yaserde(prefix = "tt", rename = "Event")]
    pub event: wsnt::NotificationMessageHolderType,
    // If true, indicates that the event is a virtual event generated for this
    // particular search session to give the state of a property at the start
    // time of the search.
    #[yaserde(prefix = "tt", rename = "StartStateEvent")]
    pub start_state_event: bool,
}
impl Validate for FindEventResult {}
// Result list for a PTZ position search session.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindPTZPositionResultList {
    // The state of the search when the result is returned. Indicates if there
    // can be more results, or if the search is completed.
    #[yaserde(prefix = "tt", rename = "SearchState")]
    pub search_state: SearchState,
    // A FindPTZPositionResult structure for each found PTZ position matching
    // the search.
    #[yaserde(prefix = "tt", rename = "Result")]
    pub result: Vec<FindPTZPositionResult>,
}
impl Validate for FindPTZPositionResultList {}
// A single PTZ position match produced by a PTZ position search.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindPTZPositionResult {
    // A reference to the recording containing the PTZ position.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // A reference to the metadata track containing the PTZ position.
    #[yaserde(prefix = "tt", rename = "TrackToken")]
    pub track_token: TrackReference,
    // The time when the PTZ position was valid.
    #[yaserde(prefix = "tt", rename = "Time")]
    pub time: xs::DateTime,
    // The PTZ position.
    #[yaserde(prefix = "tt", rename = "Position")]
    pub position: Ptzvector,
}
impl Validate for FindPTZPositionResult {}
// Result list for a metadata search session.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindMetadataResultList {
    // The state of the search when the result is returned. Indicates if there
    // can be more results, or if the search is completed.
    #[yaserde(prefix = "tt", rename = "SearchState")]
    pub search_state: SearchState,
    // A FindMetadataResult structure for each found set of Metadata matching
    // the search.
    #[yaserde(prefix = "tt", rename = "Result")]
    pub result: Vec<FindMetadataResult>,
}
impl Validate for FindMetadataResultList {}
// A single metadata match produced by a metadata search.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FindMetadataResult {
    // A reference to the recording containing the metadata.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // A reference to the metadata track containing the matching metadata.
    #[yaserde(prefix = "tt", rename = "TrackToken")]
    pub track_token: TrackReference,
    // The point in time when the matching metadata occurs in the metadata
    // track.
    #[yaserde(prefix = "tt", rename = "Time")]
    pub time: xs::DateTime,
}
impl Validate for FindMetadataResult {}
// State of a search session, indicating whether more results may still
// be produced.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum SearchState {
    // The search is queued and not yet started.
    Queued,
    // The search is underway and not yet completed.
    Searching,
    // The search has been completed and no new results will be found.
    Completed,
    // The state of the search is unknown. (This is not a valid response from
    // GetSearchState.)
    Unknown,
    // Catch-all for enumeration values not covered by the schema.
    __Unknown__(String),
}
impl Default for SearchState {
fn default() -> SearchState {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for SearchState {}
// Newtype wrapper around a ReferenceToken identifying a search job.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct JobToken(pub ReferenceToken);
impl Validate for JobToken {}
// Summary information about one recording: its source, time span,
// description, tracks and status.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingInformation {
    // Token identifying the recording.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // Information about the source of the recording. This gives a description
    // of where the data in the recording comes from. Since a single
    // recording is intended to record related material, there is just one
    // source. It indicates the physical location or the
    // major data source for the recording. Currently the recording
    // configuration cannot describe each individual data source.
    #[yaserde(prefix = "tt", rename = "Source")]
    pub source: RecordingSourceInformation,
    #[yaserde(prefix = "tt", rename = "EarliestRecording")]
    pub earliest_recording: Option<xs::DateTime>,
    #[yaserde(prefix = "tt", rename = "LatestRecording")]
    pub latest_recording: Option<xs::DateTime>,
    #[yaserde(prefix = "tt", rename = "Content")]
    pub content: Description,
    // Basic information about the track. Note that a track may represent a
    // single contiguous time span or consist of multiple slices.
    #[yaserde(prefix = "tt", rename = "Track")]
    pub track: Vec<TrackInformation>,
    #[yaserde(prefix = "tt", rename = "RecordingStatus")]
    pub recording_status: RecordingStatus,
}
impl Validate for RecordingInformation {}
// A set of informative descriptions of a data source. The Search service allows
// a client to filter on recordings based on information in this structure.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingSourceInformation {
    // Identifier for the source chosen by the client that creates the
    // structure.
    // This identifier is opaque to the device. Clients may use any type of URI
    // for this field. A device shall support at least 128 characters.
    #[yaserde(prefix = "tt", rename = "SourceId")]
    pub source_id: String,
    // Informative user readable name of the source, e.g. "Camera23". A device
    // shall support at least 20 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Informative description of the physical location of the source, e.g. the
    // coordinates on a map.
    #[yaserde(prefix = "tt", rename = "Location")]
    pub location: Description,
    // Informative description of the source.
    #[yaserde(prefix = "tt", rename = "Description")]
    pub description: Description,
    // URI provided by the service supplying data to be recorded. A device shall
    // support at least 128 characters.
    #[yaserde(prefix = "tt", rename = "Address")]
    pub address: String,
}
impl Validate for RecordingSourceInformation {}
// Lifecycle status of a recording.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum RecordingStatus {
    Initiated,
    Recording,
    Stopped,
    Removing,
    Removed,
    // This case should never happen.
    Unknown,
    // Catch-all for enumeration values not covered by the schema.
    __Unknown__(String),
}
impl Default for RecordingStatus {
fn default() -> RecordingStatus {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for RecordingStatus {}
// Basic information about one track of a recording.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct TrackInformation {
    // Token identifying the track.
    #[yaserde(prefix = "tt", rename = "TrackToken")]
    pub track_token: TrackReference,
    // Type of the track: "Video", "Audio" or "Metadata".
    // The track shall only be able to hold data of that type.
    #[yaserde(prefix = "tt", rename = "TrackType")]
    pub track_type: TrackType,
    // Informative description of the contents of the track.
    #[yaserde(prefix = "tt", rename = "Description")]
    pub description: Description,
    // The start date and time of the oldest recorded data in the track.
    #[yaserde(prefix = "tt", rename = "DataFrom")]
    pub data_from: xs::DateTime,
    // The stop date and time of the newest recorded data in the track.
    #[yaserde(prefix = "tt", rename = "DataTo")]
    pub data_to: xs::DateTime,
}
impl Validate for TrackInformation {}
// Kind of data a track can hold.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum TrackType {
    Video,
    Audio,
    Metadata,
    // Placeholder for future extension.
    Extended,
    // Catch-all for enumeration values not covered by the schema.
    __Unknown__(String),
}
impl Default for TrackType {
fn default() -> TrackType {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for TrackType {}
// A set of media attributes valid for a recording at a point in time or for a
// time interval. Holds one TrackAttributes entry per track.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MediaAttributes {
    // A reference to the recording that has these attributes.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // A set of attributes for each track.
    #[yaserde(prefix = "tt", rename = "TrackAttributes")]
    pub track_attributes: Vec<TrackAttributes>,
    // The attributes are valid from this point in time in the recording.
    #[yaserde(prefix = "tt", rename = "From")]
    pub from: xs::DateTime,
    // The attributes are valid until this point in time in the recording. Can
    // be equal to 'From' to indicate that the attributes are only known to be
    // valid for this particular point in time.
    #[yaserde(prefix = "tt", rename = "Until")]
    pub until: xs::DateTime,
}
impl Validate for MediaAttributes {}
// Per-track attributes; exactly one of the type-specific attribute
// structures is present, matching the track's type.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct TrackAttributes {
    // The basic information about the track. Note that a track may represent a
    // single contiguous time span or consist of multiple slices.
    #[yaserde(prefix = "tt", rename = "TrackInformation")]
    pub track_information: TrackInformation,
    // If the track is a video track, exactly one of this structure shall be
    // present and contain the video attributes.
    #[yaserde(prefix = "tt", rename = "VideoAttributes")]
    pub video_attributes: Option<VideoAttributes>,
    // If the track is an audio track, exactly one of this structure shall be
    // present and contain the audio attributes.
    #[yaserde(prefix = "tt", rename = "AudioAttributes")]
    pub audio_attributes: Option<AudioAttributes>,
    // If the track is a metadata track, exactly one of this structure shall be
    // present and contain the metadata attributes.
    #[yaserde(prefix = "tt", rename = "MetadataAttributes")]
    pub metadata_attributes: Option<MetadataAttributes>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<TrackAttributesExtension>,
}
impl Validate for TrackAttributes {}
// Extension point for TrackAttributes; currently empty in the schema binding.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct TrackAttributesExtension {}
impl Validate for TrackAttributesExtension {}
// Attributes of a video track.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct VideoAttributes {
    // Average bitrate in kbps.
    #[yaserde(prefix = "tt", rename = "Bitrate")]
    pub bitrate: Option<i32>,
    // The width of the video in pixels.
    #[yaserde(prefix = "tt", rename = "Width")]
    pub width: i32,
    // The height of the video in pixels.
    #[yaserde(prefix = "tt", rename = "Height")]
    pub height: i32,
    // Video encoding of the track. Use value from tt:VideoEncoding for MPEG4.
    // Otherwise use values from tt:VideoEncodingMimeNames and
    // (sentence truncated in the generated source — presumably IANA media
    // type names; verify against the ONVIF schema documentation).
    #[yaserde(prefix = "tt", rename = "Encoding")]
    pub encoding: String,
    // Average framerate in frames per second.
    #[yaserde(prefix = "tt", rename = "Framerate")]
    pub framerate: f64,
}
impl Validate for VideoAttributes {}
// Attributes of an audio track.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioAttributes {
    // The bitrate in kbps.
    #[yaserde(prefix = "tt", rename = "Bitrate")]
    pub bitrate: Option<i32>,
    // Audio encoding of the track. Use values from tt:AudioEncoding for G711
    // and AAC. Otherwise use values from tt:AudioEncodingMimeNames and
    // (sentence truncated in the generated source — presumably IANA media
    // type names; verify against the ONVIF schema documentation).
    #[yaserde(prefix = "tt", rename = "Encoding")]
    pub encoding: String,
    // The sample rate in kHz.
    #[yaserde(prefix = "tt", rename = "Samplerate")]
    pub samplerate: i32,
}
impl Validate for AudioAttributes {}
// Attributes of a metadata track over the reported time interval.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataAttributes {
    // Indicates that there can be PTZ data in the metadata track in the
    // specified time interval.
    #[yaserde(prefix = "tt", rename = "CanContainPTZ")]
    pub can_contain_ptz: bool,
    // Indicates that there can be analytics data in the metadata track in the
    // specified time interval.
    #[yaserde(prefix = "tt", rename = "CanContainAnalytics")]
    pub can_contain_analytics: bool,
    // Indicates that there can be notifications in the metadata track in the
    // specified time interval.
    #[yaserde(prefix = "tt", rename = "CanContainNotifications")]
    pub can_contain_notifications: bool,
    // List of all PTZ spaces active for recording. Note that events are only
    // recorded on position changes and the actual point of recording may not
    // necessarily contain an event of the specified type.
    #[yaserde(attribute, rename = "PtzSpaces")]
    pub ptz_spaces: Option<StringAttrList>,
}
impl Validate for MetadataAttributes {}
// Newtype wrapper around a ReferenceToken identifying a recording job.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct RecordingJobReference(pub ReferenceToken);
impl Validate for RecordingJobReference {}
// Configuration of a recording: its source description, content
// description and retention policy.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingConfiguration {
    // Information about the source of the recording.
    #[yaserde(prefix = "tt", rename = "Source")]
    pub source: RecordingSourceInformation,
    // Informative description of the source.
    #[yaserde(prefix = "tt", rename = "Content")]
    pub content: Description,
    // Specifies the maximum time that data in any track within the
    // recording shall be stored. The device shall delete any data older than
    // the maximum retention
    // time. Such data shall not be accessible anymore. If the
    // MaximumRetentionPeriod is set to 0,
    // the device shall not limit the retention time of stored data, except by
    // resource constraints.
    // Whatever the value of MaximumRetentionTime, the device may automatically
    // delete
    // recordings to free up storage space for new recordings.
    #[yaserde(prefix = "tt", rename = "MaximumRetentionTime")]
    pub maximum_retention_time: xs::Duration,
}
impl Validate for RecordingConfiguration {}
// Configuration of a single track within a recording.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct TrackConfiguration {
    // Type of the track. It shall be equal to the strings “Video”,
    // “Audio” or “Metadata”. The track shall only be able to hold data
    // of that type.
    #[yaserde(prefix = "tt", rename = "TrackType")]
    pub track_type: TrackType,
    // Informative description of the track.
    #[yaserde(prefix = "tt", rename = "Description")]
    pub description: Description,
}
impl Validate for TrackConfiguration {}
// One entry of a GetRecordings response: a recording plus its
// configuration and tracks.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct GetRecordingsResponseItem {
    // Token of the recording.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // Configuration of the recording.
    #[yaserde(prefix = "tt", rename = "Configuration")]
    pub configuration: RecordingConfiguration,
    // List of tracks.
    #[yaserde(prefix = "tt", rename = "Tracks")]
    pub tracks: GetTracksResponseList,
}
impl Validate for GetRecordingsResponseItem {}
// List wrapper holding the track entries of a GetTracks response.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct GetTracksResponseList {
    // Configuration of a track.
    #[yaserde(prefix = "tt", rename = "Track")]
    pub track: Vec<GetTracksResponseItem>,
}
impl Validate for GetTracksResponseList {}
// One entry of a GetTracks response: a track token plus its configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct GetTracksResponseItem {
    // Token of the track.
    #[yaserde(prefix = "tt", rename = "TrackToken")]
    pub track_token: TrackReference,
    // Configuration of the track.
    #[yaserde(prefix = "tt", rename = "Configuration")]
    pub configuration: TrackConfiguration,
}
impl Validate for GetTracksResponseItem {}
// Configuration of a recording job: which recording it writes to, its
// mode, priority and data sources.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobConfiguration {
    // Identifies the recording to which this job shall store the received data.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // The mode of the job. If it is idle, nothing shall happen. If it is
    // active, the device shall try
    // to obtain data from the receivers. A client shall use
    // GetRecordingJobState to determine if data transfer is really taking
    // place.
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: RecordingJobMode,
    // This shall be a non-negative number. If there are multiple recording jobs
    // that store data to
    // the same track, the device will only store the data for the recording job
    // with the highest
    // priority. The priority is specified per recording job, but the device
    // shall determine the priority
    // of each track individually. If there are two recording jobs with the same
    // priority, the device
    // shall record the data corresponding to the recording job that was
    // activated the latest.
    #[yaserde(prefix = "tt", rename = "Priority")]
    pub priority: i32,
    // Source of the recording.
    #[yaserde(prefix = "tt", rename = "Source")]
    pub source: Vec<RecordingJobSource>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<RecordingJobConfigurationExtension>,
    // This attribute adds an additional requirement for activating the
    // recording job.
    // If this optional field is provided the job shall only record if the
    // schedule exists and is active.
    #[yaserde(attribute, rename = "ScheduleToken")]
    pub schedule_token: Option<String>,
}
impl Validate for RecordingJobConfiguration {}
// Newtype string for a recording job mode. The RecordingJobConfiguration
// docs describe "idle" and "active" semantics; exact token values should be
// verified against the ONVIF Recording Control specification.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct RecordingJobMode(pub String);
impl Validate for RecordingJobMode {}
// Extension point for RecordingJobConfiguration; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobConfigurationExtension {}
impl Validate for RecordingJobConfigurationExtension {}
// One data source of a recording job and the tracks it feeds.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobSource {
    // This field shall be a reference to the source of the data. The type of
    // the source
    // is determined by the attribute Type in the SourceToken structure. If Type
    // is
    // http://www.onvif.org/ver10/schema/Receiver, the token is a
    // ReceiverReference. In this case
    // the device shall receive the data over the network. If Type is
    // http://www.onvif.org/ver10/schema/Profile, the token identifies a media
    // profile, instructing the
    // device to obtain data from a profile that exists on the local device.
    #[yaserde(prefix = "tt", rename = "SourceToken")]
    pub source_token: Option<SourceReference>,
    // If this field is TRUE, and if the SourceToken is omitted, the device
    // shall create a receiver object (through the receiver service) and assign
    // the
    // ReceiverReference to the SourceToken field. When retrieving the
    // RecordingJobConfiguration
    // from the device, the AutoCreateReceiver field shall never be present.
    #[yaserde(prefix = "tt", rename = "AutoCreateReceiver")]
    pub auto_create_receiver: Option<bool>,
    // List of tracks associated with the recording.
    #[yaserde(prefix = "tt", rename = "Tracks")]
    pub tracks: Vec<RecordingJobTrack>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<RecordingJobSourceExtension>,
}
impl Validate for RecordingJobSource {}
// Extension point for RecordingJobSource; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobSourceExtension {}
impl Validate for RecordingJobSourceExtension {}
// Mapping from a source track (identified by SourceTag) to a destination
// track of the recording.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobTrack {
    // If the received RTSP stream contains multiple tracks of the same type,
    // the
    // SourceTag differentiates between those Tracks. This field can be ignored
    // in case of recording a local source.
    #[yaserde(prefix = "tt", rename = "SourceTag")]
    pub source_tag: String,
    // The destination is the tracktoken of the track to which the device shall
    // store the
    // received data.
    #[yaserde(prefix = "tt", rename = "Destination")]
    pub destination: TrackReference,
}
impl Validate for RecordingJobTrack {}
// Runtime state of a recording job, aggregated over all its sources.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobStateInformation {
    // Identification of the recording that the recording job records to.
    #[yaserde(prefix = "tt", rename = "RecordingToken")]
    pub recording_token: RecordingReference,
    // Holds the aggregated state over the whole RecordingJobInformation
    // structure.
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: RecordingJobState,
    // Identifies the data source of the recording job.
    #[yaserde(prefix = "tt", rename = "Sources")]
    pub sources: Vec<RecordingJobStateSource>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<RecordingJobStateInformationExtension>,
}
impl Validate for RecordingJobStateInformation {}
// Extension point for RecordingJobStateInformation; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobStateInformationExtension {}
impl Validate for RecordingJobStateInformationExtension {}
// Newtype string for a recording job state. Per RecordingJobStateTrack's
// documentation below, valid values include “Idle”, “Active” and “Error”.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct RecordingJobState(pub String);
impl Validate for RecordingJobState {}
// Per-source state of a recording job.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobStateSource {
    // Identifies the data source of the recording job.
    #[yaserde(prefix = "tt", rename = "SourceToken")]
    pub source_token: SourceReference,
    // Holds the aggregated state over all substructures of
    // RecordingJobStateSource.
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: RecordingJobState,
    // List of track items.
    #[yaserde(prefix = "tt", rename = "Tracks")]
    pub tracks: RecordingJobStateTracks,
}
impl Validate for RecordingJobStateSource {}
// List wrapper holding the per-track state entries of a job source.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobStateTracks {
    // State of an individual track.
    #[yaserde(prefix = "tt", rename = "Track")]
    pub track: Vec<RecordingJobStateTrack>,
}
impl Validate for RecordingJobStateTracks {}
// Per-track state of a recording job source.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct RecordingJobStateTrack {
    // Identifies the track of the data source that provides the data.
    #[yaserde(prefix = "tt", rename = "SourceTag")]
    pub source_tag: String,
    // Indicates the destination track.
    #[yaserde(prefix = "tt", rename = "Destination")]
    pub destination: TrackReference,
    // Optionally holds an implementation defined string value that describes
    // the error.
    // The string should be in the English language.
    #[yaserde(prefix = "tt", rename = "Error")]
    pub error: Option<String>,
    // Provides the job state of the track. The valid
    // values of state shall be “Idle”, “Active” and “Error”. If
    // state equals “Error”, the Error field may be filled in with an
    // implementation defined value.
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: RecordingJobState,
}
impl Validate for RecordingJobStateTrack {}
// One entry of a GetRecordingJobs response: a job token plus its
// configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct GetRecordingJobsResponseItem {
    // Token identifying the recording job.
    #[yaserde(prefix = "tt", rename = "JobToken")]
    pub job_token: RecordingJobReference,
    // Configuration of the recording job.
    #[yaserde(prefix = "tt", rename = "JobConfiguration")]
    pub job_configuration: RecordingJobConfiguration,
}
impl Validate for GetRecordingJobsResponseItem {}
// Configuration parameters for the replay service.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ReplayConfiguration {
    // The RTSP session timeout (xs:duration).
    #[yaserde(prefix = "tt", rename = "SessionTimeout")]
    pub session_timeout: xs::Duration,
}
impl Validate for ReplayConfiguration {}
// An analytics engine entity with its configuration, name, use count and
// unique token.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngine {
    // Engine-level configuration of this analytics engine.
    #[yaserde(prefix = "tt", rename = "AnalyticsEngineConfiguration")]
    pub analytics_engine_configuration: AnalyticsDeviceEngineConfiguration,
    // User readable name. Length up to 64 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Number of internal references currently using this configuration.
    #[yaserde(prefix = "tt", rename = "UseCount")]
    pub use_count: i32,
    // Token that uniquely references this configuration. Length up to 64
    // characters.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AnalyticsEngine {}
// Container for one or more engine configurations of an analytics device.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsDeviceEngineConfiguration {
    // One entry per configured engine.
    #[yaserde(prefix = "tt", rename = "EngineConfiguration")]
    pub engine_configuration: Vec<EngineConfiguration>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AnalyticsDeviceEngineConfigurationExtension>,
}
impl Validate for AnalyticsDeviceEngineConfiguration {}
// Extension point for AnalyticsDeviceEngineConfiguration; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsDeviceEngineConfigurationExtension {}
impl Validate for AnalyticsDeviceEngineConfigurationExtension {}
// Pairs a video analytics configuration with its engine input information.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct EngineConfiguration {
    // The video analytics configuration applied by the engine.
    #[yaserde(prefix = "tt", rename = "VideoAnalyticsConfiguration")]
    pub video_analytics_configuration: VideoAnalyticsConfiguration,
    // Input information for the analytics engine.
    #[yaserde(prefix = "tt", rename = "AnalyticsEngineInputInfo")]
    pub analytics_engine_input_info: AnalyticsEngineInputInfo,
}
impl Validate for EngineConfiguration {}
// Optional input information for an analytics engine.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineInputInfo {
    // Optional input description (generic Config).
    #[yaserde(prefix = "tt", rename = "InputInfo")]
    pub input_info: Option<Config>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AnalyticsEngineInputInfoExtension>,
}
impl Validate for AnalyticsEngineInputInfo {}
// Extension point for AnalyticsEngineInputInfo; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineInputInfoExtension {}
impl Validate for AnalyticsEngineInputInfoExtension {}
// Input configuration of an analytics engine: source identification plus
// video and metadata input settings.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineInput {
    // Identification of the data source feeding this input.
    #[yaserde(prefix = "tt", rename = "SourceIdentification")]
    pub source_identification: SourceIdentification,
    // Video encoder configuration of the input.
    #[yaserde(prefix = "tt", rename = "VideoInput")]
    pub video_input: VideoEncoderConfiguration,
    // Metadata input configuration.
    #[yaserde(prefix = "tt", rename = "MetadataInput")]
    pub metadata_input: MetadataInput,
    // User readable name. Length up to 64 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Number of internal references currently using this configuration.
    #[yaserde(prefix = "tt", rename = "UseCount")]
    pub use_count: i32,
    // Token that uniquely references this configuration. Length up to 64
    // characters.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AnalyticsEngineInput {}
// Identification of a data source by name and one or more reference tokens.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SourceIdentification {
    // Name of the source.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: String,
    // Tokens referencing the source.
    #[yaserde(prefix = "tt", rename = "Token")]
    pub token: Vec<ReferenceToken>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<SourceIdentificationExtension>,
}
impl Validate for SourceIdentification {}
// Extension point for SourceIdentification; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct SourceIdentificationExtension {}
impl Validate for SourceIdentificationExtension {}
// Metadata input of an analytics engine, described by generic Config entries.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataInput {
    // Metadata configuration entries.
    #[yaserde(prefix = "tt", rename = "MetadataConfig")]
    pub metadata_config: Vec<Config>,
    // Reserved for future schema extensions.
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<MetadataInputExtension>,
}
impl Validate for MetadataInput {}
// Extension point for MetadataInput; currently empty.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MetadataInputExtension {}
impl Validate for MetadataInputExtension {}
// Control object binding an analytics engine to its configuration, inputs
// and receivers, with an operating mode.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsEngineControl {
    // Token of the analytics engine (AnalyticsEngine) being controlled.
    #[yaserde(prefix = "tt", rename = "EngineToken")]
    pub engine_token: ReferenceToken,
    // Token of the analytics engine configuration (VideoAnalyticsConfiguration)
    // in effect.
    #[yaserde(prefix = "tt", rename = "EngineConfigToken")]
    pub engine_config_token: ReferenceToken,
    // Tokens of the input (AnalyticsEngineInput) configuration applied.
    #[yaserde(prefix = "tt", rename = "InputToken")]
    pub input_token: Vec<ReferenceToken>,
    // Tokens of the receiver providing media input data. The order of
    // ReceiverToken shall exactly match the order of InputToken.
    #[yaserde(prefix = "tt", rename = "ReceiverToken")]
    pub receiver_token: Vec<ReferenceToken>,
    // Optional multicast configuration for the engine output.
    #[yaserde(prefix = "tt", rename = "Multicast")]
    pub multicast: Option<MulticastConfiguration>,
    // Subscription configuration (generic Config).
    #[yaserde(prefix = "tt", rename = "Subscription")]
    pub subscription: Config,
    // Operating mode of the control object (Idle or Active).
    #[yaserde(prefix = "tt", rename = "Mode")]
    pub mode: ModeOfOperation,
    // User readable name. Length up to 64 characters.
    #[yaserde(prefix = "tt", rename = "Name")]
    pub name: Name,
    // Number of internal references currently using this configuration.
    #[yaserde(prefix = "tt", rename = "UseCount")]
    pub use_count: i32,
    // Token that uniquely references this configuration. Length up to 64
    // characters.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for AnalyticsEngineControl {}
// Operating mode of an analytics engine control object.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ModeOfOperation {
    Idle,
    Active,
    // This case should never happen.
    Unknown,
    // Catch-all for values not covered by the schema version this code was
    // generated from; preserves the raw string.
    __Unknown__(String),
}
impl Default for ModeOfOperation {
fn default() -> ModeOfOperation {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ModeOfOperation {}
// Reported status of an analytics engine control object.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsStateInformation {
    // Token of the control object whose status is requested.
    #[yaserde(prefix = "tt", rename = "AnalyticsEngineControlToken")]
    pub analytics_engine_control_token: ReferenceToken,
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: AnalyticsState,
}
impl Validate for AnalyticsStateInformation {}
// Current state of an analytics engine, with an optional error description.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AnalyticsState {
    // Present when the engine is in an error condition.
    #[yaserde(prefix = "tt", rename = "Error")]
    pub error: Option<String>,
    // Free-form state string; exact values are device-defined (not constrained
    // by the schema visible here).
    #[yaserde(prefix = "tt", rename = "State")]
    pub state: String,
}
impl Validate for AnalyticsState {}
// Action Engine Event Payload data structure contains the information about the
// ONVIF command invocations. Since this event could be generated by other or
// proprietary actions, the command invocation specific fields are defined as
// optional and additional extension mechanism is provided for future or
// additional action definitions.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ActionEngineEventPayload {
    // Request Message
    #[yaserde(prefix = "tt", rename = "RequestInfo")]
    pub request_info: soapenv::Envelope,
    // Response Message
    #[yaserde(prefix = "tt", rename = "ResponseInfo")]
    pub response_info: soapenv::Envelope,
    // Fault Message
    #[yaserde(prefix = "tt", rename = "Fault")]
    pub fault: soapenv::Fault,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ActionEngineEventPayloadExtension>,
}
impl Validate for ActionEngineEventPayload {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ActionEngineEventPayloadExtension {}
impl Validate for ActionEngineEventPayloadExtension {}
// AudioClassType acceptable values are;
// gun_shot, scream, glass_breaking, tire_screech
// Newtype over the raw schema string; the listed values are conventions, not
// enforced by this type.
#[derive(Default, PartialEq, Debug, UtilsTupleIo, UtilsDefaultSerde)]
pub struct AudioClassType(pub String);
impl Validate for AudioClassType {}
// One candidate classification of a detected audio event.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioClassCandidate {
    // Indicates audio class label
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: AudioClassType,
    // A likelihood/probability that the corresponding audio event belongs to
    // this class. The sum of the likelihoods shall NOT exceed 1
    #[yaserde(prefix = "tt", rename = "Likelihood")]
    pub likelihood: f64,
}
impl Validate for AudioClassCandidate {}
// Set of audio class candidates describing a single audio event.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioClassDescriptor {
    // Array of audio class label and class probability
    #[yaserde(prefix = "tt", rename = "ClassCandidate")]
    pub class_candidate: Vec<AudioClassCandidate>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<AudioClassDescriptorExtension>,
}
impl Validate for AudioClassDescriptor {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct AudioClassDescriptorExtension {}
impl Validate for AudioClassDescriptorExtension {}
// Live statistics for one active streaming connection of a profile.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ActiveConnection {
    // Current bitrate of the connection (units are device-defined; presumably
    // kbps — confirm against the device documentation).
    #[yaserde(prefix = "tt", rename = "CurrentBitrate")]
    pub current_bitrate: f64,
    // Current frame rate of the connection.
    #[yaserde(prefix = "tt", rename = "CurrentFps")]
    pub current_fps: f64,
}
impl Validate for ActiveConnection {}
// Status of a media profile: the set of currently active connections.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ProfileStatus {
    #[yaserde(prefix = "tt", rename = "ActiveConnections")]
    pub active_connections: Vec<ActiveConnection>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ProfileStatusExtension>,
}
impl Validate for ProfileStatus {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ProfileStatusExtension {}
impl Validate for ProfileStatusExtension {}
// OSD reference type (empty in this schema version; content is carried by the
// referencing element).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Osdreference {}
impl Validate for Osdreference {}
// Kind of on-screen display element.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum Osdtype {
    Text,
    Image,
    Extended,
    // Catch-all preserving any value not covered by this schema version.
    __Unknown__(String),
}
impl Default for Osdtype {
fn default() -> Osdtype {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for Osdtype {}
// Placement of an OSD element on the video.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdposConfiguration {
    // For OSD position type, following are the pre-defined:
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: String,
    // Explicit coordinates; only meaningful for custom position types.
    #[yaserde(prefix = "tt", rename = "Pos")]
    pub pos: Option<Vector>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdposConfigurationExtension>,
}
impl Validate for OsdposConfiguration {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdposConfigurationExtension {}
impl Validate for OsdposConfigurationExtension {}
// The value range of "Transparent" may be defined by vendors, but it shall
// follow this rule: the minimum value means non-transparent and the maximum
// value means fully transparent.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Osdcolor {
    #[yaserde(prefix = "tt", rename = "Color")]
    pub color: Color,
    // Optional transparency level; see the vendor rule above.
    #[yaserde(attribute, rename = "Transparent")]
    pub transparent: Option<i32>,
}
impl Validate for Osdcolor {}
// Configuration of a text OSD element (plain text, date and/or time overlays).
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdtextConfiguration {
    // The following OSD Text Type are defined:
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: String,
    // List of supported OSD date formats. This element shall be present when
    // the value of Type field has Date or DateAndTime. The following DateFormat
    // are defined:
    #[yaserde(prefix = "tt", rename = "DateFormat")]
    pub date_format: Option<String>,
    // List of supported OSD time formats. This element shall be present when
    // the value of Type field has Time or DateAndTime. The following TimeFormat
    // are defined:
    #[yaserde(prefix = "tt", rename = "TimeFormat")]
    pub time_format: Option<String>,
    // Font size of the text in pt.
    #[yaserde(prefix = "tt", rename = "FontSize")]
    pub font_size: Option<i32>,
    // Font color of the text.
    #[yaserde(prefix = "tt", rename = "FontColor")]
    pub font_color: Option<Osdcolor>,
    // Background color of the text.
    #[yaserde(prefix = "tt", rename = "BackgroundColor")]
    pub background_color: Option<Osdcolor>,
    // The content of text to be displayed.
    #[yaserde(prefix = "tt", rename = "PlainText")]
    pub plain_text: Option<String>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdtextConfigurationExtension>,
    // This flag is applicable for Type Plain and defaults to true. When set to
    // false the PlainText content will not be persistent across device reboots.
    #[yaserde(attribute, rename = "IsPersistentText")]
    pub is_persistent_text: Option<bool>,
}
impl Validate for OsdtextConfiguration {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdtextConfigurationExtension {}
impl Validate for OsdtextConfigurationExtension {}
// Configuration of an image OSD element.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdimgConfiguration {
    // The URI of the image which to be displayed.
    #[yaserde(prefix = "tt", rename = "ImgPath")]
    pub img_path: String,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdimgConfigurationExtension>,
}
impl Validate for OsdimgConfiguration {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdimgConfigurationExtension {}
impl Validate for OsdimgConfigurationExtension {}
// Range of supported color values in a given colorspace, one axis per
// coordinate.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ColorspaceRange {
    #[yaserde(prefix = "tt", rename = "X")]
    pub x: FloatRange,
    #[yaserde(prefix = "tt", rename = "Y")]
    pub y: FloatRange,
    #[yaserde(prefix = "tt", rename = "Z")]
    pub z: FloatRange,
    // Acceptable values are the same as in tt:Color.
    #[yaserde(prefix = "tt", rename = "Colorspace")]
    pub colorspace: String,
}
impl Validate for ColorspaceRange {}
// XSD choice: supported colors given either as an explicit list or as ranges.
#[derive(PartialEq, Debug, YaSerialize, YaDeserialize)]
pub enum ColorOptionsChoice {
    // List the supported color.
    ColorList(Vec<Color>),
    // Define the range of color supported.
    ColorspaceRange(Vec<ColorspaceRange>),
    // Catch-all preserving any value not covered by this schema version.
    __Unknown__(String),
}
impl Default for ColorOptionsChoice {
fn default() -> ColorOptionsChoice {
Self::__Unknown__("No valid variants".into())
}
}
impl Validate for ColorOptionsChoice {}
// Describe the colors supported. Either list each color or define the range of
// color values.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ColorOptions {
    // Flattened so the choice's element appears directly under this element.
    #[yaserde(flatten)]
    pub color_options_choice: ColorOptionsChoice,
}
impl Validate for ColorOptions {}
// Describe the option of the color and its transparency.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdcolorOptions {
    // Optional list of supported colors.
    #[yaserde(prefix = "tt", rename = "Color")]
    pub color: Option<ColorOptions>,
    // Range of the transparent level. Larger means more transparent.
    #[yaserde(prefix = "tt", rename = "Transparent")]
    pub transparent: Option<IntRange>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdcolorOptionsExtension>,
}
impl Validate for OsdcolorOptions {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdcolorOptionsExtension {}
impl Validate for OsdcolorOptionsExtension {}
// Capabilities a device reports for text OSD elements.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdtextOptions {
    // List of supported OSD text type. When a device indicates the supported
    // number relating to Text type in MaximumNumberOfOSDs, the type shall be
    // presented.
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: Vec<String>,
    // Range of the font size value.
    #[yaserde(prefix = "tt", rename = "FontSizeRange")]
    pub font_size_range: Option<IntRange>,
    // List of supported date format.
    #[yaserde(prefix = "tt", rename = "DateFormat")]
    pub date_format: Vec<String>,
    // List of supported time format.
    #[yaserde(prefix = "tt", rename = "TimeFormat")]
    pub time_format: Vec<String>,
    // List of supported font color.
    #[yaserde(prefix = "tt", rename = "FontColor")]
    pub font_color: Option<OsdcolorOptions>,
    // List of supported background color.
    #[yaserde(prefix = "tt", rename = "BackgroundColor")]
    pub background_color: Option<OsdcolorOptions>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdtextOptionsExtension>,
}
impl Validate for OsdtextOptions {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdtextOptionsExtension {}
impl Validate for OsdtextOptionsExtension {}
// Capabilities a device reports for image OSD elements.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdimgOptions {
    // List of available image URIs.
    #[yaserde(prefix = "tt", rename = "ImagePath")]
    pub image_path: Vec<String>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdimgOptionsExtension>,
    // List of supported image MIME types, such as "image/png".
    #[yaserde(attribute, rename = "FormatsSupported")]
    pub formats_supported: Option<StringAttrList>,
    // The maximum size (in bytes) of the image that can be uploaded.
    #[yaserde(attribute, rename = "MaxSize")]
    pub max_size: Option<i32>,
    // The maximum width (in pixels) of the image that can be uploaded.
    #[yaserde(attribute, rename = "MaxWidth")]
    pub max_width: Option<i32>,
    // The maximum height (in pixels) of the image that can be uploaded.
    #[yaserde(attribute, rename = "MaxHeight")]
    pub max_height: Option<i32>,
}
impl Validate for OsdimgOptions {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdimgOptionsExtension {}
impl Validate for OsdimgOptionsExtension {}
// Full configuration of one OSD element bound to a video source configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct Osdconfiguration {
    // Reference to the video source configuration.
    #[yaserde(prefix = "tt", rename = "VideoSourceConfigurationToken")]
    pub video_source_configuration_token: Osdreference,
    // Type of OSD.
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: Osdtype,
    // Position configuration of OSD.
    #[yaserde(prefix = "tt", rename = "Position")]
    pub position: OsdposConfiguration,
    // Text configuration of OSD. It shall be present when the value of Type
    // field is Text.
    #[yaserde(prefix = "tt", rename = "TextString")]
    pub text_string: Option<OsdtextConfiguration>,
    // Image configuration of OSD. It shall be present when the value of Type
    // field is Image
    #[yaserde(prefix = "tt", rename = "Image")]
    pub image: Option<OsdimgConfiguration>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdconfigurationExtension>,
    // Unique identifier referencing the physical entity.
    #[yaserde(attribute, rename = "token")]
    pub token: ReferenceToken,
}
impl Validate for Osdconfiguration {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdconfigurationExtension {}
impl Validate for OsdconfigurationExtension {}
// Maximum number of OSD instances a device supports, overall and per kind.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct MaximumNumberOfOSDs {
    // Overall limit across all OSD kinds.
    #[yaserde(attribute, rename = "Total")]
    pub total: i32,
    // Per-kind limits; absent means the device does not constrain that kind
    // separately.
    #[yaserde(attribute, rename = "Image")]
    pub image: Option<i32>,
    #[yaserde(attribute, rename = "PlainText")]
    pub plain_text: Option<i32>,
    #[yaserde(attribute, rename = "Date")]
    pub date: Option<i32>,
    #[yaserde(attribute, rename = "Time")]
    pub time: Option<i32>,
    #[yaserde(attribute, rename = "DateAndTime")]
    pub date_and_time: Option<i32>,
}
impl Validate for MaximumNumberOfOSDs {}
// Full OSD capability set for a video source configuration.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdconfigurationOptions {
    // The maximum number of OSD configurations supported for the specified
    // video source configuration. If the configuration does not support OSDs,
    // this value shall be zero and the Type and PositionOption elements are
    // ignored. If a device limits the number of instances by OSDType, it shall
    // indicate the supported number for each type via the related attribute.
    #[yaserde(prefix = "tt", rename = "MaximumNumberOfOSDs")]
    pub maximum_number_of_os_ds: MaximumNumberOfOSDs,
    // List supported type of OSD configuration. When a device indicates the
    // supported number for each type in MaximumNumberOfOSDs, the related type
    // shall be presented. A device shall return Option element relating to
    // listed type.
    #[yaserde(prefix = "tt", rename = "Type")]
    pub _type: Vec<Osdtype>,
    // List available OSD position type. Following are the pre-defined:
    #[yaserde(prefix = "tt", rename = "PositionOption")]
    pub position_option: Vec<String>,
    // Option of the OSD text configuration. This element shall be returned if
    // the device is signaling the support for Text.
    #[yaserde(prefix = "tt", rename = "TextOption")]
    pub text_option: Option<OsdtextOptions>,
    // Option of the OSD image configuration. This element shall be returned if
    // the device is signaling the support for Image.
    #[yaserde(prefix = "tt", rename = "ImageOption")]
    pub image_option: Option<OsdimgOptions>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<OsdconfigurationOptionsExtension>,
}
impl Validate for OsdconfigurationOptions {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct OsdconfigurationOptionsExtension {}
impl Validate for OsdconfigurationOptionsExtension {}
// Upload progress report for one exported file.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct FileProgress {
    // Exported file name
    #[yaserde(prefix = "tt", rename = "FileName")]
    pub file_name: String,
    // Normalized percentage completion for uploading the exported file
    #[yaserde(prefix = "tt", rename = "Progress")]
    pub progress: f64,
}
impl Validate for FileProgress {}
// Collection of per-file export progress reports.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ArrayOfFileProgress {
    // Exported file name and export progress information
    #[yaserde(prefix = "tt", rename = "FileProgress")]
    pub file_progress: Vec<FileProgress>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<ArrayOfFileProgressExtension>,
}
impl Validate for ArrayOfFileProgress {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct ArrayOfFileProgressExtension {}
impl Validate for ArrayOfFileProgressExtension {}
// Location on a configured storage: storage token plus an optional relative
// directory path.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct StorageReferencePath {
    // identifier of an existing Storage Configuration.
    #[yaserde(prefix = "tt", rename = "StorageToken")]
    pub storage_token: ReferenceToken,
    // gives the relative directory path on the storage
    #[yaserde(prefix = "tt", rename = "RelativePath")]
    pub relative_path: Option<String>,
    #[yaserde(prefix = "tt", rename = "Extension")]
    pub extension: Option<StorageReferencePathExtension>,
}
impl Validate for StorageReferencePath {}
// Extension point reserved by the schema for future fields.
#[derive(Default, PartialEq, Debug, YaSerialize, YaDeserialize)]
#[yaserde(prefix = "tt", namespace = "tt: http://www.onvif.org/ver10/schema")]
pub struct StorageReferencePathExtension {}
impl Validate for StorageReferencePathExtension {}
| 35.506526 | 132 | 0.698199 |
67f7b472f9bbf8dd0121a6f38f041c73717dafd5 | 1,190 | use ferrite_session::{
either::*,
prelude::*,
};
/// Demonstrates an internal-choice session type: the provider picks the
/// `Left` branch (send a `String`), while the client is prepared to handle
/// either branch. Fix: the log message typo "receied" -> "received".
pub fn internal_choice_session() -> Session<End>
{
  // Client: receives a channel offering either branch and handles both cases.
  let client: Session<
    ReceiveChannel<
      InternalChoice<Either<SendValue<String, End>, ReceiveValue<u64, End>>>,
      End,
    >,
  > = receive_channel(|chan| {
    case! { chan ;
      Left => {
        // Provider chose Left: it sends us a String, then terminates.
        receive_value_from ( chan,
          move |val: String| {
            println! ("received string: {}", val);
            wait ( chan,
              terminate () )
          })
      }
      Right => {
        // Provider chose Right: it expects a u64 from us, then terminates.
        send_value_to ( chan,
          42,
          wait ( chan,
            terminate () ) )
      }
    }
  });
  // Provider committing to the Left branch (sends a String).
  let provider_left: Session<
    InternalChoice<Either<SendValue<String, End>, ReceiveValue<u64, End>>>,
  > = offer_case!(Left, send_value("provider_left".to_string(), terminate()));
  // Alternative provider committing to the Right branch (kept for reference,
  // intentionally unused).
  let _provider_right: Session<
    InternalChoice<Either<SendValue<String, End>, ReceiveValue<u64, End>>>,
  > = offer_case!(
    Right,
    receive_value(|val: u64| {
      println!("received int: {}", val);
      terminate()
    })
  );
  // Wire the client to the Left provider; the result is a closed session.
  apply_channel(client, provider_left)
}
#[tokio::main]
pub async fn main()
{
  // Entry point: run the client/provider session to completion on Tokio.
  run_session(internal_choice_session()).await
}
| 22.037037 | 78 | 0.569748 |
f7ca03dec65f07814a47a7b4d0d0c326a48120fc | 6,599 | use crate::prelude::*;
/// Parallel-iterator adaptor: folds each chunk of `base` with `fold`,
/// seeding every independent chunk with a fresh identity from `id`.
pub struct Fold<I, ID, F> {
    pub(crate) base: I,
    // Factory producing the identity/seed value for each parallel chunk.
    pub(crate) id: ID,
    // Binary fold operation applied to (accumulator, item).
    pub(crate) fold: F,
}
impl<T, I, ID, F> ParallelIterator for Fold<I, ID, F>
where
    I: ParallelIterator,
    F: Fn(T, I::Item) -> T + Sync + Send,
    ID: Fn() -> T + Sync + Send,
    T: Send,
{
    // Each item of the folded iterator is one accumulated value `T`.
    type Item = T;
    type Controlled = I::Controlled;
    type Enumerable = I::Enumerable;
    // Consumer path: wrap the downstream consumer so folding happens where
    // the items are produced.
    fn drive<C: Consumer<Self::Item>>(self, consumer: C) -> C::Result {
        let fold_consumer = FoldConsumer {
            base: consumer,
            id: &self.id,
            fold: &self.fold,
        };
        self.base.drive(fold_consumer)
    }
    // Producer path: hand the base iterator a callback that wraps whatever
    // producer it builds in a `FoldProducer`.
    fn with_producer<CB>(self, callback: CB) -> CB::Output
    where
        CB: ProducerCallback<Self::Item>,
    {
        // Note: the item definitions below the `return` are still in scope;
        // this is the usual rayon-style pattern for naming the callback type.
        return self.base.with_producer(Callback {
            callback,
            id: self.id,
            fold: self.fold,
        });
        struct Callback<CB, ID, F> {
            callback: CB,
            id: ID,
            fold: F,
        }
        impl<CB, T, R, ID, F> ProducerCallback<T> for Callback<CB, ID, F>
        where
            CB: ProducerCallback<R>,
            F: Fn(R, T) -> R + Sync + Send,
            ID: Fn() -> R + Sync + Send,
            T: Send,
            R: Send,
        {
            type Output = CB::Output;
            fn call<P>(self, base: P) -> CB::Output
            where
                P: Producer<Item = T>,
            {
                // No partial accumulator yet (`init: None`); the identity is
                // created lazily the first time a chunk is folded.
                let producer = FoldProducer {
                    init: None,
                    base: Some(base),
                    id: &self.id,
                    fold: &self.fold,
                };
                self.callback.call(producer)
            }
        }
    }
}
// Producer wrapping a base producer; yields at most one item: the fold of
// everything the base produces. `init` carries a partial accumulator between
// `partial_fold` calls; `base` is `None` once consumed.
struct FoldProducer<'f, T, I, ID, F> {
    init: Option<T>,
    base: Option<I>,
    id: &'f ID,
    fold: &'f F,
}
impl<'f, T, I, ID, F> Iterator for FoldProducer<'f, T, I, ID, F>
where
    I: Iterator,
    F: Fn(T, I::Item) -> T,
    ID: Fn() -> T,
{
    type Item = T;
    // Exactly one item while the base is present, zero afterwards.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = if self.base.is_some() { 1 } else { 0 };
        (remaining, Some(remaining))
    }
    // Consume the base iterator in one go, folding it into a single value.
    // Any partial accumulator from an earlier `partial_fold` is used as the
    // seed; otherwise a fresh identity is created.
    fn next(&mut self) -> Option<Self::Item> {
        let iter = self.base.take()?;
        let seed = self.init.take().unwrap_or_else(self.id);
        Some(iter.fold(seed, self.fold))
    }
}
impl<'f, T, I, ID, F> DoubleEndedIterator for FoldProducer<'f, T, I, ID, F>
where
    I: DoubleEndedIterator,
    F: Fn(T, I::Item) -> T,
    ID: Fn() -> T,
{
    // Mirror of `next`, folding the base iterator from the back.
    fn next_back(&mut self) -> Option<Self::Item> {
        let iter = self.base.take()?;
        let seed = self.init.take().unwrap_or_else(self.id);
        Some(iter.rfold(seed, self.fold))
    }
}
impl<'f, T, I, ID, F> Divisible for FoldProducer<'f, T, I, ID, F>
where
    I: Producer,
    F: Fn(T, I::Item) -> T,
    ID: Fn() -> T,
{
    type Controlled = I::Controlled;
    // Defer the split decision to the base producer; an exhausted producer
    // (base == None) is never divided.
    fn should_be_divided(&self) -> bool {
        self.base
            .as_ref()
            .map(|b| b.should_be_divided())
            .unwrap_or(false)
    }
    // Split the base; the partial accumulator (`init`) stays with the left
    // half so the fold order is preserved. The right half starts fresh.
    fn divide(self) -> (Self, Self) {
        let (left, right) = self
            .base
            .map(|b| {
                let (l, r) = b.divide();
                (Some(l), Some(r))
            })
            .unwrap_or((None, None));
        (
            FoldProducer {
                init: self.init,
                base: left,
                id: self.id,
                fold: self.fold,
            },
            FoldProducer {
                init: None,
                base: right,
                id: self.id,
                fold: self.fold,
            },
        )
    }
    // Same as `divide`, but splitting the base at an explicit index.
    fn divide_at(self, index: usize) -> (Self, Self) {
        let (left, right) = self
            .base
            .map(|b| {
                let (l, r) = b.divide_at(index);
                (Some(l), Some(r))
            })
            .unwrap_or((None, None));
        (
            FoldProducer {
                init: self.init,
                base: left,
                id: self.id,
                fold: self.fold,
            },
            FoldProducer {
                init: None,
                base: right,
                id: self.id,
                fold: self.fold,
            },
        )
    }
}
impl<'f, T, I, ID, F> Producer for FoldProducer<'f, T, I, ID, F>
where
    T: Send,
    I: Producer,
    F: Fn(T, I::Item) -> T + Sync,
    ID: Fn() -> T + Sync,
{
    // Size of the *base* production; an exhausted producer reports (0, 0).
    fn sizes(&self) -> (usize, Option<usize>) {
        self.base
            .as_ref()
            .map(|b| b.sizes())
            .unwrap_or((0, Some(0)))
    }
    // Folding is destructive, so peeking at an item is impossible.
    fn preview(&self, _: usize) -> Self::Item {
        panic!("FoldProducer is not previewable")
    }
    // Delegate scheduling to the base producer. NOTE(review): `unwrap` here
    // assumes a scheduler is never requested after the base was consumed.
    fn scheduler<'s, P: 's, R: 's>(&self) -> Box<dyn Scheduler<P, R> + 's>
    where
        P: Producer,
        P::Item: Send,
        R: Reducer<P::Item>,
    {
        self.base.as_ref().map(|b| b.scheduler()).unwrap()
    }
    /// same as in worker, we don't use fold op here
    // The outer `fold_op` is ignored: up to `limit` base items are folded
    // into our own accumulator (`self.init`) with the inner fold, and the
    // caller's `init` is returned untouched.
    fn partial_fold<B, FO>(&mut self, init: B, _fold_op: FO, limit: usize) -> B
    where
        B: Send,
        FO: Fn(B, Self::Item) -> B,
    {
        let inner_fold_op = self.fold;
        let inner_id = self.id;
        if let Some(base) = self.base.as_mut() {
            self.init = Some(base.partial_fold(
                self.init.take().unwrap_or_else(inner_id),
                inner_fold_op,
                limit,
            ))
        }
        init
    }
    // Delegate block sizing to the base; default to "anything goes" when the
    // base is gone.
    fn micro_block_sizes(&self) -> (usize, usize) {
        self.base
            .as_ref()
            .map(|inner| inner.micro_block_sizes())
            .unwrap_or((1, usize::MAX))
    }
}
// consumer
// Consumer wrapper: folds incoming producers before handing the single
// accumulated value to the downstream consumer `base`.
struct FoldConsumer<'f, C, ID, F> {
    base: C,
    id: &'f ID,
    fold: &'f F,
}
impl<'f, C: Clone, ID, F> Clone for FoldConsumer<'f, C, ID, F> {
fn clone(&self) -> Self {
FoldConsumer {
base: self.base.clone(),
id: self.id,
fold: self.fold,
}
}
}
impl<'f, T, Item, C, ID, F> Consumer<Item> for FoldConsumer<'f, C, ID, F>
where
    T: Send,
    C: Consumer<T>,
    F: Fn(T, Item) -> T + Sync,
    ID: Fn() -> T + Sync,
{
    type Result = C::Result;
    type Reducer = C::Reducer;
    // Wrap the incoming producer in a `FoldProducer` so the downstream
    // consumer sees a single folded value per chunk.
    fn consume_producer<P>(self, producer: P) -> Self::Result
    where
        P: Producer<Item = Item>,
    {
        let fold_producer = FoldProducer {
            init: None,
            base: Some(producer),
            id: self.id,
            fold: self.fold,
        };
        self.base.consume_producer(fold_producer)
    }
    // Reduction of results is entirely the downstream consumer's business.
    fn to_reducer(self) -> Self::Reducer {
        self.base.to_reducer()
    }
}
| 24.531599 | 82 | 0.449765 |
388db3f409f24368ee6e38b1bd104403b4992889 | 116 | use Vec3f;
// Result of a collision query: when the contact occurred and the surface
// normal at the contact point.
#[derive(Clone, Debug, PartialEq)]
pub struct ContactInfo {
    // Time of contact; units (seconds vs. normalized [0,1] along the sweep)
    // are not established here — confirm against the caller.
    pub time: f32,
    // Contact surface normal. NOTE(review): presumably unit-length; verify
    // at the producer.
    pub normal: Vec3f,
}
| 14.5 | 34 | 0.663793 |
db8c4ab7c598848d01b60516aa754738d7d0f56e | 16,066 | //! Defines the common used enums.
use crate::common::*;
/// The enumeration of options.
///
/// Each variant maps one-to-one onto a librealsense `rs2_option` constant;
/// the `#[repr(i32)]` discriminants must stay equal to the FFI values, so
/// variants must not be reordered or renumbered.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Rs2Option {
    BacklightCompensation = sys::rs2_option_RS2_OPTION_BACKLIGHT_COMPENSATION,
    Brightness = sys::rs2_option_RS2_OPTION_BRIGHTNESS,
    Contrast = sys::rs2_option_RS2_OPTION_CONTRAST,
    Exposure = sys::rs2_option_RS2_OPTION_EXPOSURE,
    Gain = sys::rs2_option_RS2_OPTION_GAIN,
    Gamma = sys::rs2_option_RS2_OPTION_GAMMA,
    Hue = sys::rs2_option_RS2_OPTION_HUE,
    Saturation = sys::rs2_option_RS2_OPTION_SATURATION,
    Sharpness = sys::rs2_option_RS2_OPTION_SHARPNESS,
    WhiteBalance = sys::rs2_option_RS2_OPTION_WHITE_BALANCE,
    EnableAutoExposure = sys::rs2_option_RS2_OPTION_ENABLE_AUTO_EXPOSURE,
    EnableAutoWhiteBalance = sys::rs2_option_RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE,
    VisualPreset = sys::rs2_option_RS2_OPTION_VISUAL_PRESET,
    LaserPower = sys::rs2_option_RS2_OPTION_LASER_POWER,
    Accuracy = sys::rs2_option_RS2_OPTION_ACCURACY,
    MotionRange = sys::rs2_option_RS2_OPTION_MOTION_RANGE,
    FilterOption = sys::rs2_option_RS2_OPTION_FILTER_OPTION,
    ConfidenceThreshold = sys::rs2_option_RS2_OPTION_CONFIDENCE_THRESHOLD,
    EmitterEnabled = sys::rs2_option_RS2_OPTION_EMITTER_ENABLED,
    FramesQueueSize = sys::rs2_option_RS2_OPTION_FRAMES_QUEUE_SIZE,
    TotalFrameDrops = sys::rs2_option_RS2_OPTION_TOTAL_FRAME_DROPS,
    AutoExposureMode = sys::rs2_option_RS2_OPTION_AUTO_EXPOSURE_MODE,
    PowerLineFrequency = sys::rs2_option_RS2_OPTION_POWER_LINE_FREQUENCY,
    AsicTemperature = sys::rs2_option_RS2_OPTION_ASIC_TEMPERATURE,
    ErrorPollingEnabled = sys::rs2_option_RS2_OPTION_ERROR_POLLING_ENABLED,
    ProjectorTemperature = sys::rs2_option_RS2_OPTION_PROJECTOR_TEMPERATURE,
    OutputTriggerEnabled = sys::rs2_option_RS2_OPTION_OUTPUT_TRIGGER_ENABLED,
    MotionModuleTemperature = sys::rs2_option_RS2_OPTION_MOTION_MODULE_TEMPERATURE,
    DepthUnits = sys::rs2_option_RS2_OPTION_DEPTH_UNITS,
    EnableMotionCorrection = sys::rs2_option_RS2_OPTION_ENABLE_MOTION_CORRECTION,
    AutoExposurePriority = sys::rs2_option_RS2_OPTION_AUTO_EXPOSURE_PRIORITY,
    ColorScheme = sys::rs2_option_RS2_OPTION_COLOR_SCHEME,
    HistogramEqualizationEnabled = sys::rs2_option_RS2_OPTION_HISTOGRAM_EQUALIZATION_ENABLED,
    MinDistance = sys::rs2_option_RS2_OPTION_MIN_DISTANCE,
    MaxDistance = sys::rs2_option_RS2_OPTION_MAX_DISTANCE,
    TextureSource = sys::rs2_option_RS2_OPTION_TEXTURE_SOURCE,
    FilterMagnitude = sys::rs2_option_RS2_OPTION_FILTER_MAGNITUDE,
    FilterSmoothAlpha = sys::rs2_option_RS2_OPTION_FILTER_SMOOTH_ALPHA,
    FilterSmoothDelta = sys::rs2_option_RS2_OPTION_FILTER_SMOOTH_DELTA,
    HolesFill = sys::rs2_option_RS2_OPTION_HOLES_FILL,
    StereoBaseline = sys::rs2_option_RS2_OPTION_STEREO_BASELINE,
    AutoExposureConvergeStep = sys::rs2_option_RS2_OPTION_AUTO_EXPOSURE_CONVERGE_STEP,
    InterCamSyncMode = sys::rs2_option_RS2_OPTION_INTER_CAM_SYNC_MODE,
    StreamFilter = sys::rs2_option_RS2_OPTION_STREAM_FILTER,
    StreamFormatFilter = sys::rs2_option_RS2_OPTION_STREAM_FORMAT_FILTER,
    StreamIndexFilter = sys::rs2_option_RS2_OPTION_STREAM_INDEX_FILTER,
    EmitterOnOff = sys::rs2_option_RS2_OPTION_EMITTER_ON_OFF,
    ZeroOrderPointX = sys::rs2_option_RS2_OPTION_ZERO_ORDER_POINT_X,
    ZeroOrderPointY = sys::rs2_option_RS2_OPTION_ZERO_ORDER_POINT_Y,
    LldTemperature = sys::rs2_option_RS2_OPTION_LLD_TEMPERATURE,
    McTemperature = sys::rs2_option_RS2_OPTION_MC_TEMPERATURE,
    MaTemperature = sys::rs2_option_RS2_OPTION_MA_TEMPERATURE,
    HardwarePreset = sys::rs2_option_RS2_OPTION_HARDWARE_PRESET,
    GlobalTimeEnabled = sys::rs2_option_RS2_OPTION_GLOBAL_TIME_ENABLED,
    ApdTemperature = sys::rs2_option_RS2_OPTION_APD_TEMPERATURE,
    EnableMapping = sys::rs2_option_RS2_OPTION_ENABLE_MAPPING,
    EnableRelocalization = sys::rs2_option_RS2_OPTION_ENABLE_RELOCALIZATION,
    EnablePoseJumping = sys::rs2_option_RS2_OPTION_ENABLE_POSE_JUMPING,
    EnableDynamicCalibration = sys::rs2_option_RS2_OPTION_ENABLE_DYNAMIC_CALIBRATION,
    DepthOffset = sys::rs2_option_RS2_OPTION_DEPTH_OFFSET,
    LedPower = sys::rs2_option_RS2_OPTION_LED_POWER,
    ZeroOrderEnabled = sys::rs2_option_RS2_OPTION_ZERO_ORDER_ENABLED,
    EnableMapPreservation = sys::rs2_option_RS2_OPTION_ENABLE_MAP_PRESERVATION,
    // Sentinel: number of options, not a real option.
    Count = sys::rs2_option_RS2_OPTION_COUNT,
}
impl Rs2Option {
    /// Returns the librealsense-provided name of this option as a C string.
    ///
    /// The pointer returned by `rs2_option_to_string` refers to a static
    /// string owned by librealsense, hence the `'static` lifetime.
    pub fn to_cstr(&self) -> &'static CStr {
        unsafe {
            // SAFETY: `rs2_option_to_string` is documented to return a valid,
            // NUL-terminated, statically allocated string for every option.
            let ptr = sys::rs2_option_to_string(*self as sys::rs2_option);
            CStr::from_ptr(ptr)
        }
    }

    /// Returns the option name as a UTF-8 `&str`.
    pub fn to_str(&self) -> &'static str {
        // Option names from librealsense are plain ASCII; state the invariant
        // instead of a bare `unwrap`.
        self.to_cstr()
            .to_str()
            .expect("librealsense option name is valid UTF-8")
    }
}
// Implementing `Display` (rather than `ToString` directly) is the idiomatic
// form; `to_string()` keeps working for all callers via the blanket
// `impl<T: Display> ToString for T`, and the type gains `{}` formatting.
impl std::fmt::Display for Rs2Option {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.to_str())
    }
}
/// The enumeration of timestamp domains.
///
/// Variants map one-to-one onto librealsense `rs2_timestamp_domain`
/// constants; `#[repr(i32)]` discriminants must stay equal to the FFI values.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TimestampDomain {
    HardwareClock = sys::rs2_timestamp_domain_RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK,
    SystemTime = sys::rs2_timestamp_domain_RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME,
    GlobalTime = sys::rs2_timestamp_domain_RS2_TIMESTAMP_DOMAIN_GLOBAL_TIME,
    // Sentinel: number of domains, not a real domain.
    Count = sys::rs2_timestamp_domain_RS2_TIMESTAMP_DOMAIN_COUNT,
}
impl TimestampDomain {
    /// Returns the librealsense-provided name of this domain as a C string.
    ///
    /// The returned pointer refers to a static string owned by librealsense,
    /// hence the `'static` lifetime.
    pub fn as_cstr(&self) -> &'static CStr {
        unsafe {
            // SAFETY: `rs2_timestamp_domain_to_string` returns a valid,
            // NUL-terminated, statically allocated string for every domain.
            let ptr = sys::rs2_timestamp_domain_to_string(*self as sys::rs2_timestamp_domain);
            CStr::from_ptr(ptr)
        }
    }

    /// Returns the domain name as a UTF-8 `&str`.
    pub fn as_str(&self) -> &'static str {
        // Domain names from librealsense are plain ASCII; state the invariant
        // instead of a bare `unwrap`.
        self.as_cstr()
            .to_str()
            .expect("librealsense timestamp domain name is valid UTF-8")
    }
}
// Implementing `Display` (rather than `ToString` directly) is the idiomatic
// form; `to_string()` keeps working for all callers via the blanket
// `impl<T: Display> ToString for T`, and the type gains `{}` formatting.
impl std::fmt::Display for TimestampDomain {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
/// The enumeration of metadata kinds of a frame.
///
/// Discriminants mirror the `rs2_frame_metadata_value` FFI constants from the
/// librealsense2 bindings (`sys`), so raw values round-trip via `FromPrimitive`.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FrameMetaDataValue {
    FrameCounter = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_COUNTER,
    FrameTimestamp = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_TIMESTAMP,
    SensorTimestamp = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_SENSOR_TIMESTAMP,
    ActualExposure = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_ACTUAL_EXPOSURE,
    GainLevel = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_GAIN_LEVEL,
    AutoExposure = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_AUTO_EXPOSURE,
    WhiteBalance = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_WHITE_BALANCE,
    TimeOfArrival = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_TIME_OF_ARRIVAL,
    Temperature = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_TEMPERATURE,
    BackendTimestamp = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_BACKEND_TIMESTAMP,
    ActualFps = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_ACTUAL_FPS,
    FrameLaserPower = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_LASER_POWER,
    FrameLaserPowerMode = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_LASER_POWER_MODE,
    ExposurePriority = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_EXPOSURE_PRIORITY,
    ExposureRoiLeft = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_EXPOSURE_ROI_LEFT,
    ExposureRoiRight = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_EXPOSURE_ROI_RIGHT,
    ExposureRoiTop = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_EXPOSURE_ROI_TOP,
    ExposureRoiBottom = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_EXPOSURE_ROI_BOTTOM,
    Brightness = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_BRIGHTNESS,
    Contrast = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_CONTRAST,
    Saturation = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_SATURATION,
    Sharpness = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_SHARPNESS,
    AutoWhiteBalanceTemperature =
        sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_AUTO_WHITE_BALANCE_TEMPERATURE,
    BacklightCompensation = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_BACKLIGHT_COMPENSATION,
    Hue = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_HUE,
    Gamma = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_GAMMA,
    ManualWhiteBalance = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_MANUAL_WHITE_BALANCE,
    PowerLineFrequency = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_POWER_LINE_FREQUENCY,
    LowLightCompensation = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_LOW_LIGHT_COMPENSATION,
    FrameEmitterMode = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_EMITTER_MODE,
    FrameLedPower = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_FRAME_LED_POWER,
    // Sentinel carried over from the C enum; not a real metadata kind.
    Count = sys::rs2_frame_metadata_value_RS2_FRAME_METADATA_COUNT,
}
/// The enumeration of extensions.
///
/// Discriminants mirror the `rs2_extension` FFI constants from the
/// librealsense2 bindings (`sys`), so raw values round-trip via `FromPrimitive`.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Extension {
    // sensor
    ColorSensor = sys::rs2_extension_RS2_EXTENSION_COLOR_SENSOR,
    MotionSensor = sys::rs2_extension_RS2_EXTENSION_MOTION_SENSOR,
    FishEyeSensor = sys::rs2_extension_RS2_EXTENSION_FISHEYE_SENSOR,
    DepthSensor = sys::rs2_extension_RS2_EXTENSION_DEPTH_SENSOR,
    DepthStereoSensor = sys::rs2_extension_RS2_EXTENSION_DEPTH_STEREO_SENSOR,
    SoftwareSensor = sys::rs2_extension_RS2_EXTENSION_SOFTWARE_SENSOR,
    PoseSensor = sys::rs2_extension_RS2_EXTENSION_POSE_SENSOR,
    L500DepthSensor = sys::rs2_extension_RS2_EXTENSION_L500_DEPTH_SENSOR,
    Tm2Sensor = sys::rs2_extension_RS2_EXTENSION_TM2_SENSOR,
    // frame
    VideoFrame = sys::rs2_extension_RS2_EXTENSION_VIDEO_FRAME,
    MotionFrame = sys::rs2_extension_RS2_EXTENSION_MOTION_FRAME,
    CompositeFrame = sys::rs2_extension_RS2_EXTENSION_COMPOSITE_FRAME,
    DepthFrame = sys::rs2_extension_RS2_EXTENSION_DEPTH_FRAME,
    DisparityFrame = sys::rs2_extension_RS2_EXTENSION_DISPARITY_FRAME,
    PoseFrame = sys::rs2_extension_RS2_EXTENSION_POSE_FRAME,
    Points = sys::rs2_extension_RS2_EXTENSION_POINTS,
    // filter
    DecimationFilter = sys::rs2_extension_RS2_EXTENSION_DECIMATION_FILTER,
    ThresholdFilter = sys::rs2_extension_RS2_EXTENSION_THRESHOLD_FILTER,
    DisparityFilter = sys::rs2_extension_RS2_EXTENSION_DISPARITY_FILTER,
    SpatialFilter = sys::rs2_extension_RS2_EXTENSION_SPATIAL_FILTER,
    TemporalFilter = sys::rs2_extension_RS2_EXTENSION_TEMPORAL_FILTER,
    HoleFillingFilter = sys::rs2_extension_RS2_EXTENSION_HOLE_FILLING_FILTER,
    ZeroOrderFilter = sys::rs2_extension_RS2_EXTENSION_ZERO_ORDER_FILTER,
    RecommendedFilters = sys::rs2_extension_RS2_EXTENSION_RECOMMENDED_FILTERS,
    // profile
    VideoProfile = sys::rs2_extension_RS2_EXTENSION_VIDEO_PROFILE,
    MotionProfile = sys::rs2_extension_RS2_EXTENSION_MOTION_PROFILE,
    PoseProfile = sys::rs2_extension_RS2_EXTENSION_POSE_PROFILE,
    // device
    SoftwareDevice = sys::rs2_extension_RS2_EXTENSION_SOFTWARE_DEVICE,
    UpdateDevice = sys::rs2_extension_RS2_EXTENSION_UPDATE_DEVICE,
    AutoCalibratedDevice = sys::rs2_extension_RS2_EXTENSION_AUTO_CALIBRATED_DEVICE,
    // misc
    AdvancedMode = sys::rs2_extension_RS2_EXTENSION_ADVANCED_MODE,
    Record = sys::rs2_extension_RS2_EXTENSION_RECORD,
    Playback = sys::rs2_extension_RS2_EXTENSION_PLAYBACK,
    Pose = sys::rs2_extension_RS2_EXTENSION_POSE,
    WheelOdometer = sys::rs2_extension_RS2_EXTENSION_WHEEL_ODOMETER,
    GlobalTimer = sys::rs2_extension_RS2_EXTENSION_GLOBAL_TIMER,
    Updatable = sys::rs2_extension_RS2_EXTENSION_UPDATABLE,
    // Sentinel carried over from the C enum; not a real extension.
    Count = sys::rs2_extension_RS2_EXTENSION_COUNT,
    Tm2 = sys::rs2_extension_RS2_EXTENSION_TM2,
    Unknown = sys::rs2_extension_RS2_EXTENSION_UNKNOWN,
    Debug = sys::rs2_extension_RS2_EXTENSION_DEBUG,
    Info = sys::rs2_extension_RS2_EXTENSION_INFO,
    Motion = sys::rs2_extension_RS2_EXTENSION_MOTION,
    Options = sys::rs2_extension_RS2_EXTENSION_OPTIONS,
    Video = sys::rs2_extension_RS2_EXTENSION_VIDEO,
    Roi = sys::rs2_extension_RS2_EXTENSION_ROI,
}
/// The enumeration of sensor information.
///
/// Discriminants mirror the `rs2_camera_info` FFI constants from the
/// librealsense2 bindings (`sys`), so raw values round-trip via `FromPrimitive`.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum CameraInfo {
    Name = sys::rs2_camera_info_RS2_CAMERA_INFO_NAME,
    SerialNumber = sys::rs2_camera_info_RS2_CAMERA_INFO_SERIAL_NUMBER,
    FirmwareVersion = sys::rs2_camera_info_RS2_CAMERA_INFO_FIRMWARE_VERSION,
    RecommendedFirmwareVersion = sys::rs2_camera_info_RS2_CAMERA_INFO_RECOMMENDED_FIRMWARE_VERSION,
    PhysicalPort = sys::rs2_camera_info_RS2_CAMERA_INFO_PHYSICAL_PORT,
    DebugOpCode = sys::rs2_camera_info_RS2_CAMERA_INFO_DEBUG_OP_CODE,
    AdvancedMode = sys::rs2_camera_info_RS2_CAMERA_INFO_ADVANCED_MODE,
    ProductId = sys::rs2_camera_info_RS2_CAMERA_INFO_PRODUCT_ID,
    CameraLocked = sys::rs2_camera_info_RS2_CAMERA_INFO_CAMERA_LOCKED,
    UsbTypeDescriptor = sys::rs2_camera_info_RS2_CAMERA_INFO_USB_TYPE_DESCRIPTOR,
    ProductLine = sys::rs2_camera_info_RS2_CAMERA_INFO_PRODUCT_LINE,
    AsicSerialNumber = sys::rs2_camera_info_RS2_CAMERA_INFO_ASIC_SERIAL_NUMBER,
    FirmwareUpdateId = sys::rs2_camera_info_RS2_CAMERA_INFO_FIRMWARE_UPDATE_ID,
    // Sentinel carried over from the C enum; not a real info kind.
    Count = sys::rs2_camera_info_RS2_CAMERA_INFO_COUNT,
}
/// The enumeration of all categories of stream.
///
/// Discriminants mirror the `rs2_stream` FFI constants from the librealsense2
/// bindings (`sys`), so raw values round-trip via `FromPrimitive`.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum StreamKind {
    Any = sys::rs2_stream_RS2_STREAM_ANY,
    Depth = sys::rs2_stream_RS2_STREAM_DEPTH,
    Color = sys::rs2_stream_RS2_STREAM_COLOR,
    Infrared = sys::rs2_stream_RS2_STREAM_INFRARED,
    Fisheye = sys::rs2_stream_RS2_STREAM_FISHEYE,
    Gyro = sys::rs2_stream_RS2_STREAM_GYRO,
    Accel = sys::rs2_stream_RS2_STREAM_ACCEL,
    Gpio = sys::rs2_stream_RS2_STREAM_GPIO,
    Pose = sys::rs2_stream_RS2_STREAM_POSE,
    Confidence = sys::rs2_stream_RS2_STREAM_CONFIDENCE,
    // Sentinel carried over from the C enum; not a real stream kind.
    Count = sys::rs2_stream_RS2_STREAM_COUNT,
}
/// The enumeration of frame data format.
///
/// Discriminants mirror the `rs2_format` FFI constants from the librealsense2
/// bindings (`sys`), so raw values round-trip via `FromPrimitive`.
#[repr(i32)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Format {
    Any = sys::rs2_format_RS2_FORMAT_ANY,
    Yuyv = sys::rs2_format_RS2_FORMAT_YUYV,
    Uyvy = sys::rs2_format_RS2_FORMAT_UYVY,
    MotionRaw = sys::rs2_format_RS2_FORMAT_MOTION_RAW,
    GpioRaw = sys::rs2_format_RS2_FORMAT_GPIO_RAW,
    Distance = sys::rs2_format_RS2_FORMAT_DISTANCE,
    Mjpeg = sys::rs2_format_RS2_FORMAT_MJPEG,
    Inzi = sys::rs2_format_RS2_FORMAT_INZI,
    Invi = sys::rs2_format_RS2_FORMAT_INVI,
    // Sentinel carried over from the C enum; not a real format.
    Count = sys::rs2_format_RS2_FORMAT_COUNT,
    // Leading underscore because Rust identifiers cannot start with a digit.
    _6Dof = sys::rs2_format_RS2_FORMAT_6DOF,
    Bgr8 = sys::rs2_format_RS2_FORMAT_BGR8,
    Bgra8 = sys::rs2_format_RS2_FORMAT_BGRA8,
    Disparity16 = sys::rs2_format_RS2_FORMAT_DISPARITY16,
    Disparity32 = sys::rs2_format_RS2_FORMAT_DISPARITY32,
    MotionXyz32F = sys::rs2_format_RS2_FORMAT_MOTION_XYZ32F,
    Raw8 = sys::rs2_format_RS2_FORMAT_RAW8,
    Raw10 = sys::rs2_format_RS2_FORMAT_RAW10,
    Raw16 = sys::rs2_format_RS2_FORMAT_RAW16,
    Rgb8 = sys::rs2_format_RS2_FORMAT_RGB8,
    Rgba8 = sys::rs2_format_RS2_FORMAT_RGBA8,
    W10 = sys::rs2_format_RS2_FORMAT_W10,
    Xyz32F = sys::rs2_format_RS2_FORMAT_XYZ32F,
    Y8 = sys::rs2_format_RS2_FORMAT_Y8,
    Y8I = sys::rs2_format_RS2_FORMAT_Y8I,
    Y10Bpack = sys::rs2_format_RS2_FORMAT_Y10BPACK,
    Y12I = sys::rs2_format_RS2_FORMAT_Y12I,
    Y16 = sys::rs2_format_RS2_FORMAT_Y16,
    Z16 = sys::rs2_format_RS2_FORMAT_Z16,
}
/// The enumeration of color schemes.
///
/// NOTE(review): the discriminants look like raw option values for the
/// colorizer's color-scheme setting — confirm against the librealsense docs
/// before relying on the numeric mapping.
#[repr(usize)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ColorScheme {
    Jet = 0,
    Classic = 1,
    WhiteToBlack = 2,
    BlackToWhite = 3,
    Bio = 4,
    Cold = 5,
    Warm = 6,
    Quantized = 7,
    Pattern = 8,
    Hue = 9,
}
/// The enumeration of persistence controls.
///
/// Variant names encode the rule: `ValidXOutOfY` keeps a pixel if it was valid
/// in at least X of the last Y frames.
#[repr(usize)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum PersistenceControl {
    Disabled = 0,
    Valid8OutOf8 = 1,
    Valid2OutOf3 = 2,
    Valid2OutOf4 = 3,
    Valid2OutOf8 = 4,
    Valid1OutOf2 = 5,
    Valid1OutOf5 = 6,
    Valid1OutOf8 = 7,
    Indefinitely = 8,
}
/// The enumeration of hole filling modes.
/// (The previous doc said "persistence controls" — a copy-paste error.)
#[repr(usize)]
#[derive(FromPrimitive, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum HoleFillingMode {
    FillFromLeft = 0,
    // NOTE(review): "Farest" is a typo for "Farthest", but renaming the
    // variant would break the public API, so it is left as-is.
    FarestFromAround = 1,
    NearestFromAround = 2,
}
| 48.537764 | 100 | 0.796527 |
8f0adcc2a166f9aa366a9c56450850e178d2bbc7 | 6,256 | // Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use std::convert::TryFrom;
use std::sync::Arc;
// io::ipc::write::common::{encoded_batch, DictionaryTracker, EncodedData, IpcWriteOptions}
use common_arrow::arrow::datatypes::SchemaRef as ArrowSchemaRef;
use common_arrow::arrow::io::ipc::write::common::IpcWriteOptions;
use common_arrow::arrow::record_batch::RecordBatch;
use common_arrow::arrow_flight::utils::flight_data_from_arrow_batch;
use common_arrow::arrow_flight::utils::flight_data_from_arrow_schema;
use common_arrow::arrow_flight::utils::flight_data_to_arrow_batch;
use common_arrow::arrow_flight::Ticket;
use common_datablocks::DataBlock;
use common_datavalues::prelude::*;
use common_exception::ErrorCode;
use common_planners::PlanNode;
use common_planners::ScanPlan;
use common_runtime::tokio;
pub use common_store_api::AppendResult;
pub use common_store_api::BlockStream;
pub use common_store_api::DataPartInfo;
pub use common_store_api::ReadAction;
pub use common_store_api::ReadPlanResult;
pub use common_store_api::StorageApi;
pub use common_store_api::TruncateTableResult;
use common_streams::SendableDataBlockStream;
use futures::SinkExt;
use futures::StreamExt;
use tonic::Request;
use crate::action_declare;
use crate::impls::storage_api_impl_utils;
pub use crate::impls::storage_api_impl_utils::get_meta;
use crate::RequestFor;
use crate::StoreClient;
use crate::StoreDoAction;
use crate::StoreDoGet;
/// Request payload for the `ReadPlan` store action: asks the store to resolve
/// a table scan plan into readable partitions (`ReadPlanResult`).
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug)]
pub struct ReadPlanAction {
    pub scan_plan: ScanPlan,
}
// Wires ReadPlanAction into the StoreDoAction dispatch, with ReadPlanResult as its reply type.
action_declare!(ReadPlanAction, ReadPlanResult, StoreDoAction::ReadPlan);
/// Request payload for the `TruncateTable` store action: removes all data of
/// table `db`.`table` while keeping the table definition.
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug)]
pub struct TruncateTableAction {
    pub db: String,
    pub table: String,
}
// Wires TruncateTableAction into the StoreDoAction dispatch, with TruncateTableResult as its reply type.
action_declare!(
    TruncateTableAction,
    TruncateTableResult,
    StoreDoAction::TruncateTable
);
#[async_trait::async_trait]
impl StorageApi for StoreClient {
    /// Asks the store to resolve `scan_plan` for table `db_name`.`tbl_name`
    /// into a set of readable partitions.
    async fn read_plan(
        &mut self,
        db_name: String,
        tbl_name: String,
        scan_plan: &ScanPlan,
    ) -> common_exception::Result<ReadPlanResult> {
        let mut plan = scan_plan.clone();
        // The store addresses tables as "db/table" via the plan's schema_name.
        plan.schema_name = format!("{}/{}", db_name, tbl_name);
        let plan = ReadPlanAction { scan_plan: plan };
        self.do_action(plan).await
    }
    /// Streams the data blocks of one partition from the store via an Arrow
    /// Flight DoGet call, decoding each FlightData message into a DataBlock.
    async fn read_partition(
        &mut self,
        schema: DataSchemaRef,
        read_action: &ReadAction,
    ) -> common_exception::Result<SendableDataBlockStream> {
        let cmd = StoreDoGet::Read(read_action.clone());
        let mut req = tonic::Request::<Ticket>::from(&cmd);
        req.set_timeout(self.timeout);
        let res = self.client.do_get(req).await?.into_inner();
        let mut arrow_schema: ArrowSchemaRef = Arc::new(schema.to_arrow());
        // replace table schema with projected schema
        // TODO tweak method signature, only ReadDataSourcePlan are supposed to be passed in
        if let PlanNode::ReadSource(plan) = &read_action.push_down {
            arrow_schema = Arc::new(plan.schema.to_arrow())
        }
        // Lazily decode each incoming Flight message; transport errors are
        // surfaced as ErrorCode::TokioError, decode errors via ErrorCode::from.
        let res_stream = res.map(move |item| {
            item.map_err(|status| ErrorCode::TokioError(status.to_string()))
                .and_then(|item| {
                    flight_data_to_arrow_batch(&item, arrow_schema.clone(), true, &[])
                        .map_err(ErrorCode::from)
                })
                .and_then(DataBlock::try_from)
        });
        Ok(Box::pin(res_stream))
    }
    /// Uploads a stream of data blocks to table `db_name`.`tbl_name` via an
    /// Arrow Flight DoPut call. The schema message is sent first; blocks are
    /// converted to RecordBatches on a spawned task and forwarded over a
    /// bounded channel. Returns the store's AppendResult, decoded from the
    /// PutResult's app_metadata.
    async fn append_data(
        &mut self,
        db_name: String,
        tbl_name: String,
        scheme_ref: DataSchemaRef,
        mut block_stream: BlockStream,
    ) -> common_exception::Result<AppendResult> {
        let ipc_write_opt = IpcWriteOptions::default();
        let arrow_schema: ArrowSchemaRef = Arc::new(scheme_ref.to_arrow());
        let flight_schema = flight_data_from_arrow_schema(arrow_schema.as_ref(), &ipc_write_opt);
        // Bounded channel (capacity 100) provides back-pressure between block
        // conversion and the Flight transport.
        let (mut tx, flight_stream) = futures::channel::mpsc::channel(100);
        // The schema must be the first message of a Flight data stream.
        tx.send(flight_schema)
            .await
            .map_err(|send_err| ErrorCode::BrokenChannel(send_err.to_string()))?;
        tokio::spawn(async move {
            while let Some(block) = block_stream.next().await {
                log::info!("next data block");
                match RecordBatch::try_from(block) {
                    Ok(batch) => {
                        if let Err(_e) = tx
                            .send(flight_data_from_arrow_batch(&batch, &ipc_write_opt).1)
                            .await
                        {
                            log::error!("failed to send flight-data to downstream, breaking out");
                            break;
                        }
                    }
                    Err(e) => {
                        // Conversion failure aborts the upload; the error is
                        // only logged here, not propagated to the caller.
                        log::error!(
                            "failed to convert DataBlock to RecordBatch , breaking out, {:?}",
                            e
                        );
                        break;
                    }
                }
            }
        });
        let mut req = Request::new(flight_stream);
        // Target table is carried in the gRPC request metadata.
        let meta = req.metadata_mut();
        storage_api_impl_utils::put_meta(meta, &db_name, &tbl_name);
        let res = self.client.do_put(req).await?;
        match res.into_inner().message().await? {
            Some(res) => Ok(serde_json::from_slice(&res.app_metadata)?),
            None => Err(ErrorCode::UnknownException("Put result is empty")),
        }
    }
    /// Removes all data of table `db`.`table` on the store.
    async fn truncate(
        &mut self,
        db: String,
        table: String,
    ) -> common_exception::Result<TruncateTableResult> {
        self.do_action(TruncateTableAction { db, table }).await
    }
}
| 36.372093 | 98 | 0.636509 |
fb99ff2fa59408756ac46c3bff41d1429647f262 | 13,603 | // Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use std::fmt;
use std::fs::{self, File, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};
use crc::crc32::{self, Hasher32};
use kvproto::import_sstpb::*;
use uuid::Uuid;
use engine::rocks::util::{get_cf_handle, prepare_sst_for_ingestion, validate_sst_for_ingestion};
use engine::rocks::{IngestExternalFileOptions, DB};
use super::{Error, Result};
/// SSTImporter manages SST files that are waiting for ingesting.
pub struct SSTImporter {
    // Directory layout and file bookkeeping; all operations delegate to it.
    dir: ImportDir,
}
impl SSTImporter {
    /// Creates an importer rooted at the given directory.
    pub fn new<P: AsRef<Path>>(root: P) -> Result<SSTImporter> {
        let dir = ImportDir::new(root)?;
        Ok(SSTImporter { dir })
    }
    /// Opens a new import file for `meta`, logging the outcome either way.
    pub fn create(&self, meta: &SSTMeta) -> Result<ImportFile> {
        self.dir
            .create(meta)
            .map(|f| {
                info!("create"; "file" => ?f);
                f
            })
            .map_err(|e| {
                error!("create failed"; "meta" => ?meta, "err" => %e);
                e
            })
    }
    /// Removes every on-disk file associated with `meta`, logging the outcome.
    pub fn delete(&self, meta: &SSTMeta) -> Result<()> {
        self.dir
            .delete(meta)
            .map(|path| {
                info!("delete"; "path" => ?path);
            })
            .map_err(|e| {
                error!("delete failed"; "meta" => ?meta, "err" => %e);
                e
            })
    }
    /// Ingests the SST described by `meta` into `db`, logging the outcome.
    pub fn ingest(&self, meta: &SSTMeta, db: &DB) -> Result<()> {
        self.dir
            .ingest(meta, db)
            .map(|()| {
                info!("ingest"; "meta" => ?meta);
            })
            .map_err(|e| {
                error!("ingest failed"; "meta" => ?meta, "err" => %e);
                e
            })
    }
    /// Lists the metadata of all completed SST files under the root directory.
    pub fn list_ssts(&self) -> Result<Vec<SSTMeta>> {
        self.dir.list_ssts()
    }
}
/// ImportDir is responsible for operating SST files and related path
/// calculations.
///
/// The file being written is stored in `$root/.temp/$file_name`. After writing
/// is completed, the file is moved to `$root/$file_name`. The file generated
/// from the ingestion process will be placed in `$root/.clone/$file_name`.
///
/// TODO: Add size and rate limit.
pub struct ImportDir {
    // Completed uploads live directly under this directory.
    root_dir: PathBuf,
    // In-progress uploads (`$root/.temp`).
    temp_dir: PathBuf,
    // Files prepared for RocksDB ingestion (`$root/.clone`).
    clone_dir: PathBuf,
}
impl ImportDir {
    const TEMP_DIR: &'static str = ".temp";
    const CLONE_DIR: &'static str = ".clone";
    /// Creates the directory layout under `root`, discarding any temp/clone
    /// files left over from a previous run.
    fn new<P: AsRef<Path>>(root: P) -> Result<ImportDir> {
        let root_dir = root.as_ref().to_owned();
        let temp_dir = root_dir.join(Self::TEMP_DIR);
        let clone_dir = root_dir.join(Self::CLONE_DIR);
        if temp_dir.exists() {
            fs::remove_dir_all(&temp_dir)?;
        }
        if clone_dir.exists() {
            fs::remove_dir_all(&clone_dir)?;
        }
        fs::create_dir_all(&temp_dir)?;
        fs::create_dir_all(&clone_dir)?;
        Ok(ImportDir {
            root_dir,
            temp_dir,
            clone_dir,
        })
    }
    /// Computes the save/temp/clone paths for the SST described by `meta`.
    fn join(&self, meta: &SSTMeta) -> Result<ImportPath> {
        let file_name = sst_meta_to_path(meta)?;
        let save_path = self.root_dir.join(&file_name);
        let temp_path = self.temp_dir.join(&file_name);
        let clone_path = self.clone_dir.join(&file_name);
        Ok(ImportPath {
            save: save_path,
            temp: temp_path,
            clone: clone_path,
        })
    }
    /// Opens a new temp file for writing; fails if the SST was already saved.
    fn create(&self, meta: &SSTMeta) -> Result<ImportFile> {
        let path = self.join(meta)?;
        if path.save.exists() {
            return Err(Error::FileExists(path.save));
        }
        ImportFile::create(meta.clone(), path)
    }
    /// Removes whichever of the save/temp/clone files exist for `meta`.
    fn delete(&self, meta: &SSTMeta) -> Result<ImportPath> {
        let path = self.join(meta)?;
        if path.save.exists() {
            fs::remove_file(&path.save)?;
        }
        if path.temp.exists() {
            fs::remove_file(&path.temp)?;
        }
        if path.clone.exists() {
            fs::remove_file(&path.clone)?;
        }
        Ok(path)
    }
    /// Prepares the saved file at the clone path, validates its length/crc32
    /// against `meta`, then ingests it into the target column family with
    /// `move_files(true)` so RocksDB takes ownership of the clone.
    fn ingest(&self, meta: &SSTMeta, db: &DB) -> Result<()> {
        let path = self.join(meta)?;
        let cf = meta.get_cf_name();
        prepare_sst_for_ingestion(&path.save, &path.clone)?;
        validate_sst_for_ingestion(db, cf, &path.clone, meta.get_length(), meta.get_crc32())?;
        let handle = get_cf_handle(db, cf)?;
        let mut opts = IngestExternalFileOptions::new();
        opts.move_files(true);
        db.ingest_external_file_cf(handle, &opts, &[path.clone.to_str().unwrap()])?;
        Ok(())
    }
    /// Scans the root directory and parses each file name back into SSTMeta.
    /// Unparseable names are logged and skipped rather than failing the scan.
    fn list_ssts(&self) -> Result<Vec<SSTMeta>> {
        let mut ssts = Vec::new();
        for e in fs::read_dir(&self.root_dir)? {
            let e = e?;
            if !e.file_type()?.is_file() {
                continue;
            }
            let path = e.path();
            match path_to_sst_meta(&path) {
                Ok(sst) => ssts.push(sst),
                Err(e) => {
                    error!("path_to_sst_meta failed"; "path" => %path.to_str().unwrap(), "err" => %e)
                }
            }
        }
        Ok(ssts)
    }
}
/// The three on-disk locations an SST may occupy during its lifecycle.
#[derive(Clone)]
pub struct ImportPath {
    // The path of the file that has been uploaded.
    save: PathBuf,
    // The path of the file that is being uploaded.
    temp: PathBuf,
    // The path of the file that is going to be ingested.
    clone: PathBuf,
}
impl fmt::Debug for ImportPath {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ImportPath")
.field("save", &self.save)
.field("temp", &self.temp)
.field("clone", &self.clone)
.finish()
}
}
/// ImportFile is used to handle the writing and verification of SST files.
pub struct ImportFile {
    // Expected length and crc32 the written bytes must match.
    meta: SSTMeta,
    // On-disk locations; data is written to `path.temp` first.
    path: ImportPath,
    // Open handle to the temp file; `None` once finished or cleaned up.
    file: Option<File>,
    // Running CRC-32 (IEEE) over everything appended so far.
    digest: crc32::Digest,
}
impl ImportFile {
    /// Opens the temp file for exclusive creation (fails if it already exists).
    fn create(meta: SSTMeta, path: ImportPath) -> Result<ImportFile> {
        let file = OpenOptions::new()
            .write(true)
            .create_new(true)
            .open(&path.temp)?;
        Ok(ImportFile {
            meta,
            path,
            file: Some(file),
            digest: crc32::Digest::new(crc32::IEEE),
        })
    }
    /// Appends a chunk to the temp file and folds it into the running crc32.
    pub fn append(&mut self, data: &[u8]) -> Result<()> {
        self.file.as_mut().unwrap().write_all(data)?;
        self.digest.write(data);
        Ok(())
    }
    /// Validates crc32/length against the meta, fsyncs, and atomically moves
    /// the temp file to its final save path.
    pub fn finish(&mut self) -> Result<()> {
        self.validate()?;
        // Taking the handle closes it after sync and marks the file finished,
        // so Drop's cleanup won't remove the renamed result.
        self.file.take().unwrap().sync_all()?;
        if self.path.save.exists() {
            return Err(Error::FileExists(self.path.save.clone()));
        }
        fs::rename(&self.path.temp, &self.path.save)?;
        Ok(())
    }
    /// Drops the handle and removes the temp file if it is still present
    /// (i.e. the upload never completed).
    fn cleanup(&mut self) -> Result<()> {
        self.file.take();
        if self.path.temp.exists() {
            fs::remove_file(&self.path.temp)?;
        }
        Ok(())
    }
    /// Checks the accumulated crc32 and the on-disk length against the meta.
    fn validate(&self) -> Result<()> {
        let crc32 = self.digest.sum32();
        let expect = self.meta.get_crc32();
        if crc32 != expect {
            let reason = format!("crc32 {}, expect {}", crc32, expect);
            return Err(Error::FileCorrupted(self.path.temp.clone(), reason));
        }
        let f = self.file.as_ref().unwrap();
        let length = f.metadata()?.len();
        let expect = self.meta.get_length();
        if length != expect {
            let reason = format!("length {}, expect {}", length, expect);
            return Err(Error::FileCorrupted(self.path.temp.clone(), reason));
        }
        Ok(())
    }
}
impl Drop for ImportFile {
    /// Best-effort removal of a leftover temp file; failures are only logged
    /// because panicking in Drop is not an option.
    fn drop(&mut self) {
        if let Err(e) = self.cleanup() {
            warn!("cleanup failed"; "file" => ?self, "err" => %e);
        }
    }
}
impl fmt::Debug for ImportFile {
    // Only `meta` and `path` are shown; the open handle and the running
    // checksum state are intentionally left out of the debug output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("ImportFile");
        builder.field("meta", &self.meta);
        builder.field("path", &self.path);
        builder.finish()
    }
}
const SST_SUFFIX: &str = ".sst";
/// Builds the canonical on-disk name for an SST:
/// `{uuid}_{region_id}_{region_epoch.conf_ver}_{region_epoch.version}.sst`.
fn sst_meta_to_path(meta: &SSTMeta) -> Result<PathBuf> {
    let uuid = Uuid::from_bytes(meta.get_uuid())?;
    let epoch = meta.get_region_epoch();
    let name = format!(
        "{}_{}_{}_{}{}",
        uuid,
        meta.get_region_id(),
        epoch.get_conf_ver(),
        epoch.get_version(),
        SST_SUFFIX,
    );
    Ok(PathBuf::from(name))
}
/// Parses an on-disk file name produced by `sst_meta_to_path` back into an
/// `SSTMeta`. Returns `InvalidSSTPath` for names that don't match the format.
fn path_to_sst_meta<P: AsRef<Path>>(path: P) -> Result<SSTMeta> {
    let path = path.as_ref();
    let file_name = match path.file_name().and_then(|n| n.to_str()) {
        Some(name) => name,
        None => return Err(Error::InvalidSSTPath(path.to_owned())),
    };
    // A valid file name should be in the format:
    // "{uuid}_{region_id}_{region_epoch.conf_ver}_{region_epoch.version}.sst"
    if !file_name.ends_with(SST_SUFFIX) {
        return Err(Error::InvalidSSTPath(path.to_owned()));
    }
    // NOTE(review): trim_end_matches strips *repeated* suffixes, so a name
    // like "x.sst.sst" loses both — harmless for generated names, but not a
    // strict inverse of sst_meta_to_path.
    let elems: Vec<_> = file_name.trim_end_matches(SST_SUFFIX).split('_').collect();
    if elems.len() != 4 {
        return Err(Error::InvalidSSTPath(path.to_owned()));
    }
    let mut meta = SSTMeta::new();
    let uuid = Uuid::parse_str(elems[0])?;
    meta.set_uuid(uuid.as_bytes().to_vec());
    meta.set_region_id(elems[1].parse()?);
    meta.mut_region_epoch().set_conf_ver(elems[2].parse()?);
    meta.mut_region_epoch().set_version(elems[3].parse()?);
    Ok(meta)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::import::test_helpers::*;
    use engine::rocks::util::new_engine;
    use tempfile::Builder;
    // Exercises the full ImportDir lifecycle: create, delete, ingest into a
    // real RocksDB instance, and list/cleanup of saved SSTs.
    #[test]
    fn test_import_dir() {
        let temp_dir = Builder::new().prefix("test_import_dir").tempdir().unwrap();
        let dir = ImportDir::new(temp_dir.path()).unwrap();
        let mut meta = SSTMeta::new();
        meta.set_uuid(Uuid::new_v4().as_bytes().to_vec());
        let path = dir.join(&meta).unwrap();
        // Test ImportDir::create()
        {
            let _file = dir.create(&meta).unwrap();
            assert!(path.temp.exists());
            assert!(!path.save.exists());
            assert!(!path.clone.exists());
            // Cannot create the same file again.
            assert!(dir.create(&meta).is_err());
        }
        // Test ImportDir::delete()
        {
            File::create(&path.temp).unwrap();
            File::create(&path.save).unwrap();
            File::create(&path.clone).unwrap();
            dir.delete(&meta).unwrap();
            assert!(!path.temp.exists());
            assert!(!path.save.exists());
            assert!(!path.clone.exists());
        }
        // Test ImportDir::ingest()
        let db_path = temp_dir.path().join("db");
        let db = new_engine(db_path.to_str().unwrap(), None, &["default"], None).unwrap();
        let cases = vec![(0, 10), (5, 15), (10, 20), (0, 100)];
        let mut ingested = Vec::new();
        for (i, &range) in cases.iter().enumerate() {
            let path = temp_dir.path().join(format!("{}.sst", i));
            let (meta, data) = gen_sst_file(&path, range);
            let mut f = dir.create(&meta).unwrap();
            f.append(&data).unwrap();
            f.finish().unwrap();
            dir.ingest(&meta, &db).unwrap();
            check_db_range(&db, range);
            ingested.push(meta);
        }
        // Every ingested meta must be listed back, then deleting all of them
        // leaves the directory empty.
        let ssts = dir.list_ssts().unwrap();
        assert_eq!(ssts.len(), ingested.len());
        for sst in &ssts {
            ingested
                .iter()
                .find(|s| s.get_uuid() == sst.get_uuid())
                .unwrap();
            dir.delete(sst).unwrap();
        }
        assert!(dir.list_ssts().unwrap().is_empty());
    }
    // Exercises ImportFile validation: wrong crc32 then wrong length must
    // fail finish(); with correct meta the temp file is moved to save.
    #[test]
    fn test_import_file() {
        let temp_dir = Builder::new().prefix("test_import_file").tempdir().unwrap();
        let path = ImportPath {
            save: temp_dir.path().join("save"),
            temp: temp_dir.path().join("temp"),
            clone: temp_dir.path().join("clone"),
        };
        let data = b"test_data";
        let crc32 = calc_data_crc32(data);
        let mut meta = SSTMeta::new();
        {
            let mut f = ImportFile::create(meta.clone(), path.clone()).unwrap();
            // Cannot create the same file again.
            assert!(ImportFile::create(meta.clone(), path.clone()).is_err());
            f.append(data).unwrap();
            // Invalid crc32 and length.
            assert!(f.finish().is_err());
            assert!(path.temp.exists());
            assert!(!path.save.exists());
        }
        meta.set_crc32(crc32);
        {
            let mut f = ImportFile::create(meta.clone(), path.clone()).unwrap();
            f.append(data).unwrap();
            // Invalid length.
            assert!(f.finish().is_err());
        }
        meta.set_length(data.len() as u64);
        {
            let mut f = ImportFile::create(meta.clone(), path.clone()).unwrap();
            f.append(data).unwrap();
            f.finish().unwrap();
            assert!(!path.temp.exists());
            assert!(path.save.exists());
        }
    }
    // Round-trips an SSTMeta through sst_meta_to_path / path_to_sst_meta.
    #[test]
    fn test_sst_meta_to_path() {
        let mut meta = SSTMeta::new();
        let uuid = Uuid::new_v4();
        meta.set_uuid(uuid.as_bytes().to_vec());
        meta.set_region_id(1);
        meta.mut_region_epoch().set_conf_ver(2);
        meta.mut_region_epoch().set_version(3);
        let path = sst_meta_to_path(&meta).unwrap();
        let expected_path = format!("{}_1_2_3.sst", uuid);
        assert_eq!(path.to_str().unwrap(), &expected_path);
        let new_meta = path_to_sst_meta(path).unwrap();
        assert_eq!(meta, new_meta);
    }
}
| 29.962555 | 101 | 0.529148 |
0323bc8fbcddbeff43285c270d22ab4423486689 | 10,042 | #![allow(unused)]
use typescript_definitions::{TypeScriptify, TypeScriptifyTrait, TypescriptDefinition};
use serde::Serialize;
use std::borrow::Cow;
// use serde::de::value::Error;
use insta::assert_snapshot_matches;
use wasm_bindgen::prelude::*;
// Unit struct serializes to an empty object type.
#[cfg(feature = "test")]
#[test]
fn unit_struct() {
    #[derive(Serialize, TypescriptDefinition)]
    struct Unit;
    assert_snapshot_matches!(Unit___typescript_definition(),@"export type Unit = {};")
}
// Newtype struct is transparent: the TS alias is the inner type.
#[cfg(feature = "test")]
#[test]
fn newtype_struct() {
    #[derive(Serialize, TypescriptDefinition)]
    struct Newtype(i64);
    assert_snapshot_matches!(
        Newtype___typescript_definition(),
        @"export type Newtype = number;"
    )
}
// Tuple struct maps to a TS tuple type.
#[cfg(feature = "test")]
#[test]
fn tuple_struct() {
    #[derive(Serialize, TypescriptDefinition)]
    struct Tuple(i64, String);
    assert_snapshot_matches!(
        Tuple___typescript_definition(),
        @"export type Tuple = [ number , string ];"
    )
}
// Borrowed &str and Cow<str> both flatten to TS `string`.
#[cfg(feature = "test")]
#[test]
fn struct_with_borrowed_fields() {
    #[derive(Serialize, TypescriptDefinition, TypeScriptify)]
    struct Borrow<'a> {
        raw: &'a str,
        cow: Cow<'a, str>,
    }
    assert_snapshot_matches!(
        Borrow___typescript_definition(),
        @"export type Borrow = { raw: string; cow: string };"
    )
}
// serde rename attributes carry through to the TS field names.
#[cfg(feature = "test")]
#[test]
fn struct_point_with_field_rename() {
    #[derive(Serialize, TypescriptDefinition)]
    struct Point {
        #[serde(rename = "X")]
        x: i64,
        #[serde(rename = "Y")]
        y: i64,
    }
    assert_snapshot_matches!(
        Point___typescript_definition(),
        @"export type Point = { X: number; Y: number };"
    )
}
// Fixed-size arrays become `number[]`; Option<T> becomes `T | null`.
#[cfg(feature = "test")]
#[test]
fn struct_with_array() {
    #[derive(Serialize, TypescriptDefinition)]
    struct Point {
        x: [i64; 5],
        y: i64,
        z: Option<f64>,
    }
    assert_snapshot_matches!(
        Point___typescript_definition(),
        @"export type Point = { x: number[]; y: number; z: number | null };"
    )
}
// Tuples, Vec, and HashMap map to TS tuples, arrays, and index signatures.
#[cfg(feature = "test")]
#[test]
fn struct_with_tuple() {
    use std::collections::{HashMap, HashSet};
    #[derive(Serialize, TypescriptDefinition)]
    struct Point2 {
        x: (i64, String, [u128; 5]),
        y: i64,
        v: Vec<i32>,
        z: HashMap<String, i32>,
    }
    assert_snapshot_matches!(
        Point2___typescript_definition(),
        @"export type Point2 = { x: [ number , string , number[] ]; y: number; v: number[]; z: { [ key: string ]: number } };"
    )
}
// Internally-tagged enum with renamed newtype variants; #[serde(skip)] drops a variant.
#[cfg(feature = "test")]
#[test]
fn enum_with_renamed_newtype_variants() {
    #[derive(Serialize, TypescriptDefinition)]
    #[serde(tag = "kind")]
    enum Enum {
        #[serde(rename = "Var1")]
        V1(bool),
        #[serde(rename = "Var2")]
        V2(i64),
        #[serde(rename = "Var3")]
        V3(String),
        #[serde(skip)]
        Internal(i32),
    }
    assert_snapshot_matches!(
        Enum___typescript_definition(),
        @r###"export type Enum =
  | { kind: "Var1"; fields: boolean }
  | { kind: "Var2"; fields: number }
  | { kind: "Var3"; fields: string };"###
    )
}
// All-unit enums become a TS string enum rather than a union.
#[cfg(feature = "test")]
#[test]
fn enum_with_unit_variants() {
    #[derive(Serialize, TypescriptDefinition)]
    enum Enum {
        V1,
        V2,
        V3,
    }
    assert_snapshot_matches!(
        Enum___typescript_definition(),
        @r###"export enum Enum { V1 = "V1" , V2 = "V2" , V3 = "V3" };"###
    )
}
// Adjacently-tagged enum (tag + content) with tuple variant payloads.
#[cfg(feature = "test")]
#[test]
fn enum_with_tuple_variants() {
    #[derive(Serialize, TypescriptDefinition)]
    #[serde(tag = "kind", content = "fields")]
    enum Enum {
        V1(i64, String),
        V2(i64, bool),
        V3(i64, u64),
    }
    assert_snapshot_matches!(
        Enum___typescript_definition(),
        @r###"export type Enum =
  | { kind: "V1"; fields: [ number , string ] }
  | { kind: "V2"; fields: [ number , boolean ] }
  | { kind: "V3"; fields: [ number , number ] };"###
    )
}
// Internally-tagged enum with struct variants; field renames are honored.
#[cfg(feature = "test")]
#[test]
fn enum_with_struct_variants_and_renamed_fields() {
    #[derive(Serialize, TypescriptDefinition)]
    #[serde(tag = "kind")]
    enum Enum {
        #[allow(unused)]
        V1 {
            #[serde(rename = "Foo")]
            foo: bool,
        },
        #[allow(unused)]
        V2 {
            #[serde(rename = "Bar")]
            bar: i64,
            #[serde(rename = "Baz")]
            baz: u64,
        },
        #[allow(unused)]
        V3 {
            #[serde(rename = "Quux")]
            quux: String,
        },
    }
    assert_snapshot_matches!(
        Enum___typescript_definition(),
        @r###"export type Enum =
  | { kind: "V1"; Foo: boolean }
  | { kind: "V2"; Bar: number; Baz: number }
  | { kind: "V3"; Quux: string };"###
    )
}
// Custom tag/content names also apply to struct variants.
#[cfg(feature = "test")]
#[test]
fn enum_with_struct_and_tags() {
    #[derive(Serialize, TypescriptDefinition)]
    #[serde(tag = "id", content = "content")]
    enum Enum {
        V1 { foo: bool },
        V2 { bar: i64, baz: u64 },
        V3 { quux: String },
    }
    assert_snapshot_matches!(
        Enum___typescript_definition(),
        @r###"export type Enum =
  | { id: "V1"; content: { foo: boolean } }
  | { id: "V2"; content: { bar: number; baz: number } }
  | { id: "V3"; content: { quux: string } };"###
    )
}
// References to other generic types and Result/Option compositions.
#[cfg(feature = "test")]
#[test]
fn struct_with_attr_refering_to_other_type() {
    #[derive(Serialize)]
    struct B<T> {
        q: T,
    }
    #[derive(Serialize, TypescriptDefinition)]
    struct A {
        x: f64, /* simple */
        b: B<f64>,
        #[serde(rename = "cnew")]
        c: Result<i32, &'static str>,
        d: Result<Option<i32>, String>,
    }
    assert_snapshot_matches!(
        A___typescript_definition(),
        @"export type A = { x: number; b: B<number>; cnew: { Ok: number } | { Err: string }; d: { Ok: number | null } | { Err: string } };"
    )
}
// The TypeScriptify trait exposes the same output via type_script_ify().
#[test]
fn struct_typescriptify() {
    #[derive(TypeScriptify)]
    struct A {
        x: f64, /* simple */
        c: Result<i32, &'static str>,
        d: Result<Option<i32>, String>,
    }
    assert_snapshot_matches!(
        A::type_script_ify(),
        @"export type A = { x: number; c: { Ok: number } | { Err: string }; d: { Ok: number | null } | { Err: string } };"
    )
}
// Cow is only recognized by its canonical path; an alias is treated as a foreign generic.
#[test]
fn cow_as_pig() {
    use std::borrow::Cow as Pig;
    #[derive(TypeScriptify)]
    struct S<'a> {
        pig: Pig<'a, str>,
        cow: ::std::borrow::Cow<'a, str>,
    }
    assert_snapshot_matches!(
        S::type_script_ify(),
        @"export type S = { pig: Pig<string>; cow: string };"
    )
}
// Unit-only enums become TS string enums through TypeScriptify too.
#[test]
fn unit_enum_is_enum() {
    #[derive(TypeScriptify)]
    enum Color {
        Red,
        Green,
        Blue,
    }
    assert_snapshot_matches!(
        Color::type_script_ify(),
        @r###"export enum Color { Red = "Red" , Green = "Green" , Blue = "Blue" };"###
    )
}
// fn pointers and Fn trait fields map to TS function types.
#[test]
fn struct_has_function() {
    #[derive(TypeScriptify)]
    struct API<T> {
        key: i32,
        a: T,
        get: fn(arg: &i32) -> String,
        get2: Fn(T, i32) -> Option<i32>,
    }
    assert_snapshot_matches!(
        API::<i32>::type_script_ify(),
        @"export type API<T> = { key: number; a: T; get: ( arg: number ) => string; get2: ( T , number ) => number | null };"
    )
}
// Trait bounds on generics are dropped from the emitted TS generic parameter.
#[test]
fn struct_with_traitbounds() {
    use std::fmt::Display;
    #[derive(TypeScriptify)]
    struct API<T: Display + Send> {
        key: i32,
        a: T,
    }
    assert_snapshot_matches!(
        API::<i32>::type_script_ify(),
        @"export type API<T> = { key: number; a: T };"
    )
}
#[test]
// Fields annotated with `#[serde(skip)]` are omitted from the emitted type.
fn struct_with_serde_skip() {
#[derive(Serialize, TypeScriptify)]
struct S {
key: i32,
aa: i32,
#[serde(skip)]
b: f64,
}
assert_snapshot_matches!(
S::type_script_ify(),
@"export type S = { key: number; aa: number };"
)
}
#[test]
// With adjacent tagging (`tag`/`content`), `#[serde(skip)]` removes struct
// variant fields, tuple variant elements, and entire variants (`Z` does not
// appear in the output at all).
fn enum_with_serde_skip() {
#[derive(Serialize, TypeScriptify)]
#[serde(tag = "kind", content = "fields")]
enum S {
A,
E {
key: i32,
a: i32,
#[serde(skip)]
b: f64,
},
F(i32, #[serde(skip)] f64, String),
#[serde(skip)]
Z,
}
assert_snapshot_matches!(
S::type_script_ify(),
@r###"export type S =
| { kind: "A" }
| { kind: "E"; fields: { key: number; a: number } }
| { kind: "F"; fields: [ number , string ] };"###
)
}
#[test]
// `PhantomData` fields are dropped from the emitted type automatically,
// without needing an explicit `#[serde(skip)]`.
fn struct_with_phantom_data_skip() {
use std::marker::PhantomData;
#[derive(Serialize, TypeScriptify)]
struct S {
key: i32,
a: i32,
b: PhantomData<String>,
}
assert_snapshot_matches!(
S::type_script_ify(),
@"export type S = { key: number; a: number };"
)
}
#[cfg(feature = "test")]
#[test]
// Doc comments on the struct are carried into the output as `//` line
// comments; `&[String]`, `&[u8]` and `Vec<u8>` all become plain arrays.
fn struct_with_pointers_and_slices() {
#[derive(Serialize, TypescriptDefinition)]
/// This is a doc comment
/// on multiple lines.
struct Pointers<'a> {
keys: &'a [String],
// a_ptr: * const i32,
// #[serde(with="serde_bytes")]
buffer: &'a [u8],
buffer2: Vec<u8>,
}
assert_snapshot_matches!(
Pointers___typescript_definition(),
@r###"// This is a doc comment
// on multiple lines.
export type Pointers = { keys: string[]; buffer: number[]; buffer2: number[] };"###
)
}
#[cfg(feature = "test")]
#[test]
// `#[serde(transparent)]` on a single-field struct makes the emitted type a
// direct alias of the field's type rather than an object.
fn struct_with_one_field_is_transparent() {
#[derive(Serialize, TypescriptDefinition)]
#[serde(transparent)]
struct One {
a: i32,
}
assert_snapshot_matches!(
One___typescript_definition(),
@"export type One = number;"
)
}
#[cfg(feature = "test")]
#[test]
// `transparent` still applies when a second field exists but is
// `#[serde(skip)]`ped, leaving exactly one serialized field.
fn struct_with_two_fields_is_transparent_with_skip() {
#[derive(Serialize, TypescriptDefinition)]
#[serde(transparent)]
struct TwoSkip {
a: i32,
#[serde(skip)]
b: i32,
}
assert_snapshot_matches!(
TwoSkip___typescript_definition(),
@"export type TwoSkip = number;"
)
}
| 23.462617 | 139 | 0.547998 |
ace5ba26fdbaa9d3196daa34eb953b7e2e13d105 | 34,663 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Contains infrastructure for configuring the compiler, including parsing
//! command line options.
use driver::{early_error, early_warn};
use driver::driver;
use driver::session::Session;
use back;
use back::write;
use back::target_strs;
use back::{arm, x86, x86_64, mips, mipsel};
use lint;
use syntax::abi;
use syntax::ast;
use syntax::ast::{IntTy, UintTy};
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::diagnostic::{ColorConfig, Auto, Always, Never};
use syntax::parse;
use syntax::parse::token::InternedString;
use std::collections::HashMap;
use std::collections::hashmap::{Occupied, Vacant};
use getopts::{optopt, optmulti, optflag, optflagopt};
use getopts;
use std::cell::{RefCell};
use std::fmt;
use llvm;
/// Per-target configuration derived from the target triple: operating
/// system, architecture, backend target strings, and the native widths of
/// the target's `int`/`uint` types.
pub struct Config {
pub os: abi::Os,
pub arch: abi::Architecture,
pub target_strs: target_strs::t,
/// Width used for the target's native `int`.
pub int_type: IntTy,
/// Width used for the target's native `uint`.
pub uint_type: UintTy,
}
/// Optimization level requested on the command line (`-O` / `--opt-level`).
#[deriving(Clone, PartialEq)]
pub enum OptLevel {
No, // -O0
Less, // -O1
Default, // -O2
Aggressive // -O3
}
/// Amount of debug info to emit, selected by `-g` / `--debuginfo` (0/1/2).
#[deriving(Clone, PartialEq)]
pub enum DebugInfoLevel {
NoDebugInfo,
/// Line tables only (stack traces and breakpoints, no variable info).
LimitedDebugInfo,
/// Full debug info including variables and types (same as `-g`).
FullDebugInfo,
}
/// All session options produced from the parsed command line; see
/// `build_session_options` for how each field is populated.
#[deriving(Clone)]
pub struct Options {
// The crate config requested for the session, which may be combined
// with additional crate configurations during the compile process
pub crate_types: Vec<CrateType>,
/// Whether the `-Z gc` debugging flag was set.
pub gc: bool,
pub optimize: OptLevel,
pub debuginfo: DebugInfoLevel,
/// Lint name/level pairs gathered from `-W`/`-A`/`-D`/`-F`.
pub lint_opts: Vec<(String, lint::Level)>,
/// Set when the user passed a lint flag of `help` and wants the lint list.
pub describe_lints: bool,
pub output_types: Vec<back::write::OutputType> ,
// This was mutable for rustpkg, which updates search paths based on the
// parsed code. It remains mutable in case its replacements wants to use
// this.
pub addl_lib_search_paths: RefCell<Vec<Path>>,
pub maybe_sysroot: Option<Path>,
pub target_triple: String,
// User-specified cfg meta items. The compiler itself will add additional
// items to the crate config, and during parsing the entire crate config
// will be added to the crate AST node. This should not be used for
// anything except building the full crate config prior to parsing.
pub cfg: ast::CrateConfig,
pub test: bool,
/// Stop after parsing (`--parse-only`).
pub parse_only: bool,
/// Run all passes except translation (`--no-trans`).
pub no_trans: bool,
/// Parse and expand only; skip analysis and output (`--no-analysis`).
pub no_analysis: bool,
/// Bit set of the `-Z` debugging flags (see `debugging_opts!`).
pub debugging_opts: u64,
/// Whether to write dependency files. It's (enabled, optional filename).
pub write_dependency_info: (bool, Option<Path>),
/// Crate id-related things to maybe print. It's (crate_name, crate_file_name).
pub print_metas: (bool, bool),
pub cg: CodegenOptions,
pub color: ColorConfig,
/// `--extern NAME=PATH` mappings; one name may map to several paths.
pub externs: HashMap<String, Vec<String>>,
pub crate_name: Option<String>,
/// An optional name to use as the crate for std during std injection,
/// written `extern crate std = "name"`. Default to "std". Used by
/// out-of-tree drivers.
pub alt_std_name: Option<String>
}
/// Some reasonable defaults: no optimization or debug info, host target
/// triple, empty crate config, automatic coloring. Used as the starting
/// point before command-line flags are applied.
pub fn basic_options() -> Options {
Options {
crate_types: Vec::new(),
gc: false,
optimize: No,
debuginfo: NoDebugInfo,
lint_opts: Vec::new(),
describe_lints: false,
output_types: Vec::new(),
addl_lib_search_paths: RefCell::new(Vec::new()),
maybe_sysroot: None,
target_triple: driver::host_triple().to_string(),
cfg: Vec::new(),
test: false,
parse_only: false,
no_trans: false,
no_analysis: false,
debugging_opts: 0,
write_dependency_info: (false, None),
print_metas: (false, false),
cg: basic_codegen_options(),
color: Auto,
externs: HashMap::new(),
crate_name: None,
alt_std_name: None,
}
}
// The type of entry function, so
// users can have their own entry
// functions that don't start a
// scheduler
#[deriving(PartialEq)]
pub enum EntryFnType {
/// Ordinary `fn main()` entry point.
EntryMain,
/// A `#[start]`-style entry point.
EntryStart,
/// No entry point (e.g. library crates).
EntryNone,
}
/// Kind of output artifact requested via `--crate-type`; the `fmt::Show`
/// impl below prints the same names the flag accepts.
#[deriving(PartialEq, PartialOrd, Clone, Ord, Eq, Hash)]
pub enum CrateType {
CrateTypeExecutable,
CrateTypeDylib,
CrateTypeRlib,
CrateTypeStaticlib,
}
// Declares one `pub const NAME: u64` bit flag per identifier, assigning
// consecutive bit positions by recursing with `$cnt + 1`. The resulting
// constants are OR-ed together into `Options::debugging_opts`.
macro_rules! debugging_opts(
([ $opt:ident ] $cnt:expr ) => (
pub const $opt: u64 = 1 << $cnt;
);
([ $opt:ident, $($rest:ident),* ] $cnt:expr ) => (
pub const $opt: u64 = 1 << $cnt;
debugging_opts!([ $($rest),* ] $cnt + 1)
)
)
// One bit flag per `-Z` debugging option, in the same order as the
// descriptions in `debugging_opts_map` below.
debugging_opts!(
[
VERBOSE,
TIME_PASSES,
COUNT_LLVM_INSNS,
TIME_LLVM_PASSES,
TRANS_STATS,
ASM_COMMENTS,
NO_VERIFY,
BORROWCK_STATS,
NO_LANDING_PADS,
DEBUG_LLVM,
SHOW_SPAN,
COUNT_TYPE_SIZES,
META_STATS,
GC,
PRINT_LINK_ARGS,
PRINT_LLVM_PASSES,
AST_JSON,
AST_JSON_NOEXPAND,
LS,
SAVE_ANALYSIS,
FLOWGRAPH_PRINT_LOANS,
FLOWGRAPH_PRINT_MOVES,
FLOWGRAPH_PRINT_ASSIGNS,
FLOWGRAPH_PRINT_ALL
]
0
)
/// Maps each `-Z` flag name to its help text and bit constant; used both for
/// `-Z` parsing in `build_session_options` and for printing help.
pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> {
vec!(("verbose", "in general, enable more debug printouts", VERBOSE),
("time-passes", "measure time of each rustc pass", TIME_PASSES),
("count-llvm-insns", "count where LLVM \
instrs originate", COUNT_LLVM_INSNS),
("time-llvm-passes", "measure time of each LLVM pass",
TIME_LLVM_PASSES),
("trans-stats", "gather trans statistics", TRANS_STATS),
("asm-comments", "generate comments into the assembly (may change behavior)",
ASM_COMMENTS),
("no-verify", "skip LLVM verification", NO_VERIFY),
("borrowck-stats", "gather borrowck statistics", BORROWCK_STATS),
("no-landing-pads", "omit landing pads for unwinding",
NO_LANDING_PADS),
("debug-llvm", "enable debug output from LLVM", DEBUG_LLVM),
("show-span", "show spans for compiler debugging", SHOW_SPAN),
("count-type-sizes", "count the sizes of aggregate types",
COUNT_TYPE_SIZES),
("meta-stats", "gather metadata statistics", META_STATS),
("print-link-args", "Print the arguments passed to the linker",
PRINT_LINK_ARGS),
("gc", "Garbage collect shared data (experimental)", GC),
("print-llvm-passes",
"Prints the llvm optimization passes being run",
PRINT_LLVM_PASSES),
("ast-json", "Print the AST as JSON and halt", AST_JSON),
("ast-json-noexpand", "Print the pre-expansion AST as JSON and halt", AST_JSON_NOEXPAND),
("ls", "List the symbols defined by a library crate", LS),
("save-analysis", "Write syntax and type analysis information \
in addition to normal output", SAVE_ANALYSIS),
("flowgraph-print-loans", "Include loan analysis data in \
--pretty flowgraph output", FLOWGRAPH_PRINT_LOANS),
("flowgraph-print-moves", "Include move analysis data in \
--pretty flowgraph output", FLOWGRAPH_PRINT_MOVES),
("flowgraph-print-assigns", "Include assignment analysis data in \
--pretty flowgraph output", FLOWGRAPH_PRINT_ASSIGNS),
("flowgraph-print-all", "Include all dataflow analysis data in \
--pretty flowgraph output", FLOWGRAPH_PRINT_ALL))
}
/// Which LLVM passes a `-C` option selects: either an explicit list of pass
/// names, or the special value `all`.
#[deriving(Clone)]
pub enum Passes {
SomePasses(Vec<String>),
AllPasses,
}
impl Passes {
    /// True when no passes were requested; `AllPasses` always counts as
    /// non-empty.
    pub fn is_empty(&self) -> bool {
        match *self {
            AllPasses => false,
            SomePasses(ref list) => list.is_empty(),
        }
    }
}
/// Declare a macro that will define all CodegenOptions fields and parsers all
/// at once. The goal of this macro is to define an interface that can be
/// programmatically used by the option parser in order to initialize the struct
/// without hardcoding field names all over the place.
///
/// The goal is to invoke this macro once with the correct fields, and then this
/// macro generates all necessary code. The main gotcha of this macro is the
/// cgsetters module which is a bunch of generated code to parse an option into
/// its respective field in the struct. There are a few hand-written parsers for
/// parsing specific types of values in this module.
macro_rules! cgoptions(
($($opt:ident : $t:ty = ($init:expr, $parse:ident, $desc:expr)),* ,) =>
(
#[deriving(Clone)]
pub struct CodegenOptions { $(pub $opt: $t),* }
pub fn basic_codegen_options() -> CodegenOptions {
CodegenOptions { $($opt: $init),* }
}
pub type CodegenSetter = fn(&mut CodegenOptions, v: Option<&str>) -> bool;
// (name, setter, help) table consumed by build_codegen_options below.
pub const CG_OPTIONS: &'static [(&'static str, CodegenSetter,
&'static str)] =
&[ $( (stringify!($opt), cgsetters::$opt, $desc) ),* ];
mod cgsetters {
use super::{CodegenOptions, Passes, SomePasses, AllPasses};
$(
pub fn $opt(cg: &mut CodegenOptions, v: Option<&str>) -> bool {
$parse(&mut cg.$opt, v)
}
)*
// Each parse_* returns false when the supplied value form is invalid
// for the option (wrong presence/absence of `=value`, bad number, ...).
fn parse_bool(slot: &mut bool, v: Option<&str>) -> bool {
match v {
Some(..) => false,
None => { *slot = true; true }
}
}
fn parse_opt_string(slot: &mut Option<String>, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = Some(s.to_string()); true },
None => false,
}
}
fn parse_string(slot: &mut String, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = s.to_string(); true },
None => false,
}
}
// Splits the value on whitespace and appends each word to the list.
fn parse_list(slot: &mut Vec<String>, v: Option<&str>)
-> bool {
match v {
Some(s) => {
for s in s.words() {
slot.push(s.to_string());
}
true
},
None => false,
}
}
fn parse_uint(slot: &mut uint, v: Option<&str>) -> bool {
use std::from_str::FromStr;
match v.and_then(FromStr::from_str) {
Some(i) => { *slot = i; true },
None => false
}
}
// "all" selects every pass; otherwise the value is a word list.
fn parse_passes(slot: &mut Passes, v: Option<&str>) -> bool {
match v {
Some("all") => {
*slot = AllPasses;
true
}
v => {
let mut passes = vec!();
if parse_list(&mut passes, v) {
*slot = SomePasses(passes);
true
} else {
false
}
}
}
}
}
) )
// The full `-C` codegen option table: field name, type, (default, parser,
// help text). Expanded by the `cgoptions!` macro above into the
// `CodegenOptions` struct, its defaults, and the `CG_OPTIONS` lookup table.
cgoptions!(
ar: Option<String> = (None, parse_opt_string,
"tool to assemble archives with"),
linker: Option<String> = (None, parse_opt_string,
"system linker to link outputs with"),
link_args: Vec<String> = (Vec::new(), parse_list,
"extra arguments to pass to the linker (space separated)"),
lto: bool = (false, parse_bool,
"perform LLVM link-time optimizations"),
target_cpu: String = ("generic".to_string(), parse_string,
"select target processor (llc -mcpu=help for details)"),
target_feature: String = ("".to_string(), parse_string,
"target specific attributes (llc -mattr=help for details)"),
passes: Vec<String> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"),
llvm_args: Vec<String> = (Vec::new(), parse_list,
"a list of arguments to pass to llvm (space separated)"),
save_temps: bool = (false, parse_bool,
"save all temporary output files during compilation"),
rpath: bool = (false, parse_bool,
"set rpath values in libs/exes"),
no_prepopulate_passes: bool = (false, parse_bool,
"don't pre-populate the pass manager with a list of passes"),
no_vectorize_loops: bool = (false, parse_bool,
"don't run the loop vectorization optimization passes"),
no_vectorize_slp: bool = (false, parse_bool,
"don't run LLVM's SLP vectorization pass"),
soft_float: bool = (false, parse_bool,
"generate software floating point library calls"),
prefer_dynamic: bool = (false, parse_bool,
"prefer dynamic linking to static linking"),
no_integrated_as: bool = (false, parse_bool,
"use an external assembler rather than LLVM's integrated one"),
no_redzone: bool = (false, parse_bool,
"disable the use of the redzone"),
relocation_model: String = ("pic".to_string(), parse_string,
"choose the relocation model to use (llc -relocation-model for details)"),
code_model: String = ("default".to_string(), parse_string,
"choose the code model to use (llc -code-model for details)"),
metadata: Vec<String> = (Vec::new(), parse_list,
"metadata to mangle symbol names with"),
extra_filename: String = ("".to_string(), parse_string,
"extra data to put in each output filename"),
codegen_units: uint = (1, parse_uint,
"divide crate into N units to optimize in parallel"),
remark: Passes = (SomePasses(Vec::new()), parse_passes,
"print remarks for these optimization passes (space separated, or \"all\")"),
no_stack_check: bool = (false, parse_bool,
"disable checks for stack exhaustion (a memory-safety hazard!)"),
)
/// Parses every `-C key[=value]` flag into a `CodegenOptions`, starting from
/// the defaults. Dashes in the key are normalized to underscores before the
/// lookup in `CG_OPTIONS`; unknown keys and badly-formed values abort via
/// `early_error`.
pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
{
let mut cg = basic_codegen_options();
for option in matches.opt_strs("C").into_iter() {
// Split on the first '=' only, so values may themselves contain '='.
let mut iter = option.as_slice().splitn(1, '=');
let key = iter.next().unwrap();
let value = iter.next();
let option_to_lookup = key.replace("-", "_");
let mut found = false;
for &(candidate, setter, _) in CG_OPTIONS.iter() {
if option_to_lookup.as_slice() != candidate { continue }
// The setter reports whether the value form was acceptable; pick
// the right error message based on whether a value was supplied.
if !setter(&mut cg, value) {
match value {
Some(..) => {
early_error(format!("codegen option `{}` takes no \
value", key).as_slice())
}
None => {
early_error(format!("codegen option `{0}` requires \
a value (-C {0}=<value>)",
key).as_slice())
}
}
}
found = true;
break;
}
if !found {
early_error(format!("unknown codegen option: `{}`",
key).as_slice());
}
}
return cg;
}
/// The crate type that the generic `--crate-type lib` spelling maps to.
pub fn default_lib_output() -> CrateType {
CrateTypeRlib
}
/// Builds the compiler-supplied portion of the crate config from the
/// session's target: `target_os`, `target_family` (plus the bare family
/// word), `target_arch`, `target_endian` and `target_word_size`.
pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
let tos = match sess.targ_cfg.os {
abi::OsWindows => InternedString::new("windows"),
abi::OsMacos => InternedString::new("macos"),
abi::OsLinux => InternedString::new("linux"),
abi::OsAndroid => InternedString::new("android"),
abi::OsFreebsd => InternedString::new("freebsd"),
abi::OsDragonfly => InternedString::new("dragonfly"),
abi::OsiOS => InternedString::new("ios"),
};
// ARM is bi-endian, however using NDK seems to default
// to little-endian unless a flag is provided.
let (end,arch,wordsz) = match sess.targ_cfg.arch {
abi::X86 => ("little", "x86", "32"),
abi::X86_64 => ("little", "x86_64", "64"),
abi::Arm => ("little", "arm", "32"),
abi::Mips => ("big", "mips", "32"),
abi::Mipsel => ("little", "mipsel", "32")
};
let fam = match sess.targ_cfg.os {
abi::OsWindows => InternedString::new("windows"),
_ => InternedString::new("unix")
};
let mk = attr::mk_name_value_item_str;
return vec!(// Target bindings.
attr::mk_word_item(fam.clone()),
mk(InternedString::new("target_os"), tos),
mk(InternedString::new("target_family"), fam),
mk(InternedString::new("target_arch"), InternedString::new(arch)),
mk(InternedString::new("target_endian"), InternedString::new(end)),
mk(InternedString::new("target_word_size"),
InternedString::new(wordsz))
);
}
/// Adds `name` to the crate config as a word item unless an item with that
/// name is already present, keeping the config duplicate-free.
pub fn append_configuration(cfg: &mut ast::CrateConfig,
                            name: InternedString) {
    let already_present = cfg.iter().any(|mi| mi.name() == name);
    if !already_present {
        cfg.push(attr::mk_word_item(name))
    }
}
/// Produces the final crate config: the user's `--cfg` items (plus `test`
/// when `--test` was given) followed by the target defaults from
/// `default_configuration`.
pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
// Combine the configuration requested by the session (command line) with
// some default and generated configuration items
let default_cfg = default_configuration(sess);
let mut user_cfg = sess.opts.cfg.clone();
// If the user wants a test runner, then add the test cfg
if sess.opts.test {
append_configuration(&mut user_cfg, InternedString::new("test"))
}
let mut v = user_cfg.into_iter().collect::<Vec<_>>();
v.push_all(default_cfg.as_slice());
v
}
/// Maps a target triple to its operating system by substring search over
/// the `os_names` table; the first matching entry wins.
pub fn get_os(triple: &str) -> Option<abi::Os> {
    for &(pattern, os) in os_names.iter() {
        if triple.contains(pattern) {
            return Some(os);
        }
    }
    None
}
// Substring patterns used by get_os above; matched in order, first hit wins.
#[allow(non_upper_case_globals)]
static os_names : &'static [(&'static str, abi::Os)] = &[
("mingw32", abi::OsWindows),
("win32", abi::OsWindows),
("windows", abi::OsWindows),
("darwin", abi::OsMacos),
("android", abi::OsAndroid),
("linux", abi::OsLinux),
("freebsd", abi::OsFreebsd),
("dragonfly", abi::OsDragonfly),
("ios", abi::OsiOS)];
/// Maps a target triple to its architecture by substring search over the
/// `architecture_abis` table; the first matching entry wins.
pub fn get_arch(triple: &str) -> Option<abi::Architecture> {
    for &(pattern, arch) in architecture_abis.iter() {
        if triple.contains(pattern) {
            return Some(arch);
        }
    }
    None
}
// Substring patterns used by get_arch above; matched in order. Note that
// "mipsel" must come before "mips": matching is done with `contains`, and
// "mips" is a substring of every mipsel triple.
#[allow(non_upper_case_globals)]
static architecture_abis : &'static [(&'static str, abi::Architecture)] = &[
("i386", abi::X86),
("i486", abi::X86),
("i586", abi::X86),
("i686", abi::X86),
("i786", abi::X86),
("x86_64", abi::X86_64),
("arm", abi::Arm),
("xscale", abi::Arm),
("thumb", abi::Arm),
("mipsel", abi::Mipsel),
("mips", abi::Mips)];
/// Derives the target `Config` (os, arch, backend target strings, native
/// int/uint widths) from the session's target triple; unrecognized triples
/// abort via `early_error`.
pub fn build_target_config(sopts: &Options) -> Config {
let os = match get_os(sopts.target_triple.as_slice()) {
Some(os) => os,
None => early_error("unknown operating system")
};
let arch = match get_arch(sopts.target_triple.as_slice()) {
Some(arch) => arch,
None => {
early_error(format!("unknown architecture: {}",
sopts.target_triple.as_slice()).as_slice())
}
};
// Native int/uint widths per architecture.
let (int_type, uint_type) = match arch {
abi::X86 => (ast::TyI32, ast::TyU32),
abi::X86_64 => (ast::TyI64, ast::TyU64),
abi::Arm => (ast::TyI32, ast::TyU32),
abi::Mips => (ast::TyI32, ast::TyU32),
abi::Mipsel => (ast::TyI32, ast::TyU32)
};
let target_triple = sopts.target_triple.clone();
let target_strs = match arch {
abi::X86 => x86::get_target_strs(target_triple, os),
abi::X86_64 => x86_64::get_target_strs(target_triple, os),
abi::Arm => arm::get_target_strs(target_triple, os),
abi::Mips => mips::get_target_strs(target_triple, os),
abi::Mipsel => mipsel::get_target_strs(target_triple, os)
};
Config {
os: os,
arch: arch,
target_strs: target_strs,
int_type: int_type,
uint_type: uint_type,
}
}
// rustc command line options
/// The full getopts option table for rustc; the embedded help strings are
/// what `rustc --help` prints, so they must stay exactly as written.
pub fn optgroups() -> Vec<getopts::OptGroup> {
vec!(
optflag("h", "help", "Display this message"),
optmulti("", "cfg", "Configure the compilation environment", "SPEC"),
optmulti("L", "", "Add a directory to the library search path", "PATH"),
optmulti("", "crate-type", "Comma separated list of types of crates
for the compiler to emit",
"[bin|lib|rlib|dylib|staticlib]"),
optmulti("", "emit", "Comma separated list of types of output for the compiler to emit",
"[asm|bc|ir|obj|link]"),
optopt("", "crate-name", "Specify the name of the crate being built",
"NAME"),
optflag("", "print-crate-name", "Output the crate name and exit"),
optflag("", "print-file-name", "Output the file(s) that would be written if compilation \
continued and exit"),
optflag("", "crate-file-name", "deprecated in favor of --print-file-name"),
optflag("g", "", "Equivalent to --debuginfo=2"),
optopt("", "debuginfo", "Emit DWARF debug info to the objects created:
0 = no debug info,
1 = line-tables only (for stacktraces and breakpoints),
2 = full debug info with variable and type information (same as -g)", "LEVEL"),
optflag("", "no-trans", "Run all passes except translation; no output"),
optflag("", "no-analysis",
"Parse and expand the source, but run no analysis and produce no output"),
optflag("O", "", "Equivalent to --opt-level=2"),
optopt("o", "", "Write output to <filename>", "FILENAME"),
optopt("", "opt-level", "Optimize with possible levels 0-3", "LEVEL"),
optopt( "", "out-dir", "Write output to compiler-chosen filename in <dir>", "DIR"),
optflag("", "parse-only", "Parse only; do not compile, assemble, or link"),
optopt("", "explain", "Provide a detailed explanation of an error message", "OPT"),
optflagopt("", "pretty",
"Pretty-print the input instead of compiling;
valid types are: `normal` (un-annotated source),
`expanded` (crates expanded),
`typed` (crates expanded, with type annotations),
`expanded,identified` (fully parenthesized, AST nodes with IDs), or
`flowgraph=<nodeid>` (graphviz formatted flowgraph for node)",
"TYPE"),
optflagopt("", "dep-info",
"Output dependency info to <filename> after compiling, \
in a format suitable for use by Makefiles", "FILENAME"),
optopt("", "sysroot", "Override the system root", "PATH"),
optflag("", "test", "Build a test harness"),
optopt("", "target", "Target triple cpu-manufacturer-kernel[-os]
to compile for (see chapter 3.4 of http://www.sourceware.org/autobook/
for details)", "TRIPLE"),
optmulti("W", "warn", "Set lint warnings", "OPT"),
optmulti("A", "allow", "Set lint allowed", "OPT"),
optmulti("D", "deny", "Set lint denied", "OPT"),
optmulti("F", "forbid", "Set lint forbidden", "OPT"),
optmulti("C", "codegen", "Set a codegen option", "OPT[=VALUE]"),
optmulti("Z", "", "Set internal debugging options", "FLAG"),
optflagopt("v", "version", "Print version info and exit", "verbose"),
optopt("", "color", "Configure coloring of output:
auto = colorize, if output goes to a tty (default);
always = always colorize output;
never = never colorize output", "auto|always|never"),
optmulti("", "extern", "Specify where an external rust library is located",
"NAME=PATH"),
)
}
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
/// Each spec string is parsed as a meta item in a fresh parse session.
pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
cfgspecs.into_iter().map(|s| {
parse::parse_meta_from_source_str("cfgspec".to_string(),
s.to_string(),
Vec::new(),
&parse::new_parse_sess())
}).collect::<ast::CrateConfig>()
}
/// Translates the parsed command line (`matches`) into a complete `Options`
/// value. Invalid or conflicting flags abort immediately via `early_error`;
/// deprecated-but-accepted flags only produce `early_warn`.
pub fn build_session_options(matches: &getopts::Matches) -> Options {
let unparsed_crate_types = matches.opt_strs("crate-type");
let crate_types = parse_crate_types_from_list(unparsed_crate_types)
.unwrap_or_else(|e| early_error(e.as_slice()));
let parse_only = matches.opt_present("parse-only");
let no_trans = matches.opt_present("no-trans");
let no_analysis = matches.opt_present("no-analysis");
// Collect lint levels from -W/-A/-D/-F; the special name "help" asks for
// the lint listing instead of setting a level.
let mut lint_opts = vec!();
let mut describe_lints = false;
for &level in [lint::Allow, lint::Warn, lint::Deny, lint::Forbid].iter() {
for lint_name in matches.opt_strs(level.as_str()).into_iter() {
if lint_name.as_slice() == "help" {
describe_lints = true;
} else {
lint_opts.push((lint_name.replace("-", "_").into_string(), level));
}
}
}
// OR together the bit for every -Z flag, validating against the table.
let mut debugging_opts = 0;
let debug_flags = matches.opt_strs("Z");
let debug_map = debugging_opts_map();
for debug_flag in debug_flags.iter() {
let mut this_bit = 0;
for tuple in debug_map.iter() {
let (name, bit) = match *tuple { (ref a, _, b) => (a, b) };
if *name == debug_flag.as_slice() {
this_bit = bit;
break;
}
}
if this_bit == 0 {
early_error(format!("unknown debug flag: {}",
*debug_flag).as_slice())
}
debugging_opts |= this_bit;
}
if debugging_opts & DEBUG_LLVM != 0 {
unsafe { llvm::LLVMSetDebug(1); }
}
// Parse --emit into output types; skipped entirely when no translation
// will happen. Defaults to an executable when nothing was requested.
let mut output_types = Vec::new();
if !parse_only && !no_trans {
let unparsed_output_types = matches.opt_strs("emit");
for unparsed_output_type in unparsed_output_types.iter() {
for part in unparsed_output_type.as_slice().split(',') {
let output_type = match part.as_slice() {
"asm" => write::OutputTypeAssembly,
"ir" => write::OutputTypeLlvmAssembly,
"bc" => write::OutputTypeBitcode,
"obj" => write::OutputTypeObject,
"link" => write::OutputTypeExe,
_ => {
early_error(format!("unknown emission type: `{}`",
part).as_slice())
}
};
output_types.push(output_type)
}
}
};
output_types.as_mut_slice().sort();
output_types.dedup();
if output_types.len() == 0 {
output_types.push(write::OutputTypeExe);
}
let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m));
let target = matches.opt_str("target").unwrap_or(
driver::host_triple().to_string());
// -O is shorthand for --opt-level=2; combining both is an error.
let opt_level = {
if matches.opt_present("O") {
if matches.opt_present("opt-level") {
early_error("-O and --opt-level both provided");
}
Default
} else if matches.opt_present("opt-level") {
match matches.opt_str("opt-level").as_ref().map(|s| s.as_slice()) {
None |
Some("0") => No,
Some("1") => Less,
Some("2") => Default,
Some("3") => Aggressive,
Some(arg) => {
early_error(format!("optimization level needs to be \
between 0-3 (instead was `{}`)",
arg).as_slice());
}
}
} else {
No
}
};
let gc = debugging_opts & GC != 0;
// -g is shorthand for --debuginfo=2; combining both is an error.
let debuginfo = if matches.opt_present("g") {
if matches.opt_present("debuginfo") {
early_error("-g and --debuginfo both provided");
}
FullDebugInfo
} else if matches.opt_present("debuginfo") {
match matches.opt_str("debuginfo").as_ref().map(|s| s.as_slice()) {
Some("0") => NoDebugInfo,
Some("1") => LimitedDebugInfo,
None |
Some("2") => FullDebugInfo,
Some(arg) => {
early_error(format!("debug info level needs to be between \
0-2 (instead was `{}`)",
arg).as_slice());
}
}
} else {
NoDebugInfo
};
let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| {
Path::new(s.as_slice())
}).collect();
let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
let test = matches.opt_present("test");
let write_dependency_info = (matches.opt_present("dep-info"),
matches.opt_str("dep-info")
.map(|p| Path::new(p)));
let print_metas = (matches.opt_present("print-crate-name"),
matches.opt_present("print-file-name") ||
matches.opt_present("crate-file-name"));
if matches.opt_present("crate-file-name") {
early_warn("the --crate-file-name argument has been renamed to \
--print-file-name");
}
let cg = build_codegen_options(matches);
if !cg.remark.is_empty() && debuginfo == NoDebugInfo {
early_warn("-C remark will not show source locations without --debuginfo");
}
let color = match matches.opt_str("color").as_ref().map(|s| s.as_slice()) {
Some("auto") => Auto,
Some("always") => Always,
Some("never") => Never,
None => Auto,
Some(arg) => {
early_error(format!("argument for --color must be auto, always \
or never (instead was `{}`)",
arg).as_slice())
}
};
// Collect --extern NAME=PATH pairs; one name may map to several paths.
let mut externs = HashMap::new();
for arg in matches.opt_strs("extern").iter() {
let mut parts = arg.as_slice().splitn(1, '=');
let name = match parts.next() {
Some(s) => s,
None => early_error("--extern value must not be empty"),
};
let location = match parts.next() {
Some(s) => s,
None => early_error("--extern value must be of the format `foo=bar`"),
};
match externs.entry(name.to_string()) {
Vacant(entry) => { entry.set(vec![location.to_string()]); },
Occupied(mut entry) => { entry.get_mut().push(location.to_string()); },
}
}
let crate_name = matches.opt_str("crate-name");
Options {
crate_types: crate_types,
gc: gc,
optimize: opt_level,
debuginfo: debuginfo,
lint_opts: lint_opts,
describe_lints: describe_lints,
output_types: output_types,
addl_lib_search_paths: RefCell::new(addl_lib_search_paths),
maybe_sysroot: sysroot_opt,
target_triple: target,
cfg: cfg,
test: test,
parse_only: parse_only,
no_trans: no_trans,
no_analysis: no_analysis,
debugging_opts: debugging_opts,
write_dependency_info: write_dependency_info,
print_metas: print_metas,
cg: cg,
color: color,
externs: externs,
crate_name: crate_name,
alt_std_name: None
}
}
/// Parses the collected `--crate-type` values into `CrateType`s.
///
/// Each element may itself be a comma-separated list (e.g. `rlib,dylib`);
/// an unrecognized name stops parsing and returns an error message.
pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> {
    let mut parsed: Vec<CrateType> = Vec::new();
    for raw in list_list.iter() {
        for piece in raw.as_slice().split(',') {
            let crate_type = match piece {
                "lib" => default_lib_output(),
                "rlib" => CrateTypeRlib,
                "staticlib" => CrateTypeStaticlib,
                "dylib" => CrateTypeDylib,
                "bin" => CrateTypeExecutable,
                _ => {
                    return Err(format!("unknown crate type: `{}`",
                                       piece));
                }
            };
            parsed.push(crate_type);
        }
    }
    Ok(parsed)
}
impl fmt::Show for CrateType {
    /// Formats the crate type using the same name `--crate-type` accepts.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match *self {
            CrateTypeExecutable => "bin",
            CrateTypeDylib => "dylib",
            CrateTypeRlib => "rlib",
            CrateTypeStaticlib => "staticlib",
        };
        name.fmt(f)
    }
}
#[cfg(test)]
// Tests for the --test / --cfg interaction in build_configuration.
mod test {
use driver::config::{build_configuration, optgroups, build_session_options};
use driver::session::build_session;
use getopts::getopts;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::diagnostics;
// When the user supplies --test we should implicitly supply --cfg test
#[test]
fn test_switch_implies_cfg_test() {
let matches =
&match getopts(["--test".to_string()], optgroups().as_slice()) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
};
let registry = diagnostics::registry::Registry::new([]);
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess);
assert!((attr::contains_name(cfg.as_slice(), "test")));
}
// When the user supplies --test and --cfg test, don't implicitly add
// another --cfg test
#[test]
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches =
&match getopts(["--test".to_string(), "--cfg=test".to_string()],
optgroups().as_slice()) {
Ok(m) => m,
Err(f) => {
panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)
}
};
let registry = diagnostics::registry::Registry::new([]);
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess);
// Exactly one "test" item must remain after merging.
let mut test_items = cfg.iter().filter(|m| m.name().equiv(&("test")));
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
}
}
| 37.232009 | 98 | 0.566887 |
1d95e693f48bed4448e5d726ecebab6ec625992f | 2,171 | /*
* Metal API
*
* This is the API for Equinix Metal Product. Interact with your devices, user account, and projects.
*
* The version of the OpenAPI document: 1.0.0
* Contact: [email protected]
* Generated by: https://openapi-generator.tech
*/
// Generated model for the Equinix Metal `Interconnection` resource (see the
// OpenAPI header above). All fields are optional and omitted from the JSON
// when `None`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Interconnection {
#[serde(rename = "id", skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(rename = "name", skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "description", skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(rename = "contact_email", skip_serializing_if = "Option::is_none")]
pub contact_email: Option<String>,
#[serde(rename = "status", skip_serializing_if = "Option::is_none")]
pub status: Option<String>,
// `type` is a Rust keyword, hence the `_type` field name with a rename.
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub _type: Option<String>,
#[serde(rename = "redundancy", skip_serializing_if = "Option::is_none")]
pub redundancy: Option<String>,
/// The connection's speed in bps.
#[serde(rename = "speed", skip_serializing_if = "Option::is_none")]
pub speed: Option<i32>,
#[serde(rename = "tags", skip_serializing_if = "Option::is_none")]
pub tags: Option<Vec<String>>,
#[serde(rename = "ports", skip_serializing_if = "Option::is_none")]
pub ports: Option<Vec<crate::models::InterconnectionPort>>,
#[serde(rename = "facility", skip_serializing_if = "Option::is_none")]
pub facility: Option<crate::models::Href>,
#[serde(rename = "organization", skip_serializing_if = "Option::is_none")]
pub organization: Option<crate::models::Href>,
}
impl Interconnection {
pub fn new() -> Interconnection {
Interconnection {
id: None,
name: None,
description: None,
contact_email: None,
status: None,
_type: None,
redundancy: None,
speed: None,
tags: None,
ports: None,
facility: None,
organization: None,
}
}
}
| 34.460317 | 101 | 0.634731 |
e23a953fef008653a3e767586ed2274cce78e4e4 | 11,943 | use fnv::{FnvHashMap, FnvHashSet};
use grid::{Grid, GridShape};
use intersect::Intersect;
use line_path::LinePath;
use ordered_float::OrderedFloat;
use segments::LineSegment;
use stable_vec::StableVec;
use std::collections::hash_map::Entry;
use util::SmallSortedSet;
use {P2, V2, VecLike, N};
/// Addresses one segment of one piece: `piece_idx` selects the piece within
/// `Embedding::pieces`, `segment_idx` the segment within that piece's path.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct PieceSegmentIndex {
    pub piece_idx: usize,
    pub segment_idx: usize,
}
/// A collection of labelled line paths ("pieces") kept mutually non-crossing
/// by `insert`, with a uniform grid indexing each piece segment for fast
/// spatial queries.
#[derive(Clone)]
pub struct Embedding<L: Clone> {
    // Maps grid cells to the piece segments that pass through them.
    piece_segment_grid: Grid<PieceSegmentIndex>,
    pub pieces: StableVec<(LinePath, L)>,
}
/// An intersection point recorded against one piece: which segment it lies
/// on, how far along that segment (`along`), and its absolute position.
struct PieceIntersection {
    segment_idx: usize,
    along: N,
    position: P2,
}
// Core maintenance of the non-crossing invariant: insertion detects all
// intersections via the grid, then splits both the new path and any crossed
// existing pieces at the intersection points.
impl<L: Clone> Embedding<L> {
    /// Creates an empty embedding whose acceleration grid uses square cells
    /// of side length `cell_width`.
    pub fn new(cell_width: N) -> Self {
        Embedding {
            piece_segment_grid: Grid::new(cell_width),
            pieces: StableVec::new(),
        }
    }
    /// Inserts `new_path` under `label`, splitting both the new path and any
    /// existing pieces it crosses at their mutual intersection points.
    pub fn insert(&mut self, new_path: LinePath, label: L) {
        // Intersections found on the path being inserted.
        let mut intersections_on_new: Vec<PieceIntersection> = Vec::new();
        // Intersections found on existing pieces, keyed by piece index.
        let mut intersections_on_others: FnvHashMap<usize, Vec<PieceIntersection>> =
            FnvHashMap::default();
        for (new_segment_idx, new_segment) in new_path.segments().enumerate() {
            // A segment can span several grid cells, so remember which other
            // segments were already tested to avoid duplicate intersections.
            let mut seen_other_piece_segments = SmallSortedSet::<[PieceSegmentIndex; 16]>::new();
            self.piece_segment_grid.visit(&new_segment, |cell_content| {
                for other_piece_segment_idx in cell_content.iter() {
                    if !seen_other_piece_segments.contains(other_piece_segment_idx) {
                        let other_segment = self.pieces[other_piece_segment_idx.piece_idx]
                            .0
                            .nth_segment(other_piece_segment_idx.segment_idx);
                        // Record each intersection twice: once against the
                        // new path, once against the existing piece.
                        for intersection in (new_segment, other_segment).intersect() {
                            intersections_on_new.push(PieceIntersection {
                                segment_idx: new_segment_idx,
                                along: intersection.along_a,
                                position: intersection.position,
                            });
                            let other_intersection = PieceIntersection {
                                segment_idx: other_piece_segment_idx.segment_idx,
                                along: intersection.along_b,
                                position: intersection.position,
                            };
                            match intersections_on_others.entry(other_piece_segment_idx.piece_idx) {
                                Entry::Vacant(vacant) => {
                                    vacant.insert(vec![other_intersection]);
                                }
                                Entry::Occupied(occupied) => {
                                    occupied.into_mut().push(other_intersection);
                                }
                            }
                        }
                        seen_other_piece_segments.insert(*other_piece_segment_idx);
                    }
                }
            })
        }
        // Replace each crossed existing piece with its split parts.
        for (other_piece_idx, other_intersections) in intersections_on_others.into_iter() {
            let (other_piece, other_label) = self.remove_piece(other_piece_idx);
            self.insert_piece_splits(other_piece, other_intersections, other_label)
        }
        // Insert the new path whole if nothing was crossed, else split it.
        if intersections_on_new.is_empty() {
            self.insert_whole_piece(new_path, label);
        } else {
            self.insert_piece_splits(new_path, intersections_on_new, label);
        }
    }
    /// Splits `initial_piece` at the given intersections and inserts every
    /// resulting sub-path under (a clone of) `label`.
    fn insert_piece_splits(
        &mut self,
        initial_piece: LinePath,
        mut intersections: Vec<PieceIntersection>,
        label: L,
    ) {
        // Order intersections by position along the path: first by segment
        // index, then by parameter within the segment.
        intersections.sort_unstable_by(|intersection_a, intersection_b| {
            intersection_a
                .segment_idx
                .cmp(&intersection_b.segment_idx)
                .then(OrderedFloat(intersection_a.along).cmp(&OrderedFloat(intersection_b.along)))
        });
        let mut original_points_iter = initial_piece.points.iter().enumerate().peekable();
        let mut intersections_iter = intersections.into_iter().peekable();
        let mut new_point_groups = vec![VecLike::new()];
        // Merge original points and intersection points in path order; each
        // intersection point both ends the current group and starts the next,
        // so neighbouring split pieces share that point.
        loop {
            let take_point = {
                let maybe_next_point_with_idx = original_points_iter.peek();
                let maybe_next_intersection = intersections_iter.peek();
                match (maybe_next_point_with_idx, maybe_next_intersection) {
                    (Some((next_point_idx, _next_point)), Some(next_intersection)) => {
                        if *next_point_idx <= next_intersection.segment_idx {
                            true
                        } else {
                            false
                        }
                    }
                    (Some(_), None) => true,
                    (None, Some(_)) => false,
                    (None, None) => break,
                }
            };
            if take_point {
                new_point_groups
                    .last_mut()
                    .expect("should have last point group")
                    .push(
                        original_points_iter
                            .next()
                            .expect("already peeked point!")
                            .1
                            .clone(),
                    );
            } else {
                let intersection_point = intersections_iter
                    .next()
                    .expect("already peeked intersetion!")
                    .position;
                new_point_groups
                    .last_mut()
                    .expect("should have last point group")
                    .push(intersection_point);
                new_point_groups.push(Some(intersection_point).into_iter().collect());
            }
        }
        // Degenerate groups (too few points) yield no LinePath and are
        // silently dropped by `LinePath::new` returning None.
        for points_group in new_point_groups.into_iter() {
            if let Some(split_piece) = LinePath::new(points_group) {
                self.insert_whole_piece(split_piece, label.clone())
            }
        }
    }
    /// Registers `path` as one piece: indexes every segment in the grid under
    /// the piece's future index, then stores the piece itself.
    fn insert_whole_piece(&mut self, path: LinePath, label: L) {
        let piece_idx = self.pieces.next_index();
        for (segment_idx, segment) in path.segments().enumerate() {
            self.piece_segment_grid.insert_unchecked(
                PieceSegmentIndex {
                    piece_idx,
                    segment_idx,
                },
                &segment,
            );
        }
        self.pieces.push((path, label));
    }
    /// Removes the piece at `piece_idx`, de-indexing all of its segments from
    /// the grid, and returns the removed path together with its label.
    ///
    /// Panics if no piece exists at `piece_idx`.
    fn remove_piece(&mut self, piece_idx: usize) -> (LinePath, L) {
        let (path, label) = self
            .pieces
            .remove(piece_idx)
            .expect("Tried to remove non-existing piece");
        // TODO: this might redundantly try to remove several times from the same grid cells
        for segment in path.segments() {
            self.piece_segment_grid
                .retain(segment, |piece_segment_idx: &mut PieceSegmentIndex| {
                    piece_segment_idx.piece_idx != piece_idx
                })
        }
        (path, label)
    }
    /// Returns every piece segment whose grid cells overlap `shape`, each
    /// with its label and index. Deduplicated, but in no particular order.
    pub fn query_pieces<S: GridShape>(
        &self,
        shape: &S,
    ) -> Vec<(LineSegment, &L, PieceSegmentIndex)> {
        let mut unique_piece_segment_indices =
            FnvHashSet::with_capacity_and_hasher(100, Default::default());
        self.piece_segment_grid.visit(shape, |cell_content| {
            unique_piece_segment_indices.extend(cell_content.iter().cloned());
        });
        unique_piece_segment_indices
            .into_iter()
            .map(|piece_segment_idx| {
                let (path, label) = &self.pieces[piece_segment_idx.piece_idx];
                (
                    path.nth_segment(piece_segment_idx.segment_idx),
                    label,
                    piece_segment_idx,
                )
            })
            .collect()
    }
    /// Queries with a horizontal ray from `start_point` in the +x direction,
    /// sized (at least one cell) to reach the grid's populated x bound.
    pub fn query_ray_along_x(&self, start_point: P2) -> Vec<(LineSegment, &L, PieceSegmentIndex)> {
        let ray_length = (self.piece_segment_grid.max_bounds.0
            - (start_point.x / self.piece_segment_grid.cell_width) as i32)
            .max(1) as f32 * self.piece_segment_grid.cell_width;
        let ray = LineSegment::new(start_point, start_point + V2::new(ray_length, 0.0));
        self.query_pieces(&ray)
    }
    /// Returns a new embedding with identical geometry whose labels are
    /// produced by `mapper`; piece indices (including holes) are preserved.
    pub fn map_labels<L2: Clone, M: FnMut(&LinePath, &L, &Self) -> L2>(
        &mut self,
        mut mapper: M,
    ) -> Embedding<L2> {
        let mut new_pieces = StableVec::new();
        // Pre-grow so existing indices can be filled in as "holes".
        new_pieces.grow(self.pieces.capacity());
        for piece_idx in self.pieces.keys() {
            let (piece, old_label) = &self.pieces[piece_idx];
            let new_label = mapper(&piece, &old_label, self);
            new_pieces
                .insert_into_hole(piece_idx, (piece.clone(), new_label))
                .ok()
                .expect("Index in clone should be free!");
        }
        Embedding {
            piece_segment_grid: self.piece_segment_grid.clone(),
            pieces: new_pieces,
        }
    }
    /// Like `map_labels`, but replaces this embedding's labels in place
    /// (label type unchanged).
    pub fn map_labels_in_place<M: FnMut(&LinePath, &L, &Self) -> L>(&mut self, mut mapper: M) {
        let mut new_pieces = StableVec::new();
        new_pieces.grow(self.pieces.capacity());
        for piece_idx in self.pieces.keys() {
            let (piece, old_label) = &self.pieces[piece_idx];
            let new_label = mapper(&piece, &old_label, self);
            new_pieces
                .insert_into_hole(piece_idx, (piece.clone(), new_label))
                .ok()
                .expect("Index in clone should be free!");
        }
        self.pieces = new_pieces;
    }
    /// Removes every piece for which `predicate` returns false.
    pub fn retain_pieces<P: FnMut(&LinePath, &L, &Self) -> bool>(&mut self, mut predicate: P) {
        for piece_idx in 0..self.pieces.next_index() {
            // `get` returns None for already-removed holes in the StableVec;
            // those neither existed nor need removal.
            let (existed, keep) = self
                .pieces
                .get(piece_idx)
                .map(|(path, label)| (true, predicate(path, label, self)))
                .unwrap_or((false, false));
            if existed && !keep {
                self.remove_piece(piece_idx);
            }
        }
    }
}
#[test]
fn embedding_test() {
    let mut embedding = Embedding::new(0.25);
    #[derive(Clone, PartialEq, Eq, Debug)]
    enum Label {
        A,
        B,
    }
    // |
    // .--x--- B
    // A --x--'
    // |
    // A and B cross at (0.5, 0.0) and (1.0, 0.5); each path is expected to
    // be split into three pieces at those two points.
    embedding.insert(
        LinePath::new(vec![
            P2::new(0.0, 0.0),
            P2::new(1.0, 0.0),
            P2::new(1.0, 1.0),
        ]).unwrap(),
        Label::A,
    );
    embedding.insert(
        LinePath::new(vec![
            P2::new(0.5, -0.5),
            P2::new(0.5, 0.5),
            P2::new(1.5, 0.5),
        ]).unwrap(),
        Label::B,
    );
    assert_eq!(
        embedding.pieces.iter().cloned().collect::<Vec<_>>(),
        vec![
            (
                LinePath::new(vec![P2::new(0.0, 0.0), P2::new(0.5, 0.0)]).unwrap(),
                Label::A,
            ),
            (
                LinePath::new(vec![
                    P2::new(0.5, 0.0),
                    P2::new(1.0, 0.0),
                    P2::new(1.0, 0.5),
                ]).unwrap(),
                Label::A,
            ),
            (
                LinePath::new(vec![P2::new(1.0, 0.5), P2::new(1.0, 1.0)]).unwrap(),
                Label::A,
            ),
            (
                LinePath::new(vec![P2::new(0.5, -0.5), P2::new(0.5, 0.0)]).unwrap(),
                Label::B,
            ),
            (
                LinePath::new(vec![
                    P2::new(0.5, 0.0),
                    P2::new(0.5, 0.5),
                    P2::new(1.0, 0.5),
                ]).unwrap(),
                Label::B,
            ),
            (
                LinePath::new(vec![P2::new(1.0, 0.5), P2::new(1.5, 0.5)]).unwrap(),
                Label::B,
            ),
        ]
    )
}
| 34.22063 | 100 | 0.499707 |
188c544e3c902a5cdf6de6bf0effceaafca773ac | 3,522 | /*
* GraphHopper Directions API
*
* You use the GraphHopper Directions API to add route planning, navigation and route optimization to your software. E.g. the Routing API has turn instructions and elevation data and the Route Optimization API solves your logistic problems and supports various constraints like time window and capacity restrictions. Also it is possible to get all distances between all locations with our fast Matrix API.
*
* OpenAPI spec version: 1.0.0
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
use std::rc::Rc;
use std::borrow::Borrow;
use std::borrow::Cow;
use hyper;
use serde_json;
use futures;
use futures::{Future, Stream};
use hyper::header::UserAgent;
use super::{Error, configuration};
/// Client for the GraphHopper Geocoding API, generic over the hyper
/// connector type `C`; holds a shared handle to the client configuration.
pub struct GeocodingApiClient<C: hyper::client::Connect> {
    configuration: Rc<configuration::Configuration<C>>,
}
impl<C: hyper::client::Connect> GeocodingApiClient<C> {
    /// Creates a new API client backed by the shared `configuration`.
    pub fn new(configuration: Rc<configuration::Configuration<C>>) -> GeocodingApiClient<C> {
        // Field-init shorthand replaces the redundant `configuration:
        // configuration` (clippy: `redundant_field_names`).
        GeocodingApiClient { configuration }
    }
}
/// Operations exposed by the Geocoding API.
pub trait GeocodingApi {
    /// `GET /geocode`: geocodes the query `q` (reverse lookup when `reverse`
    /// is set, presumably relative to `point` — confirm against the API docs),
    /// returning a boxed future that resolves to the decoded response.
    fn geocode_get(&self, key: &str, q: &str, locale: &str, limit: i32, reverse: bool, point: &str, provider: &str) -> Box<Future<Item = ::models::GeocodingResponse, Error = Error<serde_json::Value>>>;
}
// Blanket implementation over any hyper connector type.
impl<C: hyper::client::Connect>GeocodingApi for GeocodingApiClient<C> {
    /// Builds the `/geocode` request URL, sends it, and decodes the JSON
    /// body into a `GeocodingResponse`; non-success statuses become errors.
    fn geocode_get(&self, key: &str, q: &str, locale: &str, limit: i32, reverse: bool, point: &str, provider: &str) -> Box<Future<Item = ::models::GeocodingResponse, Error = Error<serde_json::Value>>> {
        let configuration: &configuration::Configuration<C> = self.configuration.borrow();
        let method = hyper::Method::Get;
        // Serialize all parameters into the query string.
        let query = ::url::form_urlencoded::Serializer::new(String::new())
            .append_pair("q", &q.to_string())
            .append_pair("locale", &locale.to_string())
            .append_pair("limit", &limit.to_string())
            .append_pair("reverse", &reverse.to_string())
            .append_pair("point", &point.to_string())
            .append_pair("provider", &provider.to_string())
            .append_pair("key", &key.to_string())
            .finish();
        let uri_str = format!("{}/geocode{}", configuration.base_path, query);
        let uri = uri_str.parse();
        // TODO(farcaller): handle error
        // if let Err(e) = uri {
        //     return Box::new(futures::future::err(e));
        // }
        // NOTE(review): `unwrap` panics on a malformed base_path instead of
        // returning an error future — see the TODO above.
        let mut req = hyper::Request::new(method, uri.unwrap());
        if let Some(ref user_agent) = configuration.user_agent {
            req.headers_mut().set(UserAgent::new(Cow::Owned(user_agent.clone())));
        }
        // send request
        Box::new(
            configuration.client.request(req)
            .map_err(|e| Error::from(e))
            .and_then(|resp| {
                // Capture the status before consuming the body stream.
                let status = resp.status();
                resp.body().concat2()
                    .and_then(move |body| Ok((status, body)))
                    .map_err(|e| Error::from(e))
            })
            .and_then(|(status, body)| {
                if status.is_success() {
                    Ok(body)
                } else {
                    Err(Error::from((status, &*body)))
                }
            })
            .and_then(|body| {
                // Decode the accumulated body as JSON.
                let parsed: Result<::models::GeocodingResponse, _> = serde_json::from_slice(&body);
                parsed.map_err(|e| Error::from(e))
            })
        )
    }
}
| 36.6875 | 405 | 0.599659 |
ed273063e8bb8bd953c4105669ab2b5776ea9acd | 13,703 | /*--------------------------------------------------------------------------
black
The MIT License (MIT)
Copyright (c) 2019 Haydn Paterson (sinclair) <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---------------------------------------------------------------------------*/
use black_math::{Vec2};
use std::cmp::{max, min};
use std::mem::swap;
use super::DepthBuffer;
use super::FragmentProgram;
use super::Interpolate;
use super::TargetBuffer;
use super::VertexProgram;
pub struct Raster;
impl Raster {
    /// Rasterizes one triangle: runs the vertex program on all three
    /// vertices, discards triangles behind the camera or facing away
    /// (negative edge value), projects to clip space, and scanline-fills.
    #[inline(always)]
    pub fn triangle<TVertexProgram, TFragmentProgram, TUniform, TVertex, TVarying, TTargetBuffer>(
        vertex: &TVertexProgram,
        fragment: &TFragmentProgram,
        depth: &mut DepthBuffer,
        target: &mut TTargetBuffer,
        uniform: &TUniform,
        vertex_0: &TVertex,
        vertex_1: &TVertex,
        vertex_2: &TVertex,
    ) where
        TVertexProgram: VertexProgram<Uniform = TUniform, Vertex = TVertex, Varying = TVarying>,
        TFragmentProgram: FragmentProgram<Uniform = TUniform, Varying = TVarying>,
        TVarying: Interpolate,
        TTargetBuffer: TargetBuffer,
    {
        // compute half width and height.
        let width = target.width() as f32;
        let height = target.height() as f32;
        let half_width = width * 0.5;
        let half_height = height * 0.5;
        // setup vrs for this primitive.
        let mut varying_0 = Interpolate::new();
        let mut varying_1 = Interpolate::new();
        let mut varying_2 = Interpolate::new();
        // execute vertex shader, store position for interpolation.
        let position_0 = vertex.main(&uniform, &vertex_0, &mut varying_0);
        let position_1 = vertex.main(&uniform, &vertex_1, &mut varying_1);
        let position_2 = vertex.main(&uniform, &vertex_2, &mut varying_2);
        // prevent z less than 0.0 errors, discard the triangle.
        if position_0.z < 0.0 || position_1.z < 0.0 || position_2.z < 0.0 {
            // todo: implement frustum clipping
            return;
        }
        // calculate positions in clip space.
        // (perspective divide by w, then scale/offset into pixel
        // coordinates; y is flipped so +y points down the framebuffer)
        let clippos_0 = Vec2::new(
            ((position_0.x / position_0.w) * width) + half_width,
            ((-position_0.y / position_0.w) * height) + half_height,
        );
        let clippos_1 = Vec2::new(
            ((position_1.x / position_1.w) * width) + half_width,
            ((-position_1.y / position_1.w) * height) + half_height,
        );
        let clippos_2 = Vec2::new(
            ((position_2.x / position_2.w) * width) + half_width,
            ((-position_2.y / position_2.w) * height) + half_height,
        );
        // run fragment processor
        // (varyings are pre-divided by z for perspective-correct
        // interpolation; 1/z is passed for the depth computation)
        if Self::edge(&clippos_0, &clippos_1, &clippos_2) >= 0.0 {
            Self::draw_triangle(
                fragment,
                depth,
                target,
                uniform,
                &Interpolate::correct(&varying_0, &position_0.z),
                &Interpolate::correct(&varying_1, &position_1.z),
                &Interpolate::correct(&varying_2, &position_2.z),
                &clippos_0,
                &clippos_1,
                &clippos_2,
                &(1.0 / position_0.z),
                &(1.0 / position_1.z),
                &(1.0 / position_2.z),
            );
        }
    }
    /// Scanline-fills the clip-space triangle, splitting it vertically at
    /// the middle vertex and rasterizing each half line by line.
    #[inline(always)]
    fn draw_triangle<TTargetBuffer, TFragmentProgram, TVarying, TUniform>(
        fragment: &TFragmentProgram,
        depth: &mut DepthBuffer,
        target: &mut TTargetBuffer,
        uniform: &TUniform,
        varying_0: &TVarying,
        varying_1: &TVarying,
        varying_2: &TVarying,
        clippos_0: &Vec2,
        clippos_1: &Vec2,
        clippos_2: &Vec2,
        corrected_z_0: &f32,
        corrected_z_1: &f32,
        corrected_z_2: &f32,
    ) where
        TFragmentProgram: FragmentProgram<Uniform = TUniform, Varying = TVarying>,
        TVarying: Interpolate,
        TTargetBuffer: TargetBuffer,
    {
        // clone clippos for sorting.
        let mut ordered_0 = clippos_0.clone();
        let mut ordered_1 = clippos_1.clone();
        let mut ordered_2 = clippos_2.clone();
        // sort ordered y-descending.
        if ordered_0.y > ordered_1.y {
            swap(&mut ordered_0, &mut ordered_1);
        }
        if ordered_1.y > ordered_2.y {
            swap(&mut ordered_1, &mut ordered_2);
        }
        if ordered_0.y > ordered_1.y {
            swap(&mut ordered_0, &mut ordered_1);
        }
        // calculate slopes for the given triangle types.
        //  P0
        //  /|
        // / |
        // / |
        // /  |
        // P1 \ |
        // \ |
        // \ |
        // \|
        // P2
        let slope_0 = if ordered_1.y - ordered_0.y > 0.0 {
            (ordered_1.x - ordered_0.x) / (ordered_1.y - ordered_0.y)
        } else {
            0.0
        };
        // P0
        // |\
        // | \
        // |  \
        // |  \
        // | / P1
        // | /
        // | /
        // |/
        // P2
        let slope_1 = if ordered_2.y - ordered_0.y > 0.0 {
            (ordered_2.x - ordered_0.x) / (ordered_2.y - ordered_0.y)
        } else {
            0.0
        };
        // draw scanlines
        // (the slope comparison decides whether the middle vertex P1 lies
        // to the right or the left of the long edge P0-P2; each branch
        // rasterizes the upper sub-triangle, then the lower one)
        if slope_0 > slope_1 {
            for y in ordered_0.y as i32..=ordered_2.y as i32 {
                if (y as f32) < ordered_1.y {
                    let (min_x, max_x) = Self::calculate_x_scan_range(
                        y,
                        &ordered_0,
                        &ordered_2,
                        &ordered_0,
                        &ordered_1,
                    );
                    Self::draw_line(
                        fragment,
                        depth,
                        target,
                        uniform,
                        &clippos_0,
                        &clippos_1,
                        &clippos_2,
                        &varying_0,
                        &varying_1,
                        &varying_2,
                        &corrected_z_0,
                        &corrected_z_1,
                        &corrected_z_2,
                        min_x,
                        max_x,
                        y,
                    )
                } else {
                    let (min_x, max_x) = Self::calculate_x_scan_range(
                        y,
                        &ordered_0,
                        &ordered_2,
                        &ordered_1,
                        &ordered_2,
                    );
                    Self::draw_line(
                        fragment,
                        depth,
                        target,
                        uniform,
                        &clippos_0,
                        &clippos_1,
                        &clippos_2,
                        &varying_0,
                        &varying_1,
                        &varying_2,
                        &corrected_z_0,
                        &corrected_z_1,
                        &corrected_z_2,
                        min_x,
                        max_x,
                        y,
                    )
                }
            }
        } else {
            for y in ordered_0.y as i32 ..= ordered_2.y as i32 {
                if (y as f32) < ordered_1.y {
                    let (min_x, max_x) = Self::calculate_x_scan_range(
                        y,
                        &ordered_0,
                        &ordered_1,
                        &ordered_0,
                        &ordered_2,
                    );
                    Self::draw_line(
                        fragment,
                        depth,
                        target,
                        uniform,
                        &clippos_0,
                        &clippos_1,
                        &clippos_2,
                        &varying_0,
                        &varying_1,
                        &varying_2,
                        &corrected_z_0,
                        &corrected_z_1,
                        &corrected_z_2,
                        min_x,
                        max_x,
                        y,
                    )
                } else {
                    let (min_x, max_x) = Self::calculate_x_scan_range(
                        y,
                        &ordered_1,
                        &ordered_2,
                        &ordered_0,
                        &ordered_2,
                    );
                    Self::draw_line(
                        fragment,
                        depth,
                        target,
                        uniform,
                        &clippos_0,
                        &clippos_1,
                        &clippos_2,
                        &varying_0,
                        &varying_1,
                        &varying_2,
                        &corrected_z_0,
                        &corrected_z_1,
                        &corrected_z_2,
                        min_x,
                        max_x,
                        y,
                    )
                }
            }
        }
    }
    /// For scanline `y`, returns the x extents obtained by interpolating
    /// along the edge (ordered_0, ordered_1) and the edge
    /// (ordered_2, ordered_3) respectively.
    #[inline(always)]
    fn calculate_x_scan_range(y: i32, ordered_0: &Vec2, ordered_1: &Vec2, ordered_2: &Vec2, ordered_3: &Vec2) -> (i32, i32) {
        // Gradient = fraction of the way down the edge; a horizontal edge
        // degenerates to gradient 1.
        let gradient_0 = if ordered_0.y != ordered_1.y {
            (y as f32 - ordered_0.y) / (ordered_1.y - ordered_0.y)
        } else {
            1.0
        };
        let gradient_1 = if ordered_2.y != ordered_3.y {
            (y as f32 - ordered_2.y) / (ordered_3.y - ordered_2.y)
        } else {
            1.0
        };
        let min_x = ordered_0.x + (ordered_1.x - ordered_0.x) * Self::clamp(gradient_0, 0.0, 1.0);
        let max_x = ordered_2.x + (ordered_3.x - ordered_2.x) * Self::clamp(gradient_1, 0.0, 1.0);
        (min_x as i32, max_x as i32)
    }
    /// Fills one scanline: computes barycentric weights per pixel, depth
    /// tests against the depth buffer, and on success interpolates the
    /// varyings, runs the fragment program and writes the color.
    #[inline(always)]
    fn draw_line<TTargetBuffer, TFragmentProgram, TVarying, TUniform>(
        fragment: &TFragmentProgram,
        depth: &mut DepthBuffer,
        target: &mut TTargetBuffer,
        uniform: &TUniform,
        clippos_0: &Vec2,
        clippos_1: &Vec2,
        clippos_2: &Vec2,
        varying_0: &TVarying,
        varying_1: &TVarying,
        varying_2: &TVarying,
        corrected_z_0: &f32,
        corrected_z_1: &f32,
        corrected_z_2: &f32,
        min_x: i32,
        max_x: i32,
        y: i32,
    ) where
        TFragmentProgram: FragmentProgram<Uniform = TUniform, Varying = TVarying>,
        TVarying: Interpolate,
        TTargetBuffer: TargetBuffer,
    {
        // exit if outside viewport height.
        if y < 0 || y >= target.height() {
            return;
        }
        // min | max within viewport width.
        let min_x = max(min_x, 0);
        let max_x = min(max_x, target.width() - 1);
        // calculate edge value
        let edge = Self::edge(clippos_0, clippos_1, clippos_2);
        // NOTE(review): the range is half-open, so the pixel at max_x itself
        // is never drawn — confirm whether the right edge should be inclusive.
        for x in min_x..max_x {
            // calculate weights
            // NOTE(review): "+ 0.0" is a no-op; possibly a half-pixel center
            // offset (+ 0.5) was intended — confirm.
            let pixel_coordinate = Vec2::new((x as f32) + 0.0, (y as f32) + 0.0);
            let weight_0 = Self::edge(clippos_2, clippos_1, &pixel_coordinate) / edge;
            let weight_1 = Self::edge(clippos_0, clippos_2, &pixel_coordinate) / edge;
            let weight_2 = Self::edge(clippos_1, clippos_0, &pixel_coordinate) / edge;
            // calculate depth of fragment.
            let calculated_depth =
                (weight_0 * corrected_z_0)
                + (weight_1 * corrected_z_1)
                + (weight_2 * corrected_z_2);
            // check depth and discard, interpolate and render.
            if calculated_depth < depth.get(x as usize, y as usize) {
                depth.set(x as usize, y as usize, calculated_depth);
                let varying = TVarying::interpolate(
                    varying_0,
                    varying_1,
                    varying_2,
                    &weight_0,
                    &weight_1,
                    &weight_2,
                    &calculated_depth,
                );
                let color = fragment.main(uniform, &varying);
                target.set(x, y, color);
            }
        }
    }
    /// Clamps `value` into the inclusive range [min, max].
    #[inline(always)]
    fn clamp(value: f32, min: f32, max: f32) -> f32 {
        min.max(value.min(max))
    }
    /// Signed twice-area of the triangle (v0, v1, v2); the sign encodes
    /// winding and is also used as the barycentric denominator.
    #[inline(always)]
    fn edge(v0: &Vec2, v1: &Vec2, v2: &Vec2) -> f32 {
        (v2.x - v0.x) * (v1.y - v0.y) - (v2.y - v0.y) * (v1.x - v0.x)
    }
}
| 35.226221 | 125 | 0.458221 |
8a5f175ea83c5ead810e75b2a589082731807d83 | 47,844 | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! This file houses a representation of a SQL plan that is parallel to that found in
//! src/expr/relation/mod.rs, but represents an earlier phase of planning. It's structurally very
//! similar to that file, with some differences which are noted below. It gets turned into that
//! representation via a call to decorrelate().
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fmt;
use std::mem;
use anyhow::bail;
use expr::DummyHumanizer;
use itertools::Itertools;
use ore::collections::CollectionExt;
use repr::*;
use crate::plan::query::ExprContext;
use crate::plan::typeconv::{self, CastContext};
use crate::plan::Params;
// these happen to be unchanged at the moment, but there might be additions later
pub use expr::{BinaryFunc, ColumnOrder, NullaryFunc, TableFunc, UnaryFunc, VariadicFunc};
use repr::adt::array::ArrayDimension;
use super::Explanation;
#[derive(Debug, Clone, PartialEq, Eq)]
/// Just like MirRelationExpr, except where otherwise noted below.
///
/// - There is no equivalent to `MirRelationExpr::Let`.
pub enum HirRelationExpr {
    /// A constant collection containing exactly `rows`, of type `typ`.
    Constant {
        rows: Vec<Row>,
        typ: RelationType,
    },
    /// A reference to an existing collection identified by `id`.
    Get {
        id: expr::Id,
        typ: RelationType,
    },
    /// Keeps only the columns listed in `outputs`, in that order.
    Project {
        input: Box<HirRelationExpr>,
        outputs: Vec<usize>,
    },
    /// Appends the results of the `scalars` expressions as new columns.
    Map {
        input: Box<HirRelationExpr>,
        scalars: Vec<HirScalarExpr>,
    },
    /// A table-valued function applied to the given scalar arguments.
    CallTable {
        func: TableFunc,
        exprs: Vec<HirScalarExpr>,
    },
    /// Keeps only rows satisfying all `predicates`.
    Filter {
        input: Box<HirRelationExpr>,
        predicates: Vec<HirScalarExpr>,
    },
    /// Unlike MirRelationExpr, we haven't yet compiled LeftOuter/RightOuter/FullOuter
    /// joins away into more primitive exprs
    Join {
        left: Box<HirRelationExpr>,
        right: Box<HirRelationExpr>,
        on: HirScalarExpr,
        kind: JoinKind,
    },
    /// Unlike MirRelationExpr, when `key` is empty AND `input` is empty this returns
    /// a single row with the aggregates evaluated over empty groups, rather than returning zero
    /// rows
    Reduce {
        input: Box<HirRelationExpr>,
        group_key: Vec<usize>,
        aggregates: Vec<AggregateExpr>,
        expected_group_size: Option<usize>,
    },
    /// Removes duplicate rows; analogous to `MirRelationExpr::Distinct`.
    Distinct {
        input: Box<HirRelationExpr>,
    },
    /// Groups and orders within each group, limiting output.
    TopK {
        /// The source collection.
        input: Box<HirRelationExpr>,
        /// Column indices used to form groups.
        group_key: Vec<usize>,
        /// Column indices used to order rows within groups.
        order_key: Vec<ColumnOrder>,
        /// Number of records to retain
        limit: Option<usize>,
        /// Number of records to skip
        offset: usize,
    },
    /// Analogous to `MirRelationExpr::Negate`.
    Negate {
        input: Box<HirRelationExpr>,
    },
    /// Analogous to `MirRelationExpr::Threshold`.
    Threshold {
        input: Box<HirRelationExpr>,
    },
    /// The union of `base` with every relation in `inputs`.
    Union {
        base: Box<HirRelationExpr>,
        inputs: Vec<HirRelationExpr>,
    },
    /// Declares that the listed column sets are keys of `input`.
    DeclareKeys {
        input: Box<HirRelationExpr>,
        keys: Vec<Vec<usize>>,
    },
}
#[derive(Debug, Clone, PartialEq, Eq)]
/// Just like expr::MirScalarExpr, except where otherwise noted below.
pub enum HirScalarExpr {
    /// Unlike expr::MirScalarExpr, we can nest HirRelationExprs via eg Exists. This means that a
    /// variable could refer to a column of the current input, or to a column of an outer relation.
    /// We use ColumnRef to denote the difference.
    Column(ColumnRef),
    /// A positional query parameter (e.g. `$1`), not yet bound to a value.
    Parameter(usize),
    /// A literal value together with its column type.
    Literal(Row, ColumnType),
    /// A function of no arguments.
    CallNullary(NullaryFunc),
    /// A function of one argument.
    CallUnary {
        func: UnaryFunc,
        expr: Box<HirScalarExpr>,
    },
    /// A function of two arguments.
    CallBinary {
        func: BinaryFunc,
        expr1: Box<HirScalarExpr>,
        expr2: Box<HirScalarExpr>,
    },
    /// A function of an arbitrary number of arguments.
    CallVariadic {
        func: VariadicFunc,
        exprs: Vec<HirScalarExpr>,
    },
    /// A conditional: `then` if `cond` holds, otherwise `els`.
    If {
        cond: Box<HirScalarExpr>,
        then: Box<HirScalarExpr>,
        els: Box<HirScalarExpr>,
    },
    /// Returns true if `expr` returns any rows
    Exists(Box<HirRelationExpr>),
    /// Given `expr` with arity 1. If expr returns:
    /// * 0 rows, return NULL
    /// * 1 row, return the value of that row
    /// * >1 rows, the sql spec says we should throw an error but we can't
    ///   (see https://github.com/MaterializeInc/materialize/issues/489)
    ///   so instead we return all the rows.
    /// If there are multiple `Select` expressions in a single SQL query, the result is that we take the product of all of them.
    /// This is counter to the spec, but is consistent with eg postgres' treatment of multiple set-returning-functions
    /// (see https://tapoueh.org/blog/2017/10/set-returning-functions-and-postgresql-10/).
    Select(Box<HirRelationExpr>),
}
/// A `CoercibleScalarExpr` is a [`ScalarExpr`] whose type is not fully
/// determined. Several SQL expressions can be freely coerced based upon where
/// in the expression tree they appear. For example, the string literal '42'
/// will be automatically coerced to the integer 42 if used in a numeric
/// context:
///
/// ```sql
/// SELECT '42' + 42
/// ```
///
/// This separate type gives the code that needs to interact with coercions very
/// fine-grained control over what coercions happen and when.
///
/// The primary driver of coercion is function and operator selection, as
/// choosing the correct function or operator implementation depends on the type
/// of the provided arguments. Coercion also occurs at the very root of the
/// scalar expression tree. For example in
///
/// ```sql
/// SELECT ... WHERE $1
/// ```
///
/// the `WHERE` clause will coerce the contained unconstrained type parameter
/// `$1` to have type bool.
#[derive(Clone, Debug)]
pub enum CoercibleScalarExpr {
    /// An expression whose type is already fully determined.
    Coerced(HirScalarExpr),
    /// An unconstrained positional parameter (e.g. `$1`).
    Parameter(usize),
    /// A NULL literal whose type is not yet known.
    LiteralNull,
    /// A string literal that may be coerced to a non-string type.
    LiteralString(String),
    /// A record literal whose elements may themselves need coercion.
    LiteralRecord(Vec<CoercibleScalarExpr>),
}
impl CoercibleScalarExpr {
    /// Coerces the expression to exactly `ty`, failing with a descriptive
    /// error if the coerced expression ends up with any other type.
    pub fn type_as(
        self,
        ecx: &ExprContext,
        ty: &ScalarType,
    ) -> Result<HirScalarExpr, anyhow::Error> {
        let expr = typeconv::plan_coerce(ecx, self, ty)?;
        let actual = ecx.scalar_type(&expr);
        if &actual == ty {
            Ok(expr)
        } else {
            bail!(
                "{} must have type {}, not type {}",
                ecx.name,
                ecx.humanize_scalar_type(ty),
                ecx.humanize_scalar_type(&actual),
            )
        }
    }
    /// Coerces the expression to some concrete type, defaulting otherwise
    /// unconstrained expressions to `ScalarType::String`.
    pub fn type_as_any(self, ecx: &ExprContext) -> Result<HirScalarExpr, anyhow::Error> {
        typeconv::plan_coerce(ecx, self, &ScalarType::String)
    }
    /// Coerces the expression toward `ty` and then plans a cast of the
    /// result to `ty` under cast context `ccx`, attributing errors to `op`.
    pub fn cast_to(
        self,
        op: &str,
        ecx: &ExprContext,
        ccx: CastContext,
        ty: &ScalarType,
    ) -> Result<HirScalarExpr, anyhow::Error> {
        let coerced = typeconv::plan_coerce(ecx, self, ty)?;
        typeconv::plan_cast(op, ecx, ccx, coerced, ty)
    }
}
/// An expression whose type can be ascertained.
///
/// Abstracts over `ScalarExpr` and `CoercibleScalarExpr`.
pub trait AbstractExpr {
    /// The column type-like result of `typ` (a `ColumnType`, or an
    /// `Option<ColumnType>` for expressions that may still be uncoerced).
    type Type: AbstractColumnType;
    /// Computes the type of the expression.
    ///
    /// `outers` supplies the relation types of the enclosing scopes, `inner`
    /// the current scope's relation type, and `params` the types bound to
    /// positional parameters.
    fn typ(
        &self,
        outers: &[RelationType],
        inner: &RelationType,
        params: &BTreeMap<usize, ScalarType>,
    ) -> Self::Type;
}
impl AbstractExpr for CoercibleScalarExpr {
    type Type = Option<ColumnType>;
    /// Only a fully coerced expression has a known type; every other
    /// variant is still unconstrained and yields `None`.
    fn typ(
        &self,
        outers: &[RelationType],
        inner: &RelationType,
        params: &BTreeMap<usize, ScalarType>,
    ) -> Self::Type {
        if let CoercibleScalarExpr::Coerced(expr) = self {
            Some(expr.typ(outers, inner, params))
        } else {
            None
        }
    }
}
/// A column type-like object whose underlying scalar type-like object can be
/// ascertained.
///
/// Abstracts over `ColumnType` and `Option<ColumnType>`.
pub trait AbstractColumnType {
    /// The scalar type-like object contained in this column type-like object.
    type AbstractScalarType;
    /// Converts the column type-like object into its inner scalar type-like
    /// object.
    fn scalar_type(self) -> Self::AbstractScalarType;
}
impl AbstractColumnType for ColumnType {
    type AbstractScalarType = ScalarType;
    /// A concrete column type simply yields its scalar type.
    fn scalar_type(self) -> Self::AbstractScalarType {
        self.scalar_type
    }
}
impl AbstractColumnType for Option<ColumnType> {
    type AbstractScalarType = Option<ScalarType>;
    /// A possibly-unknown column type yields a possibly-unknown scalar type.
    fn scalar_type(self) -> Self::AbstractScalarType {
        self.map(|t| t.scalar_type)
    }
}
impl From<HirScalarExpr> for CoercibleScalarExpr {
    /// A fully typed expression is trivially coercible (already coerced).
    fn from(expr: HirScalarExpr) -> CoercibleScalarExpr {
        CoercibleScalarExpr::Coerced(expr)
    }
}
/// A leveled column reference.
///
/// In the course of decorrelation, multiple levels of nested subqueries are
/// traversed, and references to columns may correspond to different levels
/// of containing outer subqueries.
///
/// A `ColumnRef` allows expressions to refer to columns while being clear
/// about which level the column references without manually performing the
/// bookkeeping tracking their actual column locations.
///
/// Specifically, a `ColumnRef` refers to a column `level` subquery level *out*
/// from the reference, using `column` as a unique identifier in that subquery level.
/// A `level` of zero corresponds to the current scope, and levels increase to
/// indicate subqueries further "outwards".
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ColumnRef {
    /// Scope level, where 0 is the current scope and 1+ are outer scopes.
    pub level: usize,
    /// Level-local column identifier used.
    pub column: usize,
}
/// The flavor of a SQL join. Inner and left-outer joins may additionally be
/// lateral, i.e. the right-hand input may refer to left-hand columns.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum JoinKind {
    Inner { lateral: bool },
    LeftOuter { lateral: bool },
    RightOuter,
    FullOuter,
}
impl fmt::Display for JoinKind {
    /// Renders the join kind as the compact name used in plan output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            JoinKind::Inner { lateral: false } => "Inner",
            JoinKind::Inner { lateral: true } => "InnerLateral",
            JoinKind::LeftOuter { lateral: false } => "LeftOuter",
            JoinKind::LeftOuter { lateral: true } => "LeftOuterLateral",
            JoinKind::RightOuter => "RightOuter",
            JoinKind::FullOuter => "FullOuter",
        };
        f.write_str(name)
    }
}
impl JoinKind {
    /// Reports whether the right-hand input may reference columns of the
    /// left-hand input; only inner and left-outer joins can be lateral.
    pub fn is_lateral(&self) -> bool {
        matches!(
            self,
            JoinKind::Inner { lateral: true } | JoinKind::LeftOuter { lateral: true }
        )
    }
}
/// An aggregate function applied to a scalar expression, with optional
/// de-duplication of the input (SQL `agg(DISTINCT expr)`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AggregateExpr {
    /// The aggregation function to apply.
    pub func: AggregateFunc,
    /// The expression the function aggregates over.
    pub expr: Box<HirScalarExpr>,
    /// Whether duplicate input values are collapsed before aggregation.
    pub distinct: bool,
}
/// Aggregate functions analogous to `expr::AggregateFunc`, but whose
/// types may be different.
///
/// Specifically, the nullability of the aggregate columns is more common
/// here than in `expr`, as these aggregates may be applied over empty
/// result sets and should be null in those cases, whereas `expr` variants
/// only return null values when supplied nulls as input.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum AggregateFunc {
    // Type-specialized maximums.
    MaxInt32,
    MaxInt64,
    MaxFloat32,
    MaxFloat64,
    MaxDecimal,
    MaxBool,
    MaxString,
    MaxDate,
    MaxTimestamp,
    MaxTimestampTz,
    // Type-specialized minimums.
    MinInt32,
    MinInt64,
    MinFloat32,
    MinFloat64,
    MinDecimal,
    MinBool,
    MinString,
    MinDate,
    MinTimestamp,
    MinTimestampTz,
    // Type-specialized sums.
    SumInt32,
    SumInt64,
    SumFloat32,
    SumFloat64,
    SumDecimal,
    Count,
    // Boolean disjunction / conjunction over the group.
    Any,
    All,
    /// Accumulates JSON-typed `Datum`s into a JSON list.
    ///
    /// WARNING: Unlike the `jsonb_agg` function that is exposed by the SQL
    /// layer, this function filters out `Datum::Null`, for consistency with
    /// the other aggregate functions.
    JsonbAgg,
    /// Accumulates any number of `Datum::Dummy`s into `Datum::Dummy`.
    ///
    /// Useful for removing an expensive aggregation while maintaining the shape
    /// of a reduce operator.
    Dummy,
}
impl AggregateFunc {
    /// Converts the `sql::AggregateFunc` to a corresponding `expr::AggregateFunc`.
    ///
    /// The mapping is one-to-one by variant name.
    pub fn into_expr(self) -> expr::AggregateFunc {
        match self {
            AggregateFunc::MaxInt64 => expr::AggregateFunc::MaxInt64,
            AggregateFunc::MaxInt32 => expr::AggregateFunc::MaxInt32,
            AggregateFunc::MaxFloat32 => expr::AggregateFunc::MaxFloat32,
            AggregateFunc::MaxFloat64 => expr::AggregateFunc::MaxFloat64,
            AggregateFunc::MaxDecimal => expr::AggregateFunc::MaxDecimal,
            AggregateFunc::MaxBool => expr::AggregateFunc::MaxBool,
            AggregateFunc::MaxString => expr::AggregateFunc::MaxString,
            AggregateFunc::MaxDate => expr::AggregateFunc::MaxDate,
            AggregateFunc::MaxTimestamp => expr::AggregateFunc::MaxTimestamp,
            AggregateFunc::MaxTimestampTz => expr::AggregateFunc::MaxTimestampTz,
            AggregateFunc::MinInt32 => expr::AggregateFunc::MinInt32,
            AggregateFunc::MinInt64 => expr::AggregateFunc::MinInt64,
            AggregateFunc::MinFloat32 => expr::AggregateFunc::MinFloat32,
            AggregateFunc::MinFloat64 => expr::AggregateFunc::MinFloat64,
            AggregateFunc::MinDecimal => expr::AggregateFunc::MinDecimal,
            AggregateFunc::MinBool => expr::AggregateFunc::MinBool,
            AggregateFunc::MinString => expr::AggregateFunc::MinString,
            AggregateFunc::MinDate => expr::AggregateFunc::MinDate,
            AggregateFunc::MinTimestamp => expr::AggregateFunc::MinTimestamp,
            AggregateFunc::MinTimestampTz => expr::AggregateFunc::MinTimestampTz,
            AggregateFunc::SumInt32 => expr::AggregateFunc::SumInt32,
            AggregateFunc::SumInt64 => expr::AggregateFunc::SumInt64,
            AggregateFunc::SumFloat32 => expr::AggregateFunc::SumFloat32,
            AggregateFunc::SumFloat64 => expr::AggregateFunc::SumFloat64,
            AggregateFunc::SumDecimal => expr::AggregateFunc::SumDecimal,
            AggregateFunc::Count => expr::AggregateFunc::Count,
            AggregateFunc::Any => expr::AggregateFunc::Any,
            AggregateFunc::All => expr::AggregateFunc::All,
            AggregateFunc::JsonbAgg => expr::AggregateFunc::JsonbAgg,
            AggregateFunc::Dummy => expr::AggregateFunc::Dummy,
        }
    }
    /// Returns a datum whose inclusion in the aggregation will not change its
    /// result.
    pub fn identity_datum(&self) -> Datum<'static> {
        match self {
            AggregateFunc::Any => Datum::False,
            AggregateFunc::All => Datum::True,
            AggregateFunc::Dummy => Datum::Dummy,
            // For min/max/sum/count/jsonb_agg, nulls are ignored.
            _ => Datum::Null,
        }
    }
    /// The output column type for the result of an aggregation.
    ///
    /// The output column type also contains nullability information, which
    /// is (without further information) true for aggregations that are not
    /// counts.
    pub fn output_type(&self, input_type: ColumnType) -> ColumnType {
        let scalar_type = match self {
            AggregateFunc::Count => ScalarType::Int64,
            AggregateFunc::Any => ScalarType::Bool,
            AggregateFunc::All => ScalarType::Bool,
            AggregateFunc::JsonbAgg => ScalarType::Jsonb,
            // Integer sums are widened to avoid overflow of the input type:
            // int32 sums to int64, int64 sums to a max-precision decimal.
            AggregateFunc::SumInt32 => ScalarType::Int64,
            AggregateFunc::SumInt64 => {
                ScalarType::Decimal(repr::adt::decimal::MAX_DECIMAL_PRECISION, 0)
            }
            _ => input_type.scalar_type,
        };
        // max/min/sum return null on empty sets
        let nullable = !matches!(self, AggregateFunc::Count);
        scalar_type.nullable(nullable)
    }
}
impl HirRelationExpr {
    /// Computes the `RelationType` of this expression.
    ///
    /// `outers` is the stack of relation types for enclosing scopes (the
    /// innermost scope is at the end), consulted when typing subexpressions
    /// of lateral joins; `params` maps parameter numbers to their types.
    pub fn typ(
        &self,
        outers: &[RelationType],
        params: &BTreeMap<usize, ScalarType>,
    ) -> RelationType {
        match self {
            HirRelationExpr::Constant { typ, .. } => typ.clone(),
            HirRelationExpr::Get { typ, .. } => typ.clone(),
            HirRelationExpr::Project { input, outputs } => {
                let input_typ = input.typ(outers, params);
                RelationType::new(
                    outputs
                        .iter()
                        .map(|&i| input_typ.column_types[i].clone())
                        .collect(),
                )
            }
            HirRelationExpr::Map { input, scalars } => {
                // Each scalar may refer to columns introduced by earlier
                // scalars, so the accumulated `typ` is passed back in.
                let mut typ = input.typ(outers, params);
                for scalar in scalars {
                    typ.column_types.push(scalar.typ(outers, &typ, params));
                }
                typ
            }
            HirRelationExpr::CallTable { func, exprs: _ } => func.output_type(),
            HirRelationExpr::Filter { input, .. } | HirRelationExpr::TopK { input, .. } => {
                input.typ(outers, params)
            }
            HirRelationExpr::Join {
                left, right, kind, ..
            } => {
                // Columns from the non-preserved side of an outer join
                // become nullable.
                let left_nullable = matches!(kind, JoinKind::RightOuter | JoinKind::FullOuter);
                let right_nullable =
                    matches!(kind, JoinKind::LeftOuter { .. } | JoinKind::FullOuter);
                let lt = left.typ(outers, params).column_types.into_iter().map(|t| {
                    let nullable = t.nullable || left_nullable;
                    t.nullable(nullable)
                });
                // A lateral join exposes the left input's columns to the
                // right input as a new innermost outer scope.
                let outers = if kind.is_lateral() {
                    let mut outers = outers.to_vec();
                    outers.push(RelationType::new(lt.clone().collect()));
                    Cow::Owned(outers)
                } else {
                    Cow::Borrowed(outers)
                };
                let rt = right
                    .typ(&outers, params)
                    .column_types
                    .into_iter()
                    .map(|t| {
                        let nullable = t.nullable || right_nullable;
                        t.nullable(nullable)
                    });
                RelationType::new(lt.chain(rt).collect())
            }
            HirRelationExpr::Reduce {
                input,
                group_key,
                aggregates,
                expected_group_size: _,
            } => {
                // Output columns are the group key columns followed by one
                // column per aggregate.
                let input_typ = input.typ(outers, params);
                let mut column_types = group_key
                    .iter()
                    .map(|&i| input_typ.column_types[i].clone())
                    .collect::<Vec<_>>();
                for agg in aggregates {
                    column_types.push(agg.typ(outers, &input_typ, params));
                }
                // TODO(frank): add primary key information.
                RelationType::new(column_types)
            }
            // TODO(frank): check for removal; add primary key information.
            HirRelationExpr::Distinct { input }
            | HirRelationExpr::Negate { input }
            | HirRelationExpr::Threshold { input } => input.typ(outers, params),
            HirRelationExpr::Union { base, inputs } => {
                // Each column's type is the pairwise union of the base's and
                // every input's column type.
                let mut base_cols = base.typ(outers, params).column_types;
                for input in inputs {
                    for (base_col, col) in base_cols
                        .iter_mut()
                        .zip_eq(input.typ(outers, params).column_types)
                    {
                        *base_col = base_col.union(&col).unwrap();
                    }
                }
                RelationType::new(base_cols)
            }
            HirRelationExpr::DeclareKeys { input, keys } => {
                input.typ(outers, params).with_keys(keys.clone())
            }
        }
    }
pub fn arity(&self) -> usize {
match self {
HirRelationExpr::Constant { typ, .. } => typ.column_types.len(),
HirRelationExpr::Get { typ, .. } => typ.column_types.len(),
HirRelationExpr::Project { outputs, .. } => outputs.len(),
HirRelationExpr::Map { input, scalars } => input.arity() + scalars.len(),
HirRelationExpr::CallTable { func, .. } => func.output_arity(),
HirRelationExpr::Filter { input, .. }
| HirRelationExpr::TopK { input, .. }
| HirRelationExpr::Distinct { input }
| HirRelationExpr::Negate { input }
| HirRelationExpr::DeclareKeys { input, .. }
| HirRelationExpr::Threshold { input } => input.arity(),
HirRelationExpr::Join { left, right, .. } => left.arity() + right.arity(),
HirRelationExpr::Union { base, .. } => base.arity(),
HirRelationExpr::Reduce {
group_key,
aggregates,
..
} => group_key.len() + aggregates.len(),
}
}
/// Pretty-print this HirRelationExpr to a string.
pub fn pretty(&self) -> String {
Explanation::new(self, &DummyHumanizer).to_string()
}
pub fn is_join_identity(&self) -> bool {
match self {
HirRelationExpr::Constant { rows, .. } => rows.len() == 1 && self.arity() == 0,
_ => false,
}
}
pub fn project(self, outputs: Vec<usize>) -> Self {
if outputs.iter().copied().eq(0..self.arity()) {
// The projection is trivial. Suppress it.
self
} else {
HirRelationExpr::Project {
input: Box::new(self),
outputs,
}
}
}
pub fn map(mut self, scalars: Vec<HirScalarExpr>) -> Self {
if scalars.is_empty() {
// The map is trivial. Suppress it.
self
} else if let HirRelationExpr::Map {
scalars: old_scalars,
input: _,
} = &mut self
{
// Map applied to a map. Fuse the maps.
old_scalars.extend(scalars);
self
} else {
HirRelationExpr::Map {
input: Box::new(self),
scalars,
}
}
}
pub fn filter(self, predicates: Vec<HirScalarExpr>) -> Self {
HirRelationExpr::Filter {
input: Box::new(self),
predicates,
}
}
pub fn declare_keys(self, keys: Vec<Vec<usize>>) -> Self {
HirRelationExpr::DeclareKeys {
input: Box::new(self),
keys,
}
}
pub fn reduce(
self,
group_key: Vec<usize>,
aggregates: Vec<AggregateExpr>,
expected_group_size: Option<usize>,
) -> Self {
HirRelationExpr::Reduce {
input: Box::new(self),
group_key,
aggregates,
expected_group_size,
}
}
#[allow(dead_code)]
pub fn top_k(
self,
group_key: Vec<usize>,
order_key: Vec<ColumnOrder>,
limit: Option<usize>,
offset: usize,
) -> Self {
HirRelationExpr::TopK {
input: Box::new(self),
group_key,
order_key,
limit,
offset,
}
}
pub fn negate(self) -> Self {
HirRelationExpr::Negate {
input: Box::new(self),
}
}
pub fn distinct(self) -> Self {
HirRelationExpr::Distinct {
input: Box::new(self),
}
}
pub fn threshold(self) -> Self {
HirRelationExpr::Threshold {
input: Box::new(self),
}
}
pub fn union(self, other: Self) -> Self {
HirRelationExpr::Union {
base: Box::new(self),
inputs: vec![other],
}
}
pub fn exists(self) -> HirScalarExpr {
HirScalarExpr::Exists(Box::new(self))
}
pub fn select(self) -> HirScalarExpr {
HirScalarExpr::Select(Box::new(self))
}
pub fn take(&mut self) -> HirRelationExpr {
mem::replace(
self,
HirRelationExpr::Constant {
rows: vec![],
typ: RelationType::new(Vec::new()),
},
)
}
    // TODO(benesch): these visit methods are too duplicative. Figure out how
    // to deduplicate.
    /// Visits `self` and every descendant, applying `f` to each node after
    /// its children (post-order).
    pub fn visit<'a, F>(&'a self, f: &mut F)
    where
        F: FnMut(&'a Self),
    {
        self.visit1(|e: &HirRelationExpr| e.visit(f));
        f(self);
    }
pub fn visit1<'a, F>(&'a self, mut f: F)
where
F: FnMut(&'a Self),
{
match self {
HirRelationExpr::Constant { .. }
| HirRelationExpr::Get { .. }
| HirRelationExpr::CallTable { .. } => (),
HirRelationExpr::Project { input, .. } => {
f(input);
}
HirRelationExpr::Map { input, .. } => {
f(input);
}
HirRelationExpr::Filter { input, .. } => {
f(input);
}
HirRelationExpr::Join { left, right, .. } => {
f(left);
f(right);
}
HirRelationExpr::Reduce { input, .. } => {
f(input);
}
HirRelationExpr::Distinct { input } => {
f(input);
}
HirRelationExpr::TopK { input, .. } => {
f(input);
}
HirRelationExpr::Negate { input } => {
f(input);
}
HirRelationExpr::Threshold { input } => {
f(input);
}
HirRelationExpr::DeclareKeys { input, .. } => {
f(input);
}
HirRelationExpr::Union { base, inputs } => {
f(base);
for input in inputs {
f(input);
}
}
}
}
    /// Mutable post-order visit: applies `f` to each node after its
    /// children.
    pub fn visit_mut<F>(&mut self, f: &mut F)
    where
        F: FnMut(&mut Self),
    {
        self.visit1_mut(|e: &mut HirRelationExpr| e.visit_mut(f));
        f(self);
    }
pub fn visit1_mut<'a, F>(&'a mut self, mut f: F)
where
F: FnMut(&'a mut Self),
{
match self {
HirRelationExpr::Constant { .. }
| HirRelationExpr::Get { .. }
| HirRelationExpr::CallTable { .. } => (),
HirRelationExpr::Project { input, .. } => {
f(input);
}
HirRelationExpr::Map { input, .. } => {
f(input);
}
HirRelationExpr::Filter { input, .. } => {
f(input);
}
HirRelationExpr::Join { left, right, .. } => {
f(left);
f(right);
}
HirRelationExpr::Reduce { input, .. } => {
f(input);
}
HirRelationExpr::Distinct { input } => {
f(input);
}
HirRelationExpr::TopK { input, .. } => {
f(input);
}
HirRelationExpr::Negate { input } => {
f(input);
}
HirRelationExpr::Threshold { input } => {
f(input);
}
HirRelationExpr::DeclareKeys { input, .. } => {
f(input);
}
HirRelationExpr::Union { base, inputs } => {
f(base);
for input in inputs {
f(input);
}
}
}
}
    /// Visits the column references in this relation expression.
    ///
    /// The `depth` argument should indicate the subquery nesting depth of the expression,
    /// which will be incremented with each subquery entered and presented to the supplied
    /// function `f`.
    pub fn visit_columns<F>(&mut self, depth: usize, f: &mut F)
    where
        F: FnMut(usize, &mut ColumnRef),
    {
        match self {
            HirRelationExpr::Join {
                kind,
                on,
                left,
                right,
            } => {
                left.visit_columns(depth, f);
                // A lateral join's right side (and its `on` clause) sees the
                // left side as an additional scope, hence the deeper depth.
                let depth = if kind.is_lateral() { depth + 1 } else { depth };
                right.visit_columns(depth, f);
                on.visit_columns(depth, f);
            }
            HirRelationExpr::Map { scalars, input } => {
                for scalar in scalars {
                    scalar.visit_columns(depth, f);
                }
                input.visit_columns(depth, f);
            }
            HirRelationExpr::CallTable { exprs, .. } => {
                for expr in exprs {
                    expr.visit_columns(depth, f);
                }
            }
            HirRelationExpr::Filter { predicates, input } => {
                for predicate in predicates {
                    predicate.visit_columns(depth, f);
                }
                input.visit_columns(depth, f);
            }
            HirRelationExpr::Reduce {
                aggregates, input, ..
            } => {
                for aggregate in aggregates {
                    aggregate.visit_columns(depth, f);
                }
                input.visit_columns(depth, f);
            }
            HirRelationExpr::Union { base, inputs } => {
                base.visit_columns(depth, f);
                for input in inputs {
                    input.visit_columns(depth, f);
                }
            }
            // Operators with no scalar subexpressions of their own: recurse
            // into the single input.
            HirRelationExpr::Project { input, .. }
            | HirRelationExpr::Distinct { input }
            | HirRelationExpr::TopK { input, .. }
            | HirRelationExpr::Negate { input }
            | HirRelationExpr::DeclareKeys { input, .. }
            | HirRelationExpr::Threshold { input } => {
                input.visit_columns(depth, f);
            }
            // Leaves: no column references to visit.
            HirRelationExpr::Constant { .. } | HirRelationExpr::Get { .. } => (),
        }
    }
    /// Replaces any parameter references in the expression with the
    /// corresponding datum from `params`.
    ///
    /// Traversal stops at the first binding error.
    pub fn bind_parameters(&mut self, params: &Params) -> Result<(), anyhow::Error> {
        match self {
            HirRelationExpr::Join {
                on, left, right, ..
            } => {
                on.bind_parameters(params)?;
                left.bind_parameters(params)?;
                right.bind_parameters(params)
            }
            HirRelationExpr::Map { scalars, input } => {
                for scalar in scalars {
                    scalar.bind_parameters(params)?;
                }
                input.bind_parameters(params)
            }
            HirRelationExpr::CallTable { exprs, .. } => {
                for expr in exprs {
                    expr.bind_parameters(params)?;
                }
                Ok(())
            }
            HirRelationExpr::Filter { predicates, input } => {
                for predicate in predicates {
                    predicate.bind_parameters(params)?;
                }
                input.bind_parameters(params)
            }
            HirRelationExpr::Reduce {
                aggregates, input, ..
            } => {
                for aggregate in aggregates {
                    aggregate.bind_parameters(params)?;
                }
                input.bind_parameters(params)
            }
            HirRelationExpr::Union { base, inputs } => {
                for input in inputs {
                    input.bind_parameters(params)?;
                }
                base.bind_parameters(params)
            }
            // Operators with no scalar subexpressions of their own: recurse
            // into the single input.
            HirRelationExpr::Project { input, .. }
            | HirRelationExpr::Distinct { input, .. }
            | HirRelationExpr::TopK { input, .. }
            | HirRelationExpr::Negate { input, .. }
            | HirRelationExpr::DeclareKeys { input, .. }
            | HirRelationExpr::Threshold { input, .. } => input.bind_parameters(params),
            // Leaves contain no parameters.
            HirRelationExpr::Constant { .. } | HirRelationExpr::Get { .. } => Ok(()),
        }
    }
    /// See the documentation for [`HirScalarExpr::splice_parameters`].
    pub fn splice_parameters(&mut self, params: &[HirScalarExpr], depth: usize) {
        match self {
            HirRelationExpr::Join {
                kind,
                on,
                left,
                right,
            } => {
                left.splice_parameters(params, depth);
                // A lateral join's right side (and its `on` clause) sits one
                // scope deeper than the left side.
                let depth = if kind.is_lateral() { depth + 1 } else { depth };
                right.splice_parameters(params, depth);
                on.splice_parameters(params, depth);
            }
            HirRelationExpr::Map { scalars, input } => {
                for scalar in scalars {
                    scalar.splice_parameters(params, depth);
                }
                input.splice_parameters(params, depth);
            }
            HirRelationExpr::CallTable { exprs, .. } => {
                for expr in exprs {
                    expr.splice_parameters(params, depth);
                }
            }
            HirRelationExpr::Filter { predicates, input } => {
                for predicate in predicates {
                    predicate.splice_parameters(params, depth);
                }
                input.splice_parameters(params, depth);
            }
            HirRelationExpr::Reduce {
                aggregates, input, ..
            } => {
                for aggregate in aggregates {
                    aggregate.expr.splice_parameters(params, depth);
                }
                input.splice_parameters(params, depth);
            }
            HirRelationExpr::Union { base, inputs } => {
                base.splice_parameters(params, depth);
                for input in inputs {
                    input.splice_parameters(params, depth);
                }
            }
            // Operators with no scalar subexpressions of their own: recurse
            // into the single input.
            HirRelationExpr::Project { input, .. }
            | HirRelationExpr::Distinct { input }
            | HirRelationExpr::TopK { input, .. }
            | HirRelationExpr::Negate { input }
            | HirRelationExpr::DeclareKeys { input, .. }
            | HirRelationExpr::Threshold { input } => {
                input.splice_parameters(params, depth);
            }
            // Leaves contain no parameters.
            HirRelationExpr::Constant { .. } | HirRelationExpr::Get { .. } => (),
        }
    }
/// Constructs a constant collection from specific rows and schema.
pub fn constant(rows: Vec<Vec<Datum>>, typ: RelationType) -> Self {
let mut row_packer = repr::RowPacker::new();
let rows = rows
.into_iter()
.map(move |datums| row_packer.pack(datums))
.collect();
HirRelationExpr::Constant { rows, typ }
}
pub fn finish(&mut self, finishing: expr::RowSetFinishing) {
if !finishing.is_trivial(self.arity()) {
*self = HirRelationExpr::Project {
input: Box::new(HirRelationExpr::TopK {
input: Box::new(std::mem::replace(
self,
HirRelationExpr::Constant {
rows: vec![],
typ: RelationType::new(Vec::new()),
},
)),
group_key: vec![],
order_key: finishing.order_by,
limit: finishing.limit,
offset: finishing.offset,
}),
outputs: finishing.project,
}
}
}
}
impl HirScalarExpr {
/// Replaces any parameter references in the expression with the
/// corresponding datum in `params`.
pub fn bind_parameters(&mut self, params: &Params) -> Result<(), anyhow::Error> {
match self {
HirScalarExpr::Literal(_, _)
| HirScalarExpr::Column(_)
| HirScalarExpr::CallNullary(_) => Ok(()),
HirScalarExpr::Parameter(n) => {
let datum = match params.datums.iter().nth(*n - 1) {
None => bail!("there is no parameter ${}", n),
Some(datum) => datum,
};
let scalar_type = ¶ms.types[*n - 1];
let row = Row::pack(&[datum]);
let column_type = scalar_type.clone().nullable(datum.is_null());
*self = HirScalarExpr::Literal(row, column_type);
Ok(())
}
HirScalarExpr::CallUnary { expr, .. } => expr.bind_parameters(params),
HirScalarExpr::CallBinary { expr1, expr2, .. } => {
expr1.bind_parameters(params)?;
expr2.bind_parameters(params)
}
HirScalarExpr::CallVariadic { exprs, .. } => {
for expr in exprs {
expr.bind_parameters(params)?;
}
Ok(())
}
HirScalarExpr::If { cond, then, els } => {
cond.bind_parameters(params)?;
then.bind_parameters(params)?;
els.bind_parameters(params)
}
HirScalarExpr::Exists(expr) | HirScalarExpr::Select(expr) => {
expr.bind_parameters(params)
}
}
}
    /// Like [`HirScalarExpr::bind_parameters`], except that parameters are
    /// replaced with the corresponding expression fragment from `params` rather
    /// than a datum.
    ///
    /// Specifically, the parameter `$1` will be replaced with `params[0]`, the
    /// parameter `$2` will be replaced with `params[1]`, and so on. Parameters
    /// in `self` that refer to invalid indices of `params` will cause a panic.
    ///
    /// Column references in parameters will be corrected to account for the
    /// depth at which they are spliced.
    pub fn splice_parameters(&mut self, params: &[HirScalarExpr], depth: usize) {
        self.visit_mut(&mut |e| match e {
            HirScalarExpr::Parameter(i) => {
                *e = params[*i - 1].clone();
                // Correct any column references in the parameter expression for
                // its new depth.
                e.visit_columns(0, &mut |_, col| col.level += depth);
            }
            // Subqueries introduce a new scope, so splice their bodies one
            // level deeper.
            HirScalarExpr::Exists(e) | HirScalarExpr::Select(e) => {
                e.splice_parameters(params, depth + 1)
            }
            _ => (),
        })
    }
pub fn literal(datum: Datum, scalar_type: ScalarType) -> HirScalarExpr {
let row = Row::pack(&[datum]);
HirScalarExpr::Literal(row, scalar_type.nullable(datum.is_null()))
}
    /// Returns the boolean literal `true`.
    pub fn literal_true() -> HirScalarExpr {
        HirScalarExpr::literal(Datum::True, ScalarType::Bool)
    }
    /// Returns a null literal of the given scalar type.
    pub fn literal_null(scalar_type: ScalarType) -> HirScalarExpr {
        HirScalarExpr::literal(Datum::Null, scalar_type)
    }
pub fn literal_1d_array(
datums: Vec<Datum>,
element_scalar_type: ScalarType,
) -> Result<HirScalarExpr, anyhow::Error> {
let scalar_type = match element_scalar_type {
ScalarType::Array(_) => {
return Err(anyhow::anyhow!("cannot build array from array type"))
}
typ => ScalarType::Array(Box::new(typ)).nullable(false),
};
let mut packer = RowPacker::new();
packer.push_array(
&[ArrayDimension {
lower_bound: 1,
length: datums.len(),
}],
datums,
)?;
let row = packer.finish();
Ok(HirScalarExpr::Literal(row, scalar_type))
}
pub fn call_unary(self, func: UnaryFunc) -> Self {
HirScalarExpr::CallUnary {
func,
expr: Box::new(self),
}
}
pub fn call_binary(self, other: Self, func: BinaryFunc) -> Self {
HirScalarExpr::CallBinary {
func,
expr1: Box::new(self),
expr2: Box::new(other),
}
}
pub fn take(&mut self) -> Self {
mem::replace(self, HirScalarExpr::literal_null(ScalarType::String))
}
    /// Visits `self` and every descendant scalar expression, applying `f` to
    /// each node after its children (post-order).
    pub fn visit<'a, F>(&'a self, f: &mut F)
    where
        F: FnMut(&'a Self),
    {
        self.visit1(|e: &HirScalarExpr| e.visit(f));
        f(self);
    }
pub fn visit1<'a, F>(&'a self, mut f: F)
where
F: FnMut(&'a Self),
{
use HirScalarExpr::*;
match self {
Column(..) | Parameter(..) | Literal(..) | CallNullary(..) => (),
CallUnary { expr, .. } => f(expr),
CallBinary { expr1, expr2, .. } => {
f(expr1);
f(expr2);
}
CallVariadic { exprs, .. } => {
for expr in exprs {
f(expr);
}
}
If { cond, then, els } => {
f(cond);
f(then);
f(els);
}
Exists(..) | Select(..) => (),
}
}
    /// Mutable post-order visit: applies `f` to each node after its
    /// children.
    pub fn visit_mut<F>(&mut self, f: &mut F)
    where
        F: FnMut(&mut Self),
    {
        self.visit1_mut(|e: &mut HirScalarExpr| e.visit_mut(f));
        f(self);
    }
    /// Applies `f` to this node before descending into its children.
    ///
    /// NOTE(review): the recursive step calls `visit_mut` (post-order), not
    /// `visit_mut_pre`, so only the root is visited pre-order while
    /// descendants are visited post-order — confirm whether that is
    /// intentional.
    pub fn visit_mut_pre<F>(&mut self, f: &mut F)
    where
        F: FnMut(&mut Self),
    {
        f(self);
        self.visit1_mut(|e: &mut HirScalarExpr| e.visit_mut(f));
    }
pub fn visit1_mut<F>(&mut self, mut f: F)
where
F: FnMut(&mut Self),
{
use HirScalarExpr::*;
match self {
Column(..) | Parameter(..) | Literal(..) | CallNullary(..) => (),
CallUnary { expr, .. } => f(expr),
CallBinary { expr1, expr2, .. } => {
f(expr1);
f(expr2);
}
CallVariadic { exprs, .. } => {
for expr in exprs {
f(expr);
}
}
If { cond, then, els } => {
f(cond);
f(then);
f(els);
}
Exists(..) | Select(..) => (),
}
}
    /// Visits the column references in this scalar expression.
    ///
    /// The `depth` argument should indicate the subquery nesting depth of the expression,
    /// which will be incremented with each subquery entered and presented to the supplied
    /// function `f`.
    pub fn visit_columns<F>(&mut self, depth: usize, f: &mut F)
    where
        F: FnMut(usize, &mut ColumnRef),
    {
        match self {
            // These variants contain no column references.
            HirScalarExpr::Literal(_, _)
            | HirScalarExpr::Parameter(_)
            | HirScalarExpr::CallNullary(_) => (),
            HirScalarExpr::Column(col_ref) => f(depth, col_ref),
            HirScalarExpr::CallUnary { expr, .. } => expr.visit_columns(depth, f),
            HirScalarExpr::CallBinary { expr1, expr2, .. } => {
                expr1.visit_columns(depth, f);
                expr2.visit_columns(depth, f);
            }
            HirScalarExpr::CallVariadic { exprs, .. } => {
                for expr in exprs {
                    expr.visit_columns(depth, f);
                }
            }
            HirScalarExpr::If { cond, then, els } => {
                cond.visit_columns(depth, f);
                then.visit_columns(depth, f);
                els.visit_columns(depth, f);
            }
            // Entering a subquery increments the nesting depth.
            HirScalarExpr::Exists(expr) | HirScalarExpr::Select(expr) => {
                expr.visit_columns(depth + 1, f);
            }
        }
    }
fn simplify_to_literal(self) -> Option<Row> {
let mut expr = self.lower_uncorrelated().ok()?;
expr.reduce(&repr::RelationType::empty());
match expr {
expr::MirScalarExpr::Literal(Ok(row), _) => Some(row),
_ => None,
}
}
/// Attempts to simplify this expression to a literal 64-bit integer.
///
/// Returns `None` if this expression cannot be simplified, e.g. because it
/// contains non-literal values.
///
/// # Panics
///
/// Panics if this expression does not have type [`ScalarType::Int64`].
pub fn into_literal_int64(self) -> Option<i64> {
self.simplify_to_literal().and_then(|row| {
let datum = row.unpack_first();
if datum.is_null() {
None
} else {
Some(datum.unwrap_int64())
}
})
}
/// Attempts to simplify this expression to a literal string.
///
/// Returns `None` if this expression cannot be simplified, e.g. because it
/// contains non-literal values.
///
/// # Panics
///
/// Panics if this expression does not have type [`ScalarType::String`].
pub fn into_literal_string(self) -> Option<String> {
self.simplify_to_literal().and_then(|row| {
let datum = row.unpack_first();
if datum.is_null() {
None
} else {
Some(datum.unwrap_str().to_owned())
}
})
}
}
impl AbstractExpr for HirScalarExpr {
    type Type = ColumnType;
    /// Computes the column type of this scalar expression, given the
    /// enclosing scopes (`outers`, innermost last), the current relation's
    /// type (`inner`), and parameter types (`params`).
    fn typ(
        &self,
        outers: &[RelationType],
        inner: &RelationType,
        params: &BTreeMap<usize, ScalarType>,
    ) -> Self::Type {
        match self {
            HirScalarExpr::Column(ColumnRef { level, column }) => {
                if *level == 0 {
                    inner.column_types[*column].clone()
                } else {
                    // Level `k` refers to the k-th scope outward; scopes are
                    // stored innermost-last, hence indexing from the end.
                    outers[outers.len() - *level].column_types[*column].clone()
                }
            }
            // Parameters are conservatively treated as nullable.
            HirScalarExpr::Parameter(n) => params[&n].clone().nullable(true),
            HirScalarExpr::Literal(_, typ) => typ.clone(),
            HirScalarExpr::CallNullary(func) => func.output_type(),
            HirScalarExpr::CallUnary { expr, func } => {
                func.output_type(expr.typ(outers, inner, params))
            }
            HirScalarExpr::CallBinary { expr1, expr2, func } => func.output_type(
                expr1.typ(outers, inner, params),
                expr2.typ(outers, inner, params),
            ),
            HirScalarExpr::CallVariadic { exprs, func } => {
                func.output_type(exprs.iter().map(|e| e.typ(outers, inner, params)).collect())
            }
            HirScalarExpr::If { cond: _, then, els } => {
                // The result type is the union of the two branch types.
                let then_type = then.typ(outers, inner, params);
                let else_type = els.typ(outers, inner, params);
                then_type.union(&else_type).unwrap()
            }
            HirScalarExpr::Exists(_) => ScalarType::Bool.nullable(true),
            HirScalarExpr::Select(expr) => {
                // The subquery sees the current relation as its innermost
                // outer scope; its single output column becomes nullable
                // (a select subquery may produce no rows).
                let mut outers = outers.to_vec();
                outers.push(inner.clone());
                expr.typ(&outers, params)
                    .column_types
                    .into_element()
                    .nullable(true)
            }
        }
    }
}
impl AggregateExpr {
    /// Replaces any parameter references in the expression with the
    /// corresponding datum from `params`.
    pub fn bind_parameters(&mut self, params: &Params) -> Result<(), anyhow::Error> {
        // Delegates to the argument expression; the aggregate function
        // itself contains no parameters.
        self.expr.bind_parameters(params)
    }
    /// Computes the output column type: the aggregate function's output type
    /// applied to its argument's type.
    pub fn typ(
        &self,
        outers: &[RelationType],
        inner: &RelationType,
        params: &BTreeMap<usize, ScalarType>,
    ) -> ColumnType {
        self.func.output_type(self.expr.typ(outers, inner, params))
    }
    /// Visits the column references in this aggregate expression.
    ///
    /// The `depth` argument should indicate the subquery nesting depth of the expression,
    /// which will be incremented with each subquery entered and presented to the supplied
    /// function `f`.
    pub fn visit_columns<F>(&mut self, depth: usize, f: &mut F)
    where
        F: FnMut(usize, &mut ColumnRef),
    {
        self.expr.visit_columns(depth, f);
    }
}
| 34.125535 | 130 | 0.531038 |
11c94140c24ad195b7eb529d1297b9d5fc38c426 | 9,331 | #![no_std]
#[cfg(feature = "graphics")]
extern crate embedded_graphics;
use embedded_hal::blocking::delay::DelayMs;
use embedded_hal::digital::v2::OutputPin;
use core::iter::once;
use display_interface::DataFormat::{U16BEIter, U8Iter};
use display_interface::WriteOnlyDataCommand;
pub mod spi;
/// Trait representing the interface to the hardware.
///
/// Intended to abstract the various buses (SPI, MPU 8/9/16-bit) from the Controller code.
pub trait Interface {
    /// Bus-specific error type returned by failed transfers.
    type Error;
    /// Sends a command with a sequence of 8-bit arguments
    ///
    /// Mostly used for sending configuration commands
    fn write(&mut self, command: u8, data: &[u8]) -> Result<(), Self::Error>;
    /// Sends a command with a sequence of 16-bit data words
    ///
    /// Mostly used for sending MemoryWrite command and other commands
    /// with 16-bit arguments
    fn write_iter(
        &mut self,
        command: u8,
        data: impl IntoIterator<Item = u16>,
    ) -> Result<(), Self::Error>;
}
// Native framebuffer dimensions in the default (portrait) orientation.
const WIDTH: usize = 240;
const HEIGHT: usize = 320;
/// Errors produced by the driver.
#[derive(Debug)]
pub enum Error<PinE> {
    /// The display interface rejected a transfer.
    Interface,
    /// Driving the reset pin failed; carries the pin's own error.
    OutputPin(PinE),
}
/// The default orientation is Portrait
pub enum Orientation {
    /// 240×320, the power-on default.
    Portrait,
    /// Portrait rotated 180°.
    PortraitFlipped,
    /// 320×240.
    Landscape,
    /// Landscape rotated 180°.
    LandscapeFlipped,
}
/// There are two methods for drawing to the screen:
/// [draw_raw](struct.Ili9341.html#method.draw_raw) and
/// [draw_rect_iter](struct.Ili9341.html#method.draw_rect_iter).
///
/// In both cases the expected pixel format is rgb565.
///
/// The hardware makes it efficient to draw rectangles on the screen.
///
/// What happens is the following:
///
/// - A drawing window is prepared (with the 2 opposite corner coordinates)
/// - The starting point for drawing is the top left corner of this window
/// - Every pair of bytes received is interpreted as a pixel value in rgb565
/// - As soon as a pixel is received, an internal counter is incremented,
/// and the next word will fill the next pixel (the adjacent on the right, or
/// the first of the next row if the row ended)
pub struct Ili9341<IFACE, RESET> {
    // Bus used to send commands and pixel data.
    interface: IFACE,
    // Pin wired to the controller's hardware-reset line.
    reset: RESET,
    // Current dimensions in pixels; swapped by `set_orientation`.
    width: usize,
    height: usize,
}
impl<PinE, IFACE, RESET> Ili9341<IFACE, RESET>
where
IFACE: WriteOnlyDataCommand,
RESET: OutputPin<Error = PinE>,
{
    /// Creates a new driver: hardware-resets the controller, runs the
    /// initialization command sequence, exits sleep mode, and turns the
    /// display on.
    ///
    /// Blocks on `delay` for the mandatory reset and wake-up waits
    /// (several hundred milliseconds in total).
    pub fn new<DELAY: DelayMs<u16>>(
        interface: IFACE,
        reset: RESET,
        delay: &mut DELAY,
    ) -> Result<Self, Error<PinE>> {
        let mut ili9341 = Ili9341 {
            interface,
            reset,
            width: WIDTH,
            height: HEIGHT,
        };
        ili9341.hard_reset(delay).map_err(Error::OutputPin)?;
        ili9341.command(Command::SoftwareReset, &[])?;
        delay.delay_ms(200);
        // Power, timing, and pump-ratio configuration. The magic values
        // follow a common ILI9341 init sequence; see the controller
        // datasheet for the meaning of each argument byte.
        ili9341.command(Command::PowerControlA, &[0x39, 0x2c, 0x00, 0x34, 0x02])?;
        ili9341.command(Command::PowerControlB, &[0x00, 0xc1, 0x30])?;
        ili9341.command(Command::DriverTimingControlA, &[0x85, 0x00, 0x78])?;
        ili9341.command(Command::DriverTimingControlB, &[0x00, 0x00])?;
        ili9341.command(Command::PowerOnSequenceControl, &[0x64, 0x03, 0x12, 0x81])?;
        ili9341.command(Command::PumpRatioControl, &[0x20])?;
        ili9341.command(Command::PowerControl1, &[0x23])?;
        ili9341.command(Command::PowerControl2, &[0x10])?;
        ili9341.command(Command::VCOMControl1, &[0x3e, 0x28])?;
        ili9341.command(Command::VCOMControl2, &[0x86])?;
        // Memory access order and pixel format (0x55 — presumably 16bpp
        // rgb565 per the datasheet; confirm against MADCTL/PIXSET docs).
        ili9341.command(Command::MemoryAccessControl, &[0x48])?;
        ili9341.command(Command::PixelFormatSet, &[0x55])?;
        ili9341.command(Command::FrameControlNormal, &[0x00, 0x18])?;
        ili9341.command(Command::DisplayFunctionControl, &[0x08, 0x82, 0x27])?;
        // Gamma configuration.
        ili9341.command(Command::Enable3G, &[0x00])?;
        ili9341.command(Command::GammaSet, &[0x01])?;
        ili9341.command(
            Command::PositiveGammaCorrection,
            &[
                0x0f, 0x31, 0x2b, 0x0c, 0x0e, 0x08, 0x4e, 0xf1, 0x37, 0x07, 0x10, 0x03, 0x0e, 0x09,
                0x00,
            ],
        )?;
        ili9341.command(
            Command::NegativeGammaCorrection,
            &[
                0x00, 0x0e, 0x14, 0x03, 0x11, 0x07, 0x31, 0xc1, 0x48, 0x08, 0x0f, 0x0c, 0x31, 0x36,
                0x0f,
            ],
        )?;
        // Wake the panel and switch the display on.
        ili9341.command(Command::SleepOut, &[])?;
        delay.delay_ms(120);
        ili9341.command(Command::DisplayOn, &[])?;
        Ok(ili9341)
    }
    /// Pulses the reset pin (high → low → high, 200 ms per phase) to
    /// hardware-reset the controller.
    fn hard_reset<DELAY: DelayMs<u16>>(&mut self, delay: &mut DELAY) -> Result<(), PinE> {
        // set high if previously low
        self.reset.set_high()?;
        delay.delay_ms(200);
        // set low for reset
        self.reset.set_low()?;
        delay.delay_ms(200);
        // set high for normal operation
        self.reset.set_high()?;
        delay.delay_ms(200);
        Ok(())
    }
fn command(&mut self, cmd: Command, args: &[u8]) -> Result<(), Error<PinE>> {
self.interface
.send_commands(U8Iter(&mut once(cmd as u8)))
.map_err(|_| Error::Interface)?;
self.interface
.send_data(U8Iter(&mut args.iter().cloned()))
.map_err(|_| Error::Interface)
}
fn write_iter<I: IntoIterator<Item = u16>>(&mut self, data: I) -> Result<(), Error<PinE>> {
self.command(Command::MemoryWrite, &[])?;
self.interface
.send_data(U16BEIter(&mut data.into_iter()))
.map_err(|_| Error::Interface)
}
fn set_window(&mut self, x0: u16, y0: u16, x1: u16, y1: u16) -> Result<(), Error<PinE>> {
self.command(
Command::ColumnAddressSet,
&[
(x0 >> 8) as u8,
(x0 & 0xff) as u8,
(x1 >> 8) as u8,
(x1 & 0xff) as u8,
],
)?;
self.command(
Command::PageAddressSet,
&[
(y0 >> 8) as u8,
(y0 & 0xff) as u8,
(y1 >> 8) as u8,
(y1 & 0xff) as u8,
],
)?;
Ok(())
}
/// Draw a rectangle on the screen, represented by top-left corner (x0, y0)
/// and bottom-right corner (x1, y1).
///
/// The border is included.
///
/// This method accepts an iterator of rgb565 pixel values.
///
/// The iterator is useful to avoid wasting memory by holding a buffer for
/// the whole screen when it is not necessary.
pub fn draw_rect_iter<I: IntoIterator<Item = u16>>(
&mut self,
x0: u16,
y0: u16,
x1: u16,
y1: u16,
data: I,
) -> Result<(), Error<PinE>> {
self.set_window(x0, y0, x1, y1)?;
self.write_iter(data)
}
/// Draw a rectangle on the screen, represented by top-left corner (x0, y0)
/// and bottom-right corner (x1, y1).
///
/// The border is included.
///
/// This method accepts a raw buffer of words that will be copied to the screen
/// video memory.
///
/// The expected format is rgb565.
pub fn draw_raw(
&mut self,
x0: u16,
y0: u16,
x1: u16,
y1: u16,
data: &[u16],
) -> Result<(), Error<PinE>> {
self.set_window(x0, y0, x1, y1)?;
self.write_iter(data.iter().cloned())
}
/// Change the orientation of the screen
pub fn set_orientation(&mut self, mode: Orientation) -> Result<(), Error<PinE>> {
match mode {
Orientation::Portrait => {
self.width = WIDTH;
self.height = HEIGHT;
self.command(Command::MemoryAccessControl, &[0x40 | 0x08])
}
Orientation::Landscape => {
self.width = HEIGHT;
self.height = WIDTH;
self.command(Command::MemoryAccessControl, &[0x20 | 0x08])
}
Orientation::PortraitFlipped => {
self.width = WIDTH;
self.height = HEIGHT;
self.command(Command::MemoryAccessControl, &[0x80 | 0x08])
}
Orientation::LandscapeFlipped => {
self.width = HEIGHT;
self.height = WIDTH;
self.command(Command::MemoryAccessControl, &[0x40 | 0x80 | 0x20 | 0x08])
}
}
}
    /// Get the current screen width. It can change based on the current orientation
    pub fn width(&self) -> usize {
        self.width
    }
    /// Get the current screen height. It can change based on the current orientation
    pub fn height(&self) -> usize {
        self.height
    }
}
#[cfg(feature = "graphics")]
mod graphics;
/// ILI9341 command opcodes used by this driver.
///
/// Each discriminant is the raw command byte sent over the interface.
#[derive(Clone, Copy)]
enum Command {
    SoftwareReset = 0x01,
    PowerControlA = 0xcb,
    PowerControlB = 0xcf,
    DriverTimingControlA = 0xe8,
    DriverTimingControlB = 0xea,
    PowerOnSequenceControl = 0xed,
    PumpRatioControl = 0xf7,
    PowerControl1 = 0xc0,
    PowerControl2 = 0xc1,
    VCOMControl1 = 0xc5,
    VCOMControl2 = 0xc7,
    MemoryAccessControl = 0x36,
    PixelFormatSet = 0x3a,
    FrameControlNormal = 0xb1,
    DisplayFunctionControl = 0xb6,
    Enable3G = 0xf2,
    GammaSet = 0x26,
    PositiveGammaCorrection = 0xe0,
    NegativeGammaCorrection = 0xe1,
    SleepOut = 0x11,
    DisplayOn = 0x29,
    ColumnAddressSet = 0x2a,
    PageAddressSet = 0x2b,
    MemoryWrite = 0x2c,
}
| 31.630508 | 99 | 0.580645 |
1ee04a46243a264f1e73aa7e2149e47ddf58e5fa | 93,719 | // Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
use back::lto::{self, ModuleBuffer, ThinBuffer};
use back::link::{self, get_linker, remove};
use back::linker::LinkerInfo;
use back::symbol_export::ExportedSymbols;
use base;
use consts;
use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
use rustc::dep_graph::{DepGraph, WorkProductFileKind};
use rustc::middle::cstore::{LinkMeta, EncodedMetadata};
use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
AllPasses, Sanitizer};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc_back::LinkerFlavor;
use time_graph::{self, TimeGraph, Timeline};
use llvm;
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef};
use llvm::{SMDiagnosticRef, ContextRef};
use {CrateTranslation, ModuleSource, ModuleTranslation, CompiledModule, ModuleKind};
use CrateInfo;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
use rustc::util::fs::{link_or_copy, rename_or_copy_remove};
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
use errors::emitter::{Emitter};
use syntax::attr;
use syntax::ext::hygiene::Mark;
use syntax_pos::MultiSpan;
use syntax_pos::symbol::Symbol;
use type_::Type;
use context::{is_pie_binary, get_reloc_model};
use jobserver::{Client, Acquired};
use rustc_demangle;
use std::any::Any;
use std::ffi::{CString, CStr};
use std::fs;
use std::io::{self, Write};
use std::mem;
use std::path::{Path, PathBuf};
use std::str;
use std::sync::Arc;
use std::sync::mpsc::{channel, Sender, Receiver};
use std::slice;
use std::time::Instant;
use std::thread;
use libc::{c_uint, c_void, c_char, size_t};
/// Accepted `-C relocation-model` strings and their LLVM equivalents.
pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 7] = [
    ("pic", llvm::RelocMode::PIC),
    ("static", llvm::RelocMode::Static),
    ("default", llvm::RelocMode::Default),
    ("dynamic-no-pic", llvm::RelocMode::DynamicNoPic),
    ("ropi", llvm::RelocMode::ROPI),
    ("rwpi", llvm::RelocMode::RWPI),
    ("ropi-rwpi", llvm::RelocMode::ROPI_RWPI),
];
/// Accepted `-C code-model` strings and their LLVM equivalents
/// (looked up by `target_machine_factory`).
pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [
    ("default", llvm::CodeModel::Default),
    ("small", llvm::CodeModel::Small),
    ("kernel", llvm::CodeModel::Kernel),
    ("medium", llvm::CodeModel::Medium),
    ("large", llvm::CodeModel::Large),
];
/// Accepted TLS-model strings and their LLVM equivalents.
pub const TLS_MODEL_ARGS : [(&'static str, llvm::ThreadLocalMode); 4] = [
    ("global-dynamic", llvm::ThreadLocalMode::GeneralDynamic),
    ("local-dynamic", llvm::ThreadLocalMode::LocalDynamic),
    ("initial-exec", llvm::ThreadLocalMode::InitialExec),
    ("local-exec", llvm::ThreadLocalMode::LocalExec),
];
/// Emit a fatal diagnostic for an LLVM failure, appending LLVM's last
/// recorded error string (if any) to `msg`.
pub fn llvm_err(handler: &errors::Handler, msg: String) -> FatalError {
    // Fold any detail LLVM left behind into the message before reporting.
    if let Some(llvm_detail) = llvm::last_error() {
        handler.fatal(&format!("{}: {}", msg, llvm_detail))
    } else {
        handler.fatal(&msg)
    }
}
/// Run the codegen pass manager `pm` over module `m` on `target` and write
/// the result to `output` in the requested `file_type` (object or assembly).
pub fn write_output_file(
        handler: &errors::Handler,
        target: llvm::TargetMachineRef,
        pm: llvm::PassManagerRef,
        m: ModuleRef,
        output: &Path,
        file_type: llvm::FileType) -> Result<(), FatalError> {
    unsafe {
        let path_c = path2cstr(output);
        let rc = llvm::LLVMRustWriteOutputFile(
            target, pm, m, path_c.as_ptr(), file_type);
        // On failure, `llvm_err` augments the message with LLVM's own
        // last-error string before emitting a fatal diagnostic.
        if rc.into_result().is_ok() {
            Ok(())
        } else {
            Err(llvm_err(handler,
                         format!("could not write output to {}", output.display())))
        }
    }
}
// On android, we by default compile for armv7 processors. This enables
// things like double word CAS instructions (rather than emulating them)
// which are *far* more efficient. This is obviously undesirable in some
// cases, so if any sort of target feature is specified we don't append v7
// to the feature list.
//
// On iOS only armv7 and newer are supported. So it is useful to
// get all hardware potential via VFP3 (hardware floating point)
// and NEON (SIMD) instructions supported by LLVM.
// Note that without those flags various linking errors might
// arise as some of intrinsics are converted into function calls
// and nobody provides implementations those functions
/// Build the comma-separated LLVM feature string for this session: the
/// target's baseline features plus the user's `-C target-feature` list,
/// minus features that rustc (not LLVM) implements itself.
fn target_feature(sess: &Session) -> String {
    // Features handled purely on the rustc side; LLVM would reject or
    // misinterpret them, so strip them from the list we forward.
    let rustc_features = [
        "crt-static",
    ];
    let requested = sess.opts.cg.target_feature.split(',');
    let backend_features: Vec<_> = requested
        .filter(|f| !rustc_features.iter().any(|s| f.contains(s)))
        .collect();
    format!("{},{}",
            sess.target.target.options.features,
            backend_features.join(","))
}
fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel {
match optimize {
config::OptLevel::No => llvm::CodeGenOptLevel::None,
config::OptLevel::Less => llvm::CodeGenOptLevel::Less,
config::OptLevel::Default => llvm::CodeGenOptLevel::Default,
config::OptLevel::Aggressive => llvm::CodeGenOptLevel::Aggressive,
_ => llvm::CodeGenOptLevel::Default,
}
}
/// Translate rustc's `-C opt-level` into LLVM's size-optimization setting;
/// only `s`/`z` levels enable any size optimization.
fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize {
    use config::OptLevel::{Size, SizeMin};
    match optimize {
        Size => llvm::CodeGenOptSizeDefault,
        SizeMin => llvm::CodeGenOptSizeAggressive,
        _ => llvm::CodeGenOptSizeNone,
    }
}
/// Build a single LLVM `TargetMachine` for this session, aborting
/// compilation with a fatal diagnostic if creation fails.
pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
    let factory = target_machine_factory(sess);
    match factory() {
        Ok(tm) => tm,
        // `llvm_err` reports the failure (with LLVM's last error attached);
        // the panic carries the resulting FatalError up to the driver.
        Err(err) => panic!(llvm_err(sess.diagnostic(), err)),
    }
}
/// Builds a thread-safe factory closure that creates fresh LLVM
/// `TargetMachine`s configured from the session's codegen options.
///
/// Every session-derived setting is resolved and captured by value up
/// front, so the returned closure can be handed to codegen worker threads
/// without touching the `Session` again.
pub fn target_machine_factory(sess: &Session)
    -> Arc<Fn() -> Result<TargetMachineRef, String> + Send + Sync>
{
    let reloc_model = get_reloc_model(sess);
    let opt_level = get_llvm_opt_level(sess.opts.optimize);
    let use_softfp = sess.opts.cg.soft_float;
    let ffunction_sections = sess.target.target.options.function_sections;
    // Data sections intentionally track the function-sections setting.
    let fdata_sections = ffunction_sections;
    // `-C code-model` overrides the target's default code model.
    let code_model_arg = match sess.opts.cg.code_model {
        Some(ref s) => &s,
        None => &sess.target.target.options.code_model,
    };
    // Validate the requested model against the known table; an unknown
    // value is a user error, reported before aborting.
    let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
        |&&arg| arg.0 == code_model_arg) {
        Some(x) => x.1,
        _ => {
            sess.err(&format!("{:?} is not a valid code model",
                              code_model_arg));
            sess.abort_if_errors();
            bug!();
        }
    };
    let singlethread = sess.target.target.options.singlethread;
    // Strings crossing the FFI boundary must be NUL-terminated.
    let triple = &sess.target.target.llvm_target;
    let triple = CString::new(triple.as_bytes()).unwrap();
    // `-C target-cpu` overrides the target's default CPU.
    let cpu = match sess.opts.cg.target_cpu {
        Some(ref s) => &**s,
        None => &*sess.target.target.options.cpu
    };
    let cpu = CString::new(cpu.as_bytes()).unwrap();
    let features = CString::new(target_feature(sess).as_bytes()).unwrap();
    let is_pie_binary = is_pie_binary(sess);
    let trap_unreachable = sess.target.target.options.trap_unreachable;
    Arc::new(move || {
        let tm = unsafe {
            llvm::LLVMRustCreateTargetMachine(
                triple.as_ptr(), cpu.as_ptr(), features.as_ptr(),
                code_model,
                reloc_model,
                opt_level,
                use_softfp,
                is_pie_binary,
                ffunction_sections,
                fdata_sections,
                trap_unreachable,
                singlethread,
            )
        };
        // LLVM signals failure with a null pointer; surface it as a
        // String error so callers can decide how to report it.
        if tm.is_null() {
            Err(format!("Could not create LLVM TargetMachine for triple: {}",
                        triple.to_str().unwrap()))
        } else {
            Ok(tm)
        }
    })
}
/// Module-specific configuration for `optimize_and_codegen`.
///
/// Three instances exist per session (regular, metadata, allocator
/// modules); flags are filled in by `start_async_translation` and
/// `set_flags`.
pub struct ModuleConfig {
    /// Names of additional optimization passes to run.
    passes: Vec<String>,
    /// Some(level) to optimize at a certain level, or None to run
    /// absolutely no optimizations (used for the metadata module).
    pub opt_level: Option<llvm::CodeGenOptLevel>,
    /// Some(level) to optimize binary size, or None to not affect program size.
    opt_size: Option<llvm::CodeGenOptSize>,
    // Flags indicating which outputs to produce.
    emit_no_opt_bc: bool,       // bitcode dump taken before any passes run
    emit_bc: bool,              // plain bitcode
    emit_bc_compressed: bool,   // compressed bitcode (for embedding in rlibs)
    emit_lto_bc: bool,          // bitcode after LTO
    emit_ir: bool,              // textual LLVM IR
    emit_asm: bool,             // assembly
    emit_obj: bool,             // machine-code object file
    // Miscellaneous flags. These are mostly copied from command-line
    // options.
    no_verify: bool,
    no_prepopulate_passes: bool,
    no_builtins: bool,
    time_passes: bool,
    vectorize_loop: bool,
    vectorize_slp: bool,
    merge_functions: bool,
    inline_threshold: Option<usize>,
    // Instead of creating an object file by doing LLVM codegen, just
    // make the object file bitcode. Provides easy compatibility with
    // emscripten's ecc compiler, when used as the linker.
    obj_is_bitcode: bool,
}
impl ModuleConfig {
    /// Start from a configuration that emits nothing and optimizes nothing;
    /// callers toggle the flags they need afterwards.
    fn new(passes: Vec<String>) -> ModuleConfig {
        ModuleConfig {
            passes,
            opt_level: None,
            opt_size: None,

            // No outputs requested yet.
            emit_no_opt_bc: false,
            emit_bc: false,
            emit_bc_compressed: false,
            emit_lto_bc: false,
            emit_ir: false,
            emit_asm: false,
            emit_obj: false,
            obj_is_bitcode: false,

            // Misc flags default to off.
            no_verify: false,
            no_prepopulate_passes: false,
            no_builtins: false,
            time_passes: false,
            vectorize_loop: false,
            vectorize_slp: false,
            merge_functions: false,
            inline_threshold: None,
        }
    }

    /// Copy the session's command-line/target options into this config.
    fn set_flags(&mut self, sess: &Session, no_builtins: bool) {
        self.no_verify = sess.no_verify();
        self.no_prepopulate_passes = sess.opts.cg.no_prepopulate_passes;
        self.no_builtins = no_builtins || sess.target.target.options.no_builtins;
        self.time_passes = sess.time_passes();
        self.inline_threshold = sess.opts.cg.inline_threshold;
        self.obj_is_bitcode = sess.target.target.options.obj_is_bitcode;

        // Copy what clang does by turning on loop vectorization at O2 and
        // slp vectorization at O3. Otherwise configure other optimization
        // aspects of this pass manager builder.
        // Turn off vectorization for emscripten, as it's not very well
        // supported.
        let at_o2 = sess.opts.optimize == config::OptLevel::Default;
        let at_o3 = sess.opts.optimize == config::OptLevel::Aggressive;
        let emscripten = sess.target.target.options.is_like_emscripten;
        self.vectorize_loop = !sess.opts.cg.no_vectorize_loops &&
                              (at_o2 || at_o3) &&
                              !emscripten;
        self.vectorize_slp = !sess.opts.cg.no_vectorize_slp &&
                             at_o3 &&
                             !emscripten;
        self.merge_functions = at_o2 || at_o3;
    }
}
/// Additional resources used by optimize_and_codegen (not module specific).
///
/// Cloned per worker; everything shared is behind `Arc` or is `Copy`.
#[derive(Clone)]
pub struct CodegenContext {
    // Resouces needed when running LTO
    pub time_passes: bool,
    pub lto: bool,
    pub thinlto: bool,
    pub no_landing_pads: bool,
    // `-C save-temps`: keep intermediate bitcode dumps around.
    pub save_temps: bool,
    pub fewer_names: bool,
    pub exported_symbols: Arc<ExportedSymbols>,
    pub opts: Arc<config::Options>,
    pub crate_types: Vec<config::CrateType>,
    pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>,
    output_filenames: Arc<OutputFilenames>,
    // Per-kind module configs; selected via `CodegenContext::config`.
    regular_module_config: Arc<ModuleConfig>,
    metadata_module_config: Arc<ModuleConfig>,
    allocator_module_config: Arc<ModuleConfig>,
    // Factory producing a fresh TargetMachine per codegen job.
    pub tm_factory: Arc<Fn() -> Result<TargetMachineRef, String> + Send + Sync>,
    pub msvc_imps_needed: bool,
    pub target_pointer_width: String,
    // True when binaryen is the "linker": regular modules are then
    // assembled to wasm instead of native objects (see `codegen`).
    binaryen_linker: bool,
    debuginfo: config::DebugInfoLevel,
    wasm_import_memory: bool,
    // Number of cgus excluding the allocator/metadata modules
    pub total_cgus: usize,
    // Handler to use for diagnostics produced during codegen.
    pub diag_emitter: SharedEmitter,
    // LLVM passes added by plugins.
    pub plugin_passes: Vec<String>,
    // LLVM optimizations for which we want to print remarks.
    pub remark: Passes,
    // Worker thread number
    pub worker: usize,
    // The incremental compilation session directory, or None if we are not
    // compiling incrementally
    pub incr_comp_session_dir: Option<PathBuf>,
    // Channel back to the main control thread to send messages to
    coordinator_send: Sender<Box<Any + Send>>,
    // A reference to the TimeGraph so we can register timings. None means that
    // measuring is disabled.
    time_graph: Option<TimeGraph>,
}
impl CodegenContext {
    /// Create a diagnostic `Handler` that forwards everything to the
    /// shared emitter owned by the coordinator thread.
    pub fn create_diag_handler(&self) -> Handler {
        Handler::with_emitter(true, false, Box::new(self.diag_emitter.clone()))
    }

    /// Select the module configuration matching a module's kind.
    pub fn config(&self, kind: ModuleKind) -> &ModuleConfig {
        match kind {
            ModuleKind::Regular => &self.regular_module_config,
            ModuleKind::Metadata => &self.metadata_module_config,
            ModuleKind::Allocator => &self.allocator_module_config,
        }
    }

    /// With `-C save-temps`, dump `trans`'s LLVM module to a
    /// `<cgu>.<name>.bc` temp file; a no-op otherwise.
    pub fn save_temp_bitcode(&self, trans: &ModuleTranslation, name: &str) {
        if self.save_temps {
            unsafe {
                let extension = format!("{}.bc", name);
                let cgu_name = Some(&trans.name[..]);
                let bc_path = self.output_filenames.temp_path_ext(&extension, cgu_name);
                let bc_cstr = path2cstr(&bc_path);
                llvm::LLVMWriteBitcodeToFile(trans.llvm().unwrap().llmod,
                                             bc_cstr.as_ptr());
            }
        }
    }
}
/// RAII guard that installs rustc's diagnostic callbacks on an LLVM context
/// and removes them on drop.
///
/// The boxed pair is what the C callbacks receive as their user pointer; it
/// is boxed so its address stays stable for the guard's lifetime.
struct DiagnosticHandlers<'a> {
    inner: Box<(&'a CodegenContext, &'a Handler)>,
    llcx: ContextRef,
}
impl<'a> DiagnosticHandlers<'a> {
    /// Register `inline_asm_handler` and `diagnostic_handler` on `llcx`,
    /// passing the boxed (context, handler) pair as the C user pointer.
    fn new(cgcx: &'a CodegenContext,
           handler: &'a Handler,
           llcx: ContextRef) -> DiagnosticHandlers<'a> {
        let data = Box::new((cgcx, handler));
        unsafe {
            // Hand LLVM a raw pointer to the heap-allocated pair; the Box
            // stays alive inside the returned guard, and Drop deregisters
            // the callbacks before it is freed.
            let arg = &*data as &(_, _) as *const _ as *mut _;
            llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, arg);
            llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, arg);
        }
        DiagnosticHandlers {
            inner: data,
            llcx: llcx,
        }
    }
}
impl<'a> Drop for DiagnosticHandlers<'a> {
    /// Deregister both callbacks (by installing null user pointers) so LLVM
    /// never invokes them with a dangling pointer after `inner` is freed.
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustSetInlineAsmDiagnosticHandler(self.llcx, inline_asm_handler, 0 as *mut _);
            llvm::LLVMContextSetDiagnosticHandler(self.llcx, diagnostic_handler, 0 as *mut _);
        }
    }
}
/// Forward an inline-asm error from LLVM to the shared diagnostic emitter;
/// `cookie` identifies the originating span on the rustc side.
unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext,
                                               msg: &'b str,
                                               cookie: c_uint) {
    let owned_msg = msg.to_owned();
    cgcx.diag_emitter.inline_asm_error(cookie as u32, owned_msg);
}
/// C callback LLVM invokes for inline-asm diagnostics; `user` is the boxed
/// (CodegenContext, Handler) pair installed by `DiagnosticHandlers::new`.
unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
                                        user: *const c_void,
                                        cookie: c_uint) {
    // A null user pointer means the handlers were already deregistered.
    if user.is_null() {
        return
    }
    let (cgcx, _) = *(user as *const (&CodegenContext, &Handler));
    // Render the SMDiagnostic into a Rust string before forwarding it.
    let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
        .expect("non-UTF8 SMDiagnostic");
    report_inline_asm(cgcx, &msg, cookie);
}
/// C callback LLVM invokes for general context diagnostics; dispatches
/// inline-asm errors and (when `-C remark` enables them) optimization
/// remarks to rustc's diagnostic machinery.
unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
    // A null user pointer means the handlers were already deregistered.
    if user.is_null() {
        return
    }
    let (cgcx, diag_handler) = *(user as *const (&CodegenContext, &Handler));
    match llvm::diagnostic::Diagnostic::unpack(info) {
        llvm::diagnostic::InlineAsm(inline) => {
            report_inline_asm(cgcx,
                              &llvm::twine_to_string(inline.message),
                              inline.cookie);
        }
        llvm::diagnostic::Optimization(opt) => {
            // Only print remarks for passes the user asked about
            // (`-C remark=all` or `-C remark=<pass>`).
            let enabled = match cgcx.remark {
                AllPasses => true,
                SomePasses(ref v) => v.iter().any(|s| *s == opt.pass_name),
            };
            if enabled {
                diag_handler.note_without_error(&format!("optimization {} for {} at {}:{}:{}: {}",
                                                opt.kind.describe(),
                                                opt.pass_name,
                                                opt.filename,
                                                opt.line,
                                                opt.column,
                                                opt.message));
            }
        }
        // Other diagnostic kinds are intentionally ignored here.
        _ => (),
    }
}
// Unsafe due to LLVM calls.
/// Run the LLVM optimization pipeline over a freshly translated module:
/// optional pre-opt bitcode dump, then function- and module-level pass
/// managers populated from `config` plus any plugin-requested passes.
unsafe fn optimize(cgcx: &CodegenContext,
                   diag_handler: &Handler,
                   mtrans: &ModuleTranslation,
                   config: &ModuleConfig,
                   timeline: &mut Timeline)
    -> Result<(), FatalError>
{
    // Only freshly translated modules reach this point; cached modules are
    // reused without re-optimizing.
    let (llmod, llcx, tm) = match mtrans.source {
        ModuleSource::Translated(ref llvm) => (llvm.llmod, llvm.llcx, llvm.tm),
        ModuleSource::Preexisting(_) => {
            bug!("optimize_and_codegen: called with ModuleSource::Preexisting")
        }
    };
    // Route LLVM diagnostics through rustc for the duration of this call.
    let _handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);
    let module_name = mtrans.name.clone();
    let module_name = Some(&module_name[..]);
    if config.emit_no_opt_bc {
        let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
        let out = path2cstr(&out);
        llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
    }
    if config.opt_level.is_some() {
        // Create the two optimizing pass managers. These mirror what clang
        // does, and are by populated by LLVM's default PassManagerBuilder.
        // Each manager has a different set of passes, but they also share
        // some common passes.
        let fpm = llvm::LLVMCreateFunctionPassManagerForModule(llmod);
        let mpm = llvm::LLVMCreatePassManager();
        // If we're verifying or linting, add them to the function pass
        // manager.
        // Helper: look up a pass by name and add it to the manager matching
        // its kind. Returns false if LLVM doesn't know the pass.
        let addpass = |pass_name: &str| {
            let pass_name = CString::new(pass_name).unwrap();
            let pass = llvm::LLVMRustFindAndCreatePass(pass_name.as_ptr());
            if pass.is_null() {
                return false;
            }
            let pass_manager = match llvm::LLVMRustPassKind(pass) {
                llvm::PassKind::Function => fpm,
                llvm::PassKind::Module => mpm,
                llvm::PassKind::Other => {
                    diag_handler.err("Encountered LLVM pass kind we can't handle");
                    return true
                },
            };
            llvm::LLVMRustAddPass(pass_manager, pass);
            true
        };
        if !config.no_verify { assert!(addpass("verify")); }
        if !config.no_prepopulate_passes {
            llvm::LLVMRustAddAnalysisPasses(tm, fpm, llmod);
            llvm::LLVMRustAddAnalysisPasses(tm, mpm, llmod);
            let opt_level = config.opt_level.unwrap_or(llvm::CodeGenOptLevel::None);
            with_llvm_pmb(llmod, &config, opt_level, &mut |b| {
                llvm::LLVMPassManagerBuilderPopulateFunctionPassManager(b, fpm);
                llvm::LLVMPassManagerBuilderPopulateModulePassManager(b, mpm);
            })
        }
        // User-requested extra passes (`-C passes=...`): unknown names are
        // only a warning.
        for pass in &config.passes {
            if !addpass(pass) {
                diag_handler.warn(&format!("unknown pass `{}`, ignoring",
                                           pass));
            }
        }
        // Plugin-requested passes: unknown names are a hard error.
        for pass in &cgcx.plugin_passes {
            if !addpass(pass) {
                diag_handler.err(&format!("a plugin asked for LLVM pass \
                                           `{}` but LLVM does not \
                                           recognize it", pass));
            }
        }
        diag_handler.abort_if_errors();
        // Finally, run the actual optimization passes
        time(config.time_passes, &format!("llvm function passes [{}]", module_name.unwrap()), ||
             llvm::LLVMRustRunFunctionPassManager(fpm, llmod));
        timeline.record("fpm");
        time(config.time_passes, &format!("llvm module passes [{}]", module_name.unwrap()), ||
             llvm::LLVMRunPassManager(mpm, llmod));
        // Deallocate managers that we're now done with
        llvm::LLVMDisposePassManager(fpm);
        llvm::LLVMDisposePassManager(mpm);
    }
    Ok(())
}
fn generate_lto_work(cgcx: &CodegenContext,
modules: Vec<ModuleTranslation>)
-> Vec<(WorkItem, u64)>
{
let mut timeline = cgcx.time_graph.as_ref().map(|tg| {
tg.start(TRANS_WORKER_TIMELINE,
TRANS_WORK_PACKAGE_KIND,
"generate lto")
}).unwrap_or(Timeline::noop());
let mode = if cgcx.lto {
lto::LTOMode::WholeCrateGraph
} else {
lto::LTOMode::JustThisCrate
};
let lto_modules = lto::run(cgcx, modules, mode, &mut timeline)
.unwrap_or_else(|e| panic!(e));
lto_modules.into_iter().map(|module| {
let cost = module.cost();
(WorkItem::LTO(module), cost)
}).collect()
}
/// Run the codegen passes over an already-optimized module, producing
/// whichever final artifacts `config` requests (bitcode, compressed
/// bitcode, LLVM IR, assembly, object file — or a wasm module when
/// binaryen is acting as the linker), and describe the results as a
/// `CompiledModule`.
unsafe fn codegen(cgcx: &CodegenContext,
                  diag_handler: &Handler,
                  mtrans: ModuleTranslation,
                  config: &ModuleConfig,
                  timeline: &mut Timeline)
    -> Result<CompiledModule, FatalError>
{
    timeline.record("codegen");
    // Only freshly translated modules are codegen'd here; cached modules
    // never reach this function.
    let (llmod, llcx, tm) = match mtrans.source {
        ModuleSource::Translated(ref llvm) => (llvm.llmod, llvm.llcx, llvm.tm),
        ModuleSource::Preexisting(_) => {
            bug!("codegen: called with ModuleSource::Preexisting")
        }
    };
    let module_name = mtrans.name.clone();
    let module_name = Some(&module_name[..]);
    // Route LLVM diagnostics through rustc while codegen runs; dropped
    // explicitly at the end of this function.
    let handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);
    if cgcx.msvc_imps_needed {
        create_msvc_imps(cgcx, llcx, llmod);
    }
    // A codegen-specific pass manager is used to generate object
    // files for an LLVM module.
    //
    // Apparently each of these pass managers is a one-shot kind of
    // thing, so we create a new one for each type of output. The
    // pass manager passed to the closure should be ensured to not
    // escape the closure itself, and the manager should only be
    // used once.
    unsafe fn with_codegen<F, R>(tm: TargetMachineRef,
                                 llmod: ModuleRef,
                                 no_builtins: bool,
                                 f: F) -> R
        where F: FnOnce(PassManagerRef) -> R,
    {
        let cpm = llvm::LLVMCreatePassManager();
        llvm::LLVMRustAddAnalysisPasses(tm, cpm, llmod);
        llvm::LLVMRustAddLibraryInfo(cpm, llmod, no_builtins);
        f(cpm)
    }
    // If we're going to generate wasm code from the assembly that llvm
    // generates then we'll be transitively affecting a ton of options below.
    // This only happens on the wasm target now.
    let asm2wasm = cgcx.binaryen_linker &&
        !cgcx.crate_types.contains(&config::CrateTypeRlib) &&
        mtrans.kind == ModuleKind::Regular;
    // Change what we write and cleanup based on whether obj files are
    // just llvm bitcode. In that case write bitcode, and possibly
    // delete the bitcode if it wasn't requested. Don't generate the
    // machine code, instead copy the .o file from the .bc
    let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm);
    let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm;
    let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm;
    let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm;
    let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
    let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
    if write_bc || config.emit_bc_compressed {
        // Serialize the module once; prefer the ThinLTO buffer when the
        // LLVM build supports it. The two locals just keep the chosen
        // buffer alive while `data` borrows from it.
        let thin;
        let old;
        let data = if llvm::LLVMRustThinLTOAvailable() {
            thin = ThinBuffer::new(llmod);
            thin.data()
        } else {
            old = ModuleBuffer::new(llmod);
            old.data()
        };
        timeline.record("make-bc");
        if write_bc {
            if let Err(e) = fs::write(&bc_out, data) {
                diag_handler.err(&format!("failed to write bytecode: {}", e));
            }
            timeline.record("write-bc");
        }
        if config.emit_bc_compressed {
            // Compressed bytecode is what gets embedded into rlibs for LTO.
            let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
            let data = bytecode::encode(&mtrans.llmod_id, data);
            if let Err(e) = fs::write(&dst, data) {
                diag_handler.err(&format!("failed to write bytecode: {}", e));
            }
            timeline.record("compress-bc");
        }
    }
    time(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()),
         || -> Result<(), FatalError> {
        if config.emit_ir {
            let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
            let out = path2cstr(&out);
            // Callback handed to LLVM so symbol names in the printed IR are
            // demangled; returns 0 when demangling isn't possible or the
            // provided buffer is too small.
            extern "C" fn demangle_callback(input_ptr: *const c_char,
                                            input_len: size_t,
                                            output_ptr: *mut c_char,
                                            output_len: size_t) -> size_t {
                let input = unsafe {
                    slice::from_raw_parts(input_ptr as *const u8, input_len as usize)
                };
                let input = match str::from_utf8(input) {
                    Ok(s) => s,
                    Err(_) => return 0,
                };
                let output = unsafe {
                    slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
                };
                let mut cursor = io::Cursor::new(output);
                let demangled = match rustc_demangle::try_demangle(input) {
                    Ok(d) => d,
                    Err(_) => return 0,
                };
                if let Err(_) = write!(cursor, "{:#}", demangled) {
                    // Possible only if provided buffer is not big enough
                    return 0;
                }
                cursor.position() as size_t
            }
            with_codegen(tm, llmod, config.no_builtins, |cpm| {
                llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr(), demangle_callback);
                llvm::LLVMDisposePassManager(cpm);
            });
            timeline.record("ir");
        }
        if config.emit_asm || (asm2wasm && config.emit_obj) {
            let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
            // We can't use the same module for asm and binary output, because that triggers
            // various errors like invalid IR or broken binaries, so we might have to clone the
            // module to produce the asm output
            let llmod = if config.emit_obj {
                llvm::LLVMCloneModule(llmod)
            } else {
                llmod
            };
            with_codegen(tm, llmod, config.no_builtins, |cpm| {
                write_output_file(diag_handler, tm, cpm, llmod, &path,
                                  llvm::FileType::AssemblyFile)
            })?;
            // Dispose the clone we made above; the original is still needed.
            if config.emit_obj {
                llvm::LLVMDisposeModule(llmod);
            }
            timeline.record("asm");
        }
        if asm2wasm && config.emit_obj {
            // The "object file" on wasm is a binaryen-assembled wasm module
            // built from the textual assembly emitted above.
            let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
            binaryen_assemble(cgcx, diag_handler, &assembly, &obj_out);
            timeline.record("binaryen");
            if !config.emit_asm {
                drop(fs::remove_file(&assembly));
            }
        } else if write_obj {
            with_codegen(tm, llmod, config.no_builtins, |cpm| {
                write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
                                  llvm::FileType::ObjectFile)
            })?;
            timeline.record("obj");
        }
        Ok(())
    })?;
    if copy_bc_to_obj {
        // obj_is_bitcode: the ".o" is literally the bitcode file.
        debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
        if let Err(e) = link_or_copy(&bc_out, &obj_out) {
            diag_handler.err(&format!("failed to copy bitcode to object file: {}", e));
        }
    }
    if rm_bc {
        // Bitcode was only written as a vehicle for the object file above.
        debug!("removing_bitcode {:?}", bc_out);
        if let Err(e) = fs::remove_file(&bc_out) {
            diag_handler.err(&format!("failed to remove bitcode: {}", e));
        }
    }
    // Deregister the LLVM diagnostic callbacks before returning.
    drop(handlers);
    Ok(mtrans.into_compiled_module(config.emit_obj,
                                   config.emit_bc,
                                   config.emit_bc_compressed,
                                   &cgcx.output_filenames))
}
/// Translates the LLVM-generated `assembly` on the filesystem into a wasm
/// module using binaryen, placing the output at `object`.
///
/// In this case the "object" is actually a full and complete wasm module. We
/// won't actually be doing anything else to the output for now. This is all
/// pretty janky and will get removed as soon as a linker for wasm exists.
fn binaryen_assemble(cgcx: &CodegenContext,
                     handler: &Handler,
                     assembly: &Path,
                     object: &Path) {
    use rustc_binaryen::{Module, ModuleOptions};

    // Slurp the textual assembly up front; binaryen wants a NUL-terminated
    // buffer, hence the CString conversion.
    let input = fs::read(&assembly).and_then(|contents| {
        Ok(CString::new(contents)?)
    });

    let mut options = ModuleOptions::new();
    if cgcx.debuginfo != config::NoDebugInfo {
        options.debuginfo(true);
    }
    if cgcx.crate_types.contains(&config::CrateTypeExecutable) {
        options.start("main");
    }
    options.stack(1024 * 1024);
    options.import_memory(cgcx.wasm_import_memory);

    // Assemble and write the finished wasm module; any failure along the
    // way collapses into a single io::Error reported below.
    let outcome = input
        .and_then(|input| {
            Module::new(&input, &options)
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
        })
        .and_then(|binary| {
            fs::write(&object, binary.data())
        });
    if let Err(e) = outcome {
        handler.err(&format!("failed to run binaryen assembler: {}", e));
    }
}
/// The complete set of compiled artifacts for a crate: one entry per
/// regular codegen unit, plus the metadata module and (if the crate defines
/// one) the allocator shim module.
pub struct CompiledModules {
    pub modules: Vec<CompiledModule>,
    pub metadata_module: CompiledModule,
    pub allocator_module: Option<CompiledModule>,
}
/// Whether compressed bitcode must be embedded in the rlib: only when we
/// are building an rlib AND will also link a final executable from it.
fn need_crate_bitcode_for_rlib(sess: &Session) -> bool {
    let building_rlib = sess.crate_types.borrow().contains(&config::CrateTypeRlib);
    let linking_exe = sess.opts.output_types.contains_key(&OutputType::Exe);
    building_rlib && linking_exe
}
/// Kick off the background codegen machinery: derive per-module-kind
/// `ModuleConfig`s from the session's flags, spawn the coordinator thread,
/// and return an `OngoingCrateTranslation` handle that the main thread uses
/// to feed modules in and collect results.
pub fn start_async_translation(tcx: TyCtxt,
                               time_graph: Option<TimeGraph>,
                               link: LinkMeta,
                               metadata: EncodedMetadata,
                               coordinator_receive: Receiver<Box<Any + Send>>,
                               total_cgus: usize)
                               -> OngoingCrateTranslation {
    let sess = tcx.sess;
    let crate_output = tcx.output_filenames(LOCAL_CRATE);
    let crate_name = tcx.crate_name(LOCAL_CRATE);
    let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
    let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
                                                       "windows_subsystem");
    // Validate `#![windows_subsystem = "..."]` eagerly; only two values
    // are legal.
    let windows_subsystem = subsystem.map(|subsystem| {
        if subsystem != "windows" && subsystem != "console" {
            tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
                                     `windows` and `console` are allowed",
                                    subsystem));
        }
        subsystem.to_string()
    });
    // Some targets can't use LLVM's integrated assembler when an object
    // file (or final executable) is being produced.
    let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
        (tcx.sess.target.target.options.no_integrated_as &&
         (crate_output.outputs.contains_key(&OutputType::Object) ||
          crate_output.outputs.contains_key(&OutputType::Exe)));
    let linker_info = LinkerInfo::new(tcx);
    let crate_info = CrateInfo::new(tcx);
    // Without the integrated assembler we emit assembly only and run an
    // external assembler afterwards, so force the output type set.
    let output_types_override = if no_integrated_as {
        OutputTypes::new(&[(OutputType::Assembly, None)])
    } else {
        sess.opts.output_types.clone()
    };
    // Figure out what we actually need to build.
    let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
    let mut metadata_config = ModuleConfig::new(vec![]);
    let mut allocator_config = ModuleConfig::new(vec![]);
    // Sanitizers are implemented as extra LLVM passes on regular modules.
    if let Some(ref sanitizer) = sess.opts.debugging_opts.sanitizer {
        match *sanitizer {
            Sanitizer::Address => {
                modules_config.passes.push("asan".to_owned());
                modules_config.passes.push("asan-module".to_owned());
            }
            Sanitizer::Memory => {
                modules_config.passes.push("msan".to_owned())
            }
            Sanitizer::Thread => {
                modules_config.passes.push("tsan".to_owned())
            }
            _ => {}
        }
    }
    if sess.opts.debugging_opts.profile {
        modules_config.passes.push("insert-gcov-profiling".to_owned())
    }
    modules_config.opt_level = Some(get_llvm_opt_level(sess.opts.optimize));
    modules_config.opt_size = Some(get_llvm_opt_size(sess.opts.optimize));
    // Save all versions of the bytecode if we're saving our temporaries.
    if sess.opts.cg.save_temps {
        modules_config.emit_no_opt_bc = true;
        modules_config.emit_bc = true;
        modules_config.emit_lto_bc = true;
        metadata_config.emit_bc = true;
        allocator_config.emit_bc = true;
    }
    // Emit compressed bitcode files for the crate if we're emitting an rlib.
    // Whenever an rlib is created, the bitcode is inserted into the archive in
    // order to allow LTO against it.
    if need_crate_bitcode_for_rlib(sess) {
        modules_config.emit_bc_compressed = true;
        allocator_config.emit_bc_compressed = true;
    }
    // Translate each requested output type into per-module emit flags.
    for output_type in output_types_override.keys() {
        match *output_type {
            OutputType::Bitcode => { modules_config.emit_bc = true; }
            OutputType::LlvmAssembly => { modules_config.emit_ir = true; }
            OutputType::Assembly => {
                modules_config.emit_asm = true;
                // If we're not using the LLVM assembler, this function
                // could be invoked specially with output_type_assembly, so
                // in this case we still want the metadata object file.
                if !sess.opts.output_types.contains_key(&OutputType::Assembly) {
                    metadata_config.emit_obj = true;
                    allocator_config.emit_obj = true;
                }
            }
            OutputType::Object => { modules_config.emit_obj = true; }
            OutputType::Metadata => { metadata_config.emit_obj = true; }
            OutputType::Exe => {
                modules_config.emit_obj = true;
                metadata_config.emit_obj = true;
                allocator_config.emit_obj = true;
            },
            OutputType::Mir => {}
            OutputType::DepInfo => {}
        }
    }
    modules_config.set_flags(sess, no_builtins);
    metadata_config.set_flags(sess, no_builtins);
    allocator_config.set_flags(sess, no_builtins);
    // Exclude metadata and allocator modules from time_passes output, since
    // they throw off the "LLVM passes" measurement.
    metadata_config.time_passes = false;
    allocator_config.time_passes = false;
    let client = sess.jobserver_from_env.clone().unwrap_or_else(|| {
        // Pick a "reasonable maximum" if we don't otherwise have a jobserver in
        // our environment, capping out at 32 so we don't take everything down
        // by hogging the process run queue.
        Client::new(32).expect("failed to create jobserver")
    });
    let (shared_emitter, shared_emitter_main) = SharedEmitter::new();
    let (trans_worker_send, trans_worker_receive) = channel();
    // Spawn the coordinator thread that schedules codegen work items.
    let coordinator_thread = start_executing_work(tcx,
                                                  &crate_info,
                                                  shared_emitter,
                                                  trans_worker_send,
                                                  coordinator_receive,
                                                  total_cgus,
                                                  client,
                                                  time_graph.clone(),
                                                  Arc::new(modules_config),
                                                  Arc::new(metadata_config),
                                                  Arc::new(allocator_config));
    OngoingCrateTranslation {
        crate_name,
        link,
        metadata,
        windows_subsystem,
        linker_info,
        no_integrated_as,
        crate_info,
        time_graph,
        coordinator_send: tcx.tx_to_llvm_workers.clone(),
        trans_worker_receive,
        shared_emitter_main,
        future: coordinator_thread,
        output_filenames: tcx.output_filenames(LOCAL_CRATE),
    }
}
fn copy_module_artifacts_into_incr_comp_cache(sess: &Session,
dep_graph: &DepGraph,
compiled_modules: &CompiledModules) {
if sess.opts.incremental.is_none() {
return;
}
for module in compiled_modules.modules.iter() {
let mut files = vec![];
if let Some(ref path) = module.object {
files.push((WorkProductFileKind::Object, path.clone()));
}
if let Some(ref path) = module.bytecode {
files.push((WorkProductFileKind::Bytecode, path.clone()));
}
if let Some(ref path) = module.bytecode_compressed {
files.push((WorkProductFileKind::BytecodeCompressed, path.clone()));
}
save_trans_partition(sess, dep_graph, &module.name, &files);
}
}
/// Copy per-codegen-unit temp outputs into the user-visible output
/// locations and delete the temporaries that are no longer needed,
/// according to the requested `--emit` types and `-C save-temps`.
fn produce_final_output_artifacts(sess: &Session,
                                  compiled_modules: &CompiledModules,
                                  crate_output: &OutputFilenames) {
    let mut user_wants_bitcode = false;
    let mut user_wants_objects = false;
    // Produce final compile outputs.
    // Copying is best-effort: a failure becomes a session error, not a panic.
    let copy_gracefully = |from: &Path, to: &Path| {
        if let Err(e) = fs::copy(from, to) {
            sess.err(&format!("could not copy {:?} to {:?}: {}", from, to, e));
        }
    };
    let copy_if_one_unit = |output_type: OutputType,
                            keep_numbered: bool| {
        if compiled_modules.modules.len() == 1 {
            // 1) Only one codegen unit. In this case it's no difficulty
            //    to copy `foo.0.x` to `foo.x`.
            let module_name = Some(&compiled_modules.modules[0].name[..]);
            let path = crate_output.temp_path(output_type, module_name);
            copy_gracefully(&path,
                            &crate_output.path(output_type));
            if !sess.opts.cg.save_temps && !keep_numbered {
                // The user just wants `foo.x`, not `foo.#module-name#.x`.
                remove(sess, &path);
            }
        } else {
            let ext = crate_output.temp_path(output_type, None)
                                  .extension()
                                  .unwrap()
                                  .to_str()
                                  .unwrap()
                                  .to_owned();
            if crate_output.outputs.contains_key(&output_type) {
                // 2) Multiple codegen units, with `--emit foo=some_name`. We have
                //    no good solution for this case, so warn the user.
                sess.warn(&format!("ignoring emit path because multiple .{} files \
                                    were produced", ext));
            } else if crate_output.single_output_file.is_some() {
                // 3) Multiple codegen units, with `-o some_name`. We have
                //    no good solution for this case, so warn the user.
                sess.warn(&format!("ignoring -o because multiple .{} files \
                                    were produced", ext));
            } else {
                // 4) Multiple codegen units, but no explicit name. We
                //    just leave the `foo.0.x` files in place.
                // (We don't have to do any work in this case.)
            }
        }
    };
    // Flag to indicate whether the user explicitly requested bitcode.
    // Otherwise, we produced it only as a temporary output, and will need
    // to get rid of it.
    for output_type in crate_output.outputs.keys() {
        match *output_type {
            OutputType::Bitcode => {
                user_wants_bitcode = true;
                // Copy to .bc, but always keep the .0.bc. There is a later
                // check to figure out if we should delete .0.bc files, or keep
                // them for making an rlib.
                copy_if_one_unit(OutputType::Bitcode, true);
            }
            OutputType::LlvmAssembly => {
                copy_if_one_unit(OutputType::LlvmAssembly, false);
            }
            OutputType::Assembly => {
                copy_if_one_unit(OutputType::Assembly, false);
            }
            OutputType::Object => {
                user_wants_objects = true;
                copy_if_one_unit(OutputType::Object, true);
            }
            OutputType::Mir |
            OutputType::Metadata |
            OutputType::Exe |
            OutputType::DepInfo => {}
        }
    }
    // Clean up unwanted temporary files.
    // We create the following files by default:
    //  - #crate#.#module-name#.bc
    //  - #crate#.#module-name#.o
    //  - #crate#.crate.metadata.bc
    //  - #crate#.crate.metadata.o
    //  - #crate#.o (linked from crate.##.o)
    //  - #crate#.bc (copied from crate.##.bc)
    // We may create additional files if requested by the user (through
    // `-C save-temps` or `--emit=` flags).
    if !sess.opts.cg.save_temps {
        // Remove the temporary .#module-name#.o objects. If the user didn't
        // explicitly request bitcode (with --emit=bc), and the bitcode is not
        // needed for building an rlib, then we must remove .#module-name#.bc as
        // well.
        // Specific rules for keeping .#module-name#.bc:
        //  - If the user requested bitcode (`user_wants_bitcode`), and
        //    codegen_units > 1, then keep it.
        //  - If the user requested bitcode but codegen_units == 1, then we
        //    can toss .#module-name#.bc because we copied it to .bc earlier.
        //  - If we're not building an rlib and the user didn't request
        //    bitcode, then delete .#module-name#.bc.
        // If you change how this works, also update back::link::link_rlib,
        // where .#module-name#.bc files are (maybe) deleted after making an
        // rlib.
        let needs_crate_object = crate_output.outputs.contains_key(&OutputType::Exe);
        let keep_numbered_bitcode = user_wants_bitcode && sess.codegen_units() > 1;
        let keep_numbered_objects = needs_crate_object ||
                (user_wants_objects && sess.codegen_units() > 1);
        for module in compiled_modules.modules.iter() {
            if let Some(ref path) = module.object {
                if !keep_numbered_objects {
                    remove(sess, path);
                }
            }
            if let Some(ref path) = module.bytecode {
                if !keep_numbered_bitcode {
                    remove(sess, path);
                }
            }
        }
        // The metadata/allocator modules' bitcode is never user-visible
        // output; drop it unless bitcode was explicitly requested.
        if !user_wants_bitcode {
            if let Some(ref path) = compiled_modules.metadata_module.bytecode {
                remove(sess, &path);
            }
            if let Some(ref allocator_module) = compiled_modules.allocator_module {
                if let Some(ref path) = allocator_module.bytecode {
                    remove(sess, path);
                }
            }
        }
    }
    // We leave the following files around by default:
    //  - #crate#.o
    //  - #crate#.crate.metadata.o
    //  - #crate#.bc
    // These are used in linking steps and will be cleaned up afterward.
}
/// Prints a one-line summary of how many modules were re-used from the
/// incremental compilation cache versus freshly translated.
pub fn dump_incremental_data(trans: &CrateTranslation) {
    let reused = trans.modules
                      .iter()
                      .filter(|module| module.pre_existing)
                      .count();
    println!("[incremental] Re-using {} out of {} modules",
             reused,
             trans.modules.len());
}
/// A unit of work for an LLVM worker thread: either optimize a freshly
/// translated module, or run (Thin)LTO on an already-optimized one.
enum WorkItem {
    /// Run LLVM optimization passes (and then codegen) over a translated module.
    Optimize(ModuleTranslation),
    /// Perform LTO on a module handed back by the `lto` module.
    LTO(lto::LtoModuleTranslation),
}
impl WorkItem {
    /// The kind of module this work item operates on; LTO work items are
    /// always treated as regular modules.
    fn kind(&self) -> ModuleKind {
        if let WorkItem::Optimize(ref module) = *self {
            module.kind
        } else {
            ModuleKind::Regular
        }
    }

    /// A human-readable label for this work item, used for timings/logging.
    fn name(&self) -> String {
        match *self {
            WorkItem::Optimize(ref module) => format!("optimize: {}", module.name),
            WorkItem::LTO(ref module) => format!("lto: {}", module.name()),
        }
    }
}
/// Outcome of executing a `WorkItem` on a worker thread.
enum WorkItemResult {
    /// The module went all the way through optimization and codegen.
    Compiled(CompiledModule),
    /// The module was optimized but must be held back for a later LTO pass.
    NeedsLTO(ModuleTranslation),
}
/// Runs a single `WorkItem` to completion on a worker thread.
///
/// `WorkItem::LTO` modules are optimized via the `lto` module and then
/// run through codegen immediately. `WorkItem::Optimize` modules are either
/// copied out of the incremental compilation cache (when pre-existing) or
/// optimized with LLVM; in the latter case the result may be sent back to
/// the coordinator for LTO instead of being codegen'd right away.
fn execute_work_item(cgcx: &CodegenContext,
                     work_item: WorkItem,
                     timeline: &mut Timeline)
    -> Result<WorkItemResult, FatalError>
{
    let diag_handler = cgcx.create_diag_handler();
    let config = cgcx.config(work_item.kind());
    let mtrans = match work_item {
        WorkItem::Optimize(mtrans) => mtrans,
        WorkItem::LTO(mut lto) => {
            // LTO items bypass the pre-existing-module logic below entirely:
            // optimize, codegen, and return here.
            unsafe {
                let module = lto.optimize(cgcx, timeline)?;
                let module = codegen(cgcx, &diag_handler, module, config, timeline)?;
                return Ok(WorkItemResult::Compiled(module))
            }
        }
    };
    let module_name = mtrans.name.clone();
    let pre_existing = match mtrans.source {
        ModuleSource::Translated(_) => None,
        ModuleSource::Preexisting(ref wp) => Some(wp.clone()),
    };
    if let Some(wp) = pre_existing {
        // Re-use the incremental compilation cache: copy (or hard-link) each
        // saved file into the expected output location instead of redoing
        // any LLVM work.
        let incr_comp_session_dir = cgcx.incr_comp_session_dir
                                        .as_ref()
                                        .unwrap();
        let name = &mtrans.name;
        let mut object = None;
        let mut bytecode = None;
        let mut bytecode_compressed = None;
        for (kind, saved_file) in wp.saved_files {
            let obj_out = match kind {
                WorkProductFileKind::Object => {
                    let path = cgcx.output_filenames.temp_path(OutputType::Object, Some(name));
                    object = Some(path.clone());
                    path
                }
                WorkProductFileKind::Bytecode => {
                    let path = cgcx.output_filenames.temp_path(OutputType::Bitcode, Some(name));
                    bytecode = Some(path.clone());
                    path
                }
                WorkProductFileKind::BytecodeCompressed => {
                    let path = cgcx.output_filenames.temp_path(OutputType::Bitcode, Some(name))
                        .with_extension(RLIB_BYTECODE_EXTENSION);
                    bytecode_compressed = Some(path.clone());
                    path
                }
            };
            let source_file = in_incr_comp_dir(&incr_comp_session_dir,
                                               &saved_file);
            debug!("copying pre-existing module `{}` from {:?} to {}",
                   mtrans.name,
                   source_file,
                   obj_out.display());
            match link_or_copy(&source_file, &obj_out) {
                Ok(_) => { }
                Err(err) => {
                    diag_handler.err(&format!("unable to copy {} to {}: {}",
                                              source_file.display(),
                                              obj_out.display(),
                                              err));
                }
            }
        }
        // The set of cached files must exactly match what this config would
        // have emitted if we had re-run LLVM.
        assert_eq!(object.is_some(), config.emit_obj);
        assert_eq!(bytecode.is_some(), config.emit_bc);
        assert_eq!(bytecode_compressed.is_some(), config.emit_bc_compressed);
        Ok(WorkItemResult::Compiled(CompiledModule {
            llmod_id: mtrans.llmod_id.clone(),
            name: module_name,
            kind: ModuleKind::Regular,
            pre_existing: true,
            object,
            bytecode,
            bytecode_compressed,
        }))
    } else {
        debug!("llvm-optimizing {:?}", module_name);
        unsafe {
            optimize(cgcx, &diag_handler, &mtrans, config, timeline)?;
            let lto = cgcx.lto;
            let auto_thin_lto =
                cgcx.thinlto &&
                cgcx.total_cgus > 1 &&
                mtrans.kind != ModuleKind::Allocator;
            // If we're a metadata module we never participate in LTO.
            //
            // If LTO was explicitly requested on the command line, we always
            // LTO everything else.
            //
            // If LTO *wasn't* explicitly requested and we're not a metadata
            // module, then we may automatically do ThinLTO if we've got
            // multiple codegen units. Note, however, that the allocator module
            // doesn't participate here automatically because of linker
            // shenanigans later on.
            if mtrans.kind == ModuleKind::Metadata || (!lto && !auto_thin_lto) {
                let module = codegen(cgcx, &diag_handler, mtrans, config, timeline)?;
                Ok(WorkItemResult::Compiled(module))
            } else {
                Ok(WorkItemResult::NeedsLTO(mtrans))
            }
        }
    }
}
/// Messages sent to the coordinator thread (plus `TranslateItem`, which the
/// coordinator sends to the main thread) to drive the codegen main loop.
enum Message {
    /// A jobserver token (or an error while acquiring one) became available.
    Token(io::Result<Acquired>),
    /// A worker finished optimizing a module which must now wait for LTO.
    NeedsLTO {
        result: ModuleTranslation,
        worker_id: usize,
    },
    /// A worker finished a module completely; `Err(())` means the worker
    /// failed and compilation must abort.
    Done {
        result: Result<CompiledModule, ()>,
        worker_id: usize,
    },
    /// The main thread translated one CGU into an LLVM work item with an
    /// estimated processing cost (used to order the work queue).
    TranslationDone {
        llvm_work_item: WorkItem,
        cost: u64,
    },
    /// Every CGU has been translated; no more work items will arrive.
    TranslationComplete,
    /// Ask the main thread to translate one more CGU.
    TranslateItem,
}
/// A diagnostic serialized for transfer from a worker thread to the main
/// thread, which is the only place diagnostics can actually be emitted.
struct Diagnostic {
    msg: String,                // rendered message text
    code: Option<DiagnosticId>, // error code, if any
    lvl: Level,                 // severity level
}
/// Tracks what the compiler process's implicit jobserver token (i.e. the
/// main thread) is currently being used for.
#[derive(PartialEq, Clone, Copy, Debug)]
enum MainThreadWorkerState {
    /// The main thread is idle; the implicit token is free.
    Idle,
    /// The main thread is translating a CGU.
    Translating,
    /// The implicit token currently backs an LLVM worker thread.
    LLVMing,
}
/// Spawns the coordinator thread that drives parallel codegen and returns a
/// handle to it.
///
/// The coordinator schedules translation on the main thread (via
/// `trans_worker_send`), spawns LLVM worker threads as jobserver tokens
/// become available, orchestrates (Thin)LTO once translation is done, and
/// finally collects every `CompiledModule` into the `CompiledModules`
/// result. See the long comment block inside for the full design notes.
fn start_executing_work(tcx: TyCtxt,
                        crate_info: &CrateInfo,
                        shared_emitter: SharedEmitter,
                        trans_worker_send: Sender<Message>,
                        coordinator_receive: Receiver<Box<Any + Send>>,
                        total_cgus: usize,
                        jobserver: Client,
                        time_graph: Option<TimeGraph>,
                        modules_config: Arc<ModuleConfig>,
                        metadata_config: Arc<ModuleConfig>,
                        allocator_config: Arc<ModuleConfig>)
                        -> thread::JoinHandle<Result<CompiledModules, ()>> {
    let coordinator_send = tcx.tx_to_llvm_workers.clone();
    let mut exported_symbols = FxHashMap();
    exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE));
    for &cnum in tcx.crates().iter() {
        exported_symbols.insert(cnum, tcx.exported_symbols(cnum));
    }
    let exported_symbols = Arc::new(exported_symbols);
    let sess = tcx.sess;
    // First up, convert our jobserver into a helper thread so we can use normal
    // mpsc channels to manage our messages and such. Once we've got the helper
    // thread then request `n-1` tokens because all of our work items are ready
    // to go.
    //
    // Note that the `n-1` is here because we ourselves have a token (our
    // process) and we'll use that token to execute at least one unit of work.
    //
    // After we've requested all these tokens then we'll, when we can, get
    // tokens on `rx` above which will get managed in the main loop below.
    let coordinator_send2 = coordinator_send.clone();
    let helper = jobserver.into_helper_thread(move |token| {
        drop(coordinator_send2.send(Box::new(Message::Token(token))));
    }).expect("failed to spawn helper thread");
    let mut each_linked_rlib_for_lto = Vec::new();
    drop(link::each_linked_rlib(sess, crate_info, &mut |cnum, path| {
        if link::ignored_for_lto(sess, crate_info, cnum) {
            return
        }
        each_linked_rlib_for_lto.push((cnum, path.to_path_buf()));
    }));
    let crate_types = sess.crate_types.borrow();
    let only_rlib = crate_types.len() == 1 &&
                    crate_types[0] == config::CrateTypeRlib;
    let wasm_import_memory =
        attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory");
    let cgcx = CodegenContext {
        crate_types: sess.crate_types.borrow().clone(),
        each_linked_rlib_for_lto,
        // If we're only building an rlib then allow the LTO flag to be passed
        // but don't actually do anything, the full LTO will happen later
        lto: sess.lto() && !only_rlib,
        // Enable ThinLTO if requested, but only if the target we're compiling
        // for doesn't require full LTO. Some targets require one LLVM module
        // (they effectively don't have a linker) so it's up to us to use LTO to
        // link everything together.
        thinlto: sess.thinlto() &&
            !sess.target.target.options.requires_lto &&
            unsafe { llvm::LLVMRustThinLTOAvailable() },
        no_landing_pads: sess.no_landing_pads(),
        fewer_names: sess.fewer_names(),
        save_temps: sess.opts.cg.save_temps,
        opts: Arc::new(sess.opts.clone()),
        time_passes: sess.time_passes(),
        exported_symbols,
        plugin_passes: sess.plugin_llvm_passes.borrow().clone(),
        remark: sess.opts.cg.remark.clone(),
        worker: 0,
        incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()),
        coordinator_send,
        diag_emitter: shared_emitter.clone(),
        time_graph,
        output_filenames: tcx.output_filenames(LOCAL_CRATE),
        regular_module_config: modules_config,
        metadata_module_config: metadata_config,
        allocator_module_config: allocator_config,
        tm_factory: target_machine_factory(tcx.sess),
        total_cgus,
        msvc_imps_needed: msvc_imps_needed(tcx),
        target_pointer_width: tcx.sess.target.target.target_pointer_width.clone(),
        binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen,
        debuginfo: tcx.sess.opts.debuginfo,
        wasm_import_memory: wasm_import_memory,
    };
    // This is the "main loop" of parallel work happening for parallel codegen.
    // It's here that we manage parallelism, schedule work, and work with
    // messages coming from clients.
    //
    // There are a few environmental pre-conditions that shape how the system
    // is set up:
    //
    // - Error reporting only can happen on the main thread because that's the
    //   only place where we have access to the compiler `Session`.
    // - LLVM work can be done on any thread.
    // - Translation can only happen on the main thread.
    // - Each thread doing substantial work must be in possession of a `Token`
    //   from the `Jobserver`.
    // - The compiler process always holds one `Token`. Any additional `Tokens`
    //   have to be requested from the `Jobserver`.
    //
    // Error Reporting
    // ===============
    // The error reporting restriction is handled separately from the rest: We
    // set up a `SharedEmitter` that holds an open channel to the main thread.
    // When an error occurs on any thread, the shared emitter will send the
    // error message to the receiver main thread (`SharedEmitterMain`). The
    // main thread will periodically query this error message queue and emit
    // any error messages it has received. It might even abort compilation if
    // it has received a fatal error. In this case we rely on all other threads
    // being torn down automatically with the main thread.
    // Since the main thread will often be busy doing translation work, error
    // reporting will be somewhat delayed, since the message queue can only be
    // checked in between two work packages.
    //
    // Work Processing Infrastructure
    // ==============================
    // The work processing infrastructure knows three major actors:
    //
    // - the coordinator thread,
    // - the main thread, and
    // - LLVM worker threads
    //
    // The coordinator thread is running a message loop. It instructs the main
    // thread about what work to do when, and it will spawn off LLVM worker
    // threads as open LLVM WorkItems become available.
    //
    // The job of the main thread is to translate CGUs into LLVM work package
    // (since the main thread is the only thread that can do this). The main
    // thread will block until it receives a message from the coordinator, upon
    // which it will translate one CGU, send it to the coordinator and block
    // again. This way the coordinator can control what the main thread is
    // doing.
    //
    // The coordinator keeps a queue of LLVM WorkItems, and when a `Token` is
    // available, it will spawn off a new LLVM worker thread and let it process
    // that WorkItem. When a LLVM worker thread is done with its WorkItem,
    // it will just shut down, which also frees all resources associated with
    // the given LLVM module, and sends a message to the coordinator that the
    // work item has been completed.
    //
    // Work Scheduling
    // ===============
    // The scheduler's goal is to minimize the time it takes to complete all
    // work there is, however, we also want to keep memory consumption low
    // if possible. These two goals are at odds with each other: If memory
    // consumption were not an issue, we could just let the main thread produce
    // LLVM WorkItems at full speed, assuring maximal utilization of
    // Tokens/LLVM worker threads. However, since translation usually is faster
    // than LLVM processing, the queue of LLVM WorkItems would fill up and each
    // WorkItem potentially holds on to a substantial amount of memory.
    //
    // So the actual goal is to always produce just enough LLVM WorkItems as
    // not to starve our LLVM worker threads. That means, once we have enough
    // WorkItems in our queue, we can block the main thread, so it does not
    // produce more until we need them.
    //
    // Doing LLVM Work on the Main Thread
    // ----------------------------------
    // Since the main thread owns the compiler process's implicit `Token`, it is
    // wasteful to keep it blocked without doing any work. Therefore, what we do
    // in this case is: We spawn off an additional LLVM worker thread that helps
    // reduce the queue. The work it is doing corresponds to the implicit
    // `Token`. The coordinator will mark the main thread as being busy with
    // LLVM work. (The actual work happens on another OS thread but we just care
    // about `Tokens`, not actual threads).
    //
    // When any LLVM worker thread finishes while the main thread is marked as
    // "busy with LLVM work", we can do a little switcheroo: We give the Token
    // of the just finished thread to the LLVM worker thread that is working on
    // behalf of the main thread's implicit Token, thus freeing up the main
    // thread again. The coordinator can then again decide what the main thread
    // should do. This allows the coordinator to make decisions at more points
    // in time.
    //
    // Striking a Balance between Throughput and Memory Consumption
    // ------------------------------------------------------------
    // Since our two goals, (1) use as many Tokens as possible and (2) keep
    // memory consumption as low as possible, are in conflict with each other,
    // we have to find a trade off between them. Right now, the goal is to keep
    // all workers busy, which means that no worker should find the queue empty
    // when it is ready to start.
    // How do we achieve this? Good question :) We actually never know how
    // many `Tokens` are potentially available so it's hard to say how much to
    // fill up the queue before switching the main thread to LLVM work. Also we
    // currently don't have a means to estimate how long a running LLVM worker
    // will still be busy with its current WorkItem. However, we know the
    // maximal count of available Tokens that makes sense (=the number of CPU
    // cores), so we can take a conservative guess. The heuristic we use here
    // is implemented in the `queue_full_enough()` function.
    //
    // Some Background on Jobservers
    // -----------------------------
    // It's worth also touching on the management of parallelism here. We don't
    // want to just spawn a thread per work item because while that's optimal
    // parallelism it may overload a system with too many threads or violate our
    // configuration for the maximum amount of cpu to use for this process. To
    // manage this we use the `jobserver` crate.
    //
    // Job servers are an artifact of GNU make and are used to manage
    // parallelism between processes. A jobserver is a glorified IPC semaphore
    // basically. Whenever we want to run some work we acquire the semaphore,
    // and whenever we're done with that work we release the semaphore. In this
    // manner we can ensure that the maximum number of parallel workers is
    // capped at any one point in time.
    //
    // LTO and the coordinator thread
    // ------------------------------
    //
    // The final job the coordinator thread is responsible for is managing LTO
    // and how that works. When LTO is requested what we'll do is collect all
    // optimized LLVM modules into a local vector on the coordinator. Once all
    // modules have been translated and optimized we hand this to the `lto`
    // module for further optimization. The `lto` module will return back a list
    // of more modules to work on, which the coordinator will continue to spawn
    // work for.
    //
    // Each LLVM module is automatically sent back to the coordinator for LTO if
    // necessary. There's already optimizations in place to avoid sending work
    // back to the coordinator if LTO isn't requested.
    return thread::spawn(move || {
        // We pretend to be within the top-level LLVM time-passes task here:
        set_time_depth(1);
        let max_workers = ::num_cpus::get();
        let mut worker_id_counter = 0;
        let mut free_worker_ids = Vec::new();
        // Worker ids are recycled so that timeline ids stay dense.
        let mut get_worker_id = |free_worker_ids: &mut Vec<usize>| {
            if let Some(id) = free_worker_ids.pop() {
                id
            } else {
                let id = worker_id_counter;
                worker_id_counter += 1;
                id
            }
        };
        // This is where we collect codegen units that have gone all the way
        // through translation and LLVM.
        let mut compiled_modules = vec![];
        let mut compiled_metadata_module = None;
        let mut compiled_allocator_module = None;
        let mut needs_lto = Vec::new();
        let mut started_lto = false;
        // This flag tracks whether all items have gone through translations
        let mut translation_done = false;
        // This is the queue of LLVM work items that still need processing.
        let mut work_items = Vec::<(WorkItem, u64)>::new();
        // These are the Jobserver Tokens we currently hold. Does not include
        // the implicit Token the compiler process owns no matter what.
        let mut tokens = Vec::new();
        let mut main_thread_worker_state = MainThreadWorkerState::Idle;
        let mut running = 0;
        let mut llvm_start_time = None;
        // Run the message loop while there's still anything that needs message
        // processing:
        while !translation_done ||
              work_items.len() > 0 ||
              running > 0 ||
              needs_lto.len() > 0 ||
              main_thread_worker_state != MainThreadWorkerState::Idle {
            // While there are still CGUs to be translated, the coordinator has
            // to decide how to utilize the compiler process's implicit Token:
            // For translating more CGUs or for running them through LLVM.
            if !translation_done {
                if main_thread_worker_state == MainThreadWorkerState::Idle {
                    if !queue_full_enough(work_items.len(), running, max_workers) {
                        // The queue is not full enough, translate more items:
                        if let Err(_) = trans_worker_send.send(Message::TranslateItem) {
                            panic!("Could not send Message::TranslateItem to main thread")
                        }
                        main_thread_worker_state = MainThreadWorkerState::Translating;
                    } else {
                        // The queue is full enough to not let the worker
                        // threads starve. Use the implicit Token to do some
                        // LLVM work too.
                        let (item, _) = work_items.pop()
                            .expect("queue empty - queue_full_enough() broken?");
                        let cgcx = CodegenContext {
                            worker: get_worker_id(&mut free_worker_ids),
                            .. cgcx.clone()
                        };
                        maybe_start_llvm_timer(cgcx.config(item.kind()),
                                               &mut llvm_start_time);
                        main_thread_worker_state = MainThreadWorkerState::LLVMing;
                        spawn_work(cgcx, item);
                    }
                }
            } else {
                // If we've finished everything related to normal translation
                // then it must be the case that we've got some LTO work to do.
                // Perform the serial work here of figuring out what we're
                // going to LTO and then push a bunch of work items onto our
                // queue to do LTO
                if work_items.len() == 0 &&
                   running == 0 &&
                   main_thread_worker_state == MainThreadWorkerState::Idle {
                    assert!(!started_lto);
                    assert!(needs_lto.len() > 0);
                    started_lto = true;
                    let modules = mem::replace(&mut needs_lto, Vec::new());
                    for (work, cost) in generate_lto_work(&cgcx, modules) {
                        // Keep the queue sorted by cost (see TranslationDone
                        // below for the rationale).
                        let insertion_index = work_items
                            .binary_search_by_key(&cost, |&(_, cost)| cost)
                            .unwrap_or_else(|e| e);
                        work_items.insert(insertion_index, (work, cost));
                        helper.request_token();
                    }
                }
                // In this branch, we know that everything has been translated,
                // so it's just a matter of determining whether the implicit
                // Token is free to use for LLVM work.
                match main_thread_worker_state {
                    MainThreadWorkerState::Idle => {
                        if let Some((item, _)) = work_items.pop() {
                            let cgcx = CodegenContext {
                                worker: get_worker_id(&mut free_worker_ids),
                                .. cgcx.clone()
                            };
                            maybe_start_llvm_timer(cgcx.config(item.kind()),
                                                   &mut llvm_start_time);
                            main_thread_worker_state = MainThreadWorkerState::LLVMing;
                            spawn_work(cgcx, item);
                        } else {
                            // There is no unstarted work, so let the main thread
                            // take over for a running worker. Otherwise the
                            // implicit token would just go to waste.
                            // We reduce the `running` counter by one. The
                            // `tokens.truncate()` below will take care of
                            // giving the Token back.
                            debug_assert!(running > 0);
                            running -= 1;
                            main_thread_worker_state = MainThreadWorkerState::LLVMing;
                        }
                    }
                    MainThreadWorkerState::Translating => {
                        bug!("trans worker should not be translating after \
                              translation was already completed")
                    }
                    MainThreadWorkerState::LLVMing => {
                        // Already making good use of that token
                    }
                }
            }
            // Spin up what work we can, only doing this while we've got available
            // parallelism slots and work left to spawn.
            while work_items.len() > 0 && running < tokens.len() {
                let (item, _) = work_items.pop().unwrap();
                maybe_start_llvm_timer(cgcx.config(item.kind()),
                                       &mut llvm_start_time);
                let cgcx = CodegenContext {
                    worker: get_worker_id(&mut free_worker_ids),
                    .. cgcx.clone()
                };
                spawn_work(cgcx, item);
                running += 1;
            }
            // Relinquish accidentally acquired extra tokens
            tokens.truncate(running);
            let msg = coordinator_receive.recv().unwrap();
            match *msg.downcast::<Message>().ok().unwrap() {
                // Save the token locally and the next turn of the loop will use
                // this to spawn a new unit of work, or it may get dropped
                // immediately if we have no more work to spawn.
                Message::Token(token) => {
                    match token {
                        Ok(token) => {
                            tokens.push(token);
                            if main_thread_worker_state == MainThreadWorkerState::LLVMing {
                                // If the main thread token is used for LLVM work
                                // at the moment, we turn that thread into a regular
                                // LLVM worker thread, so the main thread is free
                                // to react to translation demand.
                                main_thread_worker_state = MainThreadWorkerState::Idle;
                                running += 1;
                            }
                        }
                        Err(e) => {
                            let msg = &format!("failed to acquire jobserver token: {}", e);
                            shared_emitter.fatal(msg);
                            // Exit the coordinator thread
                            panic!("{}", msg)
                        }
                    }
                }
                Message::TranslationDone { llvm_work_item, cost } => {
                    // We keep the queue sorted by estimated processing cost,
                    // so that more expensive items are processed earlier. This
                    // is good for throughput as it gives the main thread more
                    // time to fill up the queue and it avoids scheduling
                    // expensive items to the end.
                    // Note, however, that this is not ideal for memory
                    // consumption, as LLVM module sizes are not evenly
                    // distributed.
                    let insertion_index =
                        work_items.binary_search_by_key(&cost, |&(_, cost)| cost);
                    let insertion_index = match insertion_index {
                        Ok(idx) | Err(idx) => idx
                    };
                    work_items.insert(insertion_index, (llvm_work_item, cost));
                    helper.request_token();
                    assert_eq!(main_thread_worker_state,
                               MainThreadWorkerState::Translating);
                    main_thread_worker_state = MainThreadWorkerState::Idle;
                }
                Message::TranslationComplete => {
                    translation_done = true;
                    assert_eq!(main_thread_worker_state,
                               MainThreadWorkerState::Translating);
                    main_thread_worker_state = MainThreadWorkerState::Idle;
                }
                // If a thread exits successfully then we drop a token associated
                // with that worker and update our `running` count. We may later
                // re-acquire a token to continue running more work. We may also not
                // actually drop a token here if the worker was running with an
                // "ephemeral token"
                //
                // Note that if the thread failed that means it panicked, so we
                // abort immediately.
                Message::Done { result: Ok(compiled_module), worker_id } => {
                    if main_thread_worker_state == MainThreadWorkerState::LLVMing {
                        main_thread_worker_state = MainThreadWorkerState::Idle;
                    } else {
                        running -= 1;
                    }
                    free_worker_ids.push(worker_id);
                    match compiled_module.kind {
                        ModuleKind::Regular => {
                            compiled_modules.push(compiled_module);
                        }
                        ModuleKind::Metadata => {
                            assert!(compiled_metadata_module.is_none());
                            compiled_metadata_module = Some(compiled_module);
                        }
                        ModuleKind::Allocator => {
                            assert!(compiled_allocator_module.is_none());
                            compiled_allocator_module = Some(compiled_module);
                        }
                    }
                }
                Message::NeedsLTO { result, worker_id } => {
                    assert!(!started_lto);
                    if main_thread_worker_state == MainThreadWorkerState::LLVMing {
                        main_thread_worker_state = MainThreadWorkerState::Idle;
                    } else {
                        running -= 1;
                    }
                    free_worker_ids.push(worker_id);
                    needs_lto.push(result);
                }
                Message::Done { result: Err(()), worker_id: _ } => {
                    shared_emitter.fatal("aborting due to worker thread failure");
                    // Exit the coordinator thread
                    return Err(())
                }
                Message::TranslateItem => {
                    bug!("the coordinator should not receive translation requests")
                }
            }
        }
        if let Some(llvm_start_time) = llvm_start_time {
            let total_llvm_time = Instant::now().duration_since(llvm_start_time);
            // This is the top-level timing for all of LLVM, set the time-depth
            // to zero.
            set_time_depth(0);
            print_time_passes_entry(cgcx.time_passes,
                                    "LLVM passes",
                                    total_llvm_time);
        }
        // Regardless of what order these modules completed in, report them to
        // the backend in the same order every time to ensure that we're handing
        // out deterministic results.
        compiled_modules.sort_by(|a, b| a.name.cmp(&b.name));
        let compiled_metadata_module = compiled_metadata_module
            .expect("Metadata module not compiled?");
        Ok(CompiledModules {
            modules: compiled_modules,
            metadata_module: compiled_metadata_module,
            allocator_module: compiled_allocator_module,
        })
    });
    // A heuristic that determines if we have enough LLVM WorkItems in the
    // queue so that the main thread can do LLVM work instead of translation
    fn queue_full_enough(items_in_queue: usize,
                         workers_running: usize,
                         max_workers: usize) -> bool {
        // Tune me, plz.
        items_in_queue > 0 &&
        items_in_queue >= max_workers.saturating_sub(workers_running / 2)
    }
    // Records the start of the LLVM phase the first time any module config
    // with -Ztime-passes enabled hits LLVM.
    fn maybe_start_llvm_timer(config: &ModuleConfig,
                              llvm_start_time: &mut Option<Instant>) {
        // We keep track of the -Ztime-passes output manually,
        // since the closure-based interface does not fit well here.
        if config.time_passes {
            if llvm_start_time.is_none() {
                *llvm_start_time = Some(Instant::now());
            }
        }
    }
}
/// Worker id reserved for the translation (main) thread in the time graph.
pub const TRANS_WORKER_ID: usize = ::std::usize::MAX;
/// Timeline on which the main thread's translation work is recorded.
pub const TRANS_WORKER_TIMELINE: time_graph::TimelineId =
    time_graph::TimelineId(TRANS_WORKER_ID);
/// Color palette for translation work packages in -Z time-graph output.
pub const TRANS_WORK_PACKAGE_KIND: time_graph::WorkPackageKind =
    time_graph::WorkPackageKind(&["#DE9597", "#FED1D3", "#FDC5C7", "#B46668", "#88494B"]);
/// Color palette for LLVM work packages in -Z time-graph output.
const LLVM_WORK_PACKAGE_KIND: time_graph::WorkPackageKind =
    time_graph::WorkPackageKind(&["#7DB67A", "#C6EEC4", "#ACDAAA", "#579354", "#3E6F3C"]);
/// Spawns an LLVM worker thread to process `work`.
///
/// The worker reports its result back to the coordinator through a `Bomb`
/// destructor so that a message is sent even if the thread panics: a missing
/// result at drop time is reported as a worker failure (`Err(())`).
fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
    let depth = time_depth();
    thread::spawn(move || {
        set_time_depth(depth);
        // Set up a destructor which will fire off a message that we're done as
        // we exit.
        struct Bomb {
            coordinator_send: Sender<Box<Any + Send>>,
            result: Option<WorkItemResult>,
            worker_id: usize,
        }
        impl Drop for Bomb {
            fn drop(&mut self) {
                let worker_id = self.worker_id;
                let msg = match self.result.take() {
                    Some(WorkItemResult::Compiled(m)) => {
                        Message::Done { result: Ok(m), worker_id }
                    }
                    Some(WorkItemResult::NeedsLTO(m)) => {
                        Message::NeedsLTO { result: m, worker_id }
                    }
                    // No result recorded: the worker panicked or bailed out
                    // with a `FatalError`, so report a failure.
                    None => Message::Done { result: Err(()), worker_id }
                };
                drop(self.coordinator_send.send(Box::new(msg)));
            }
        }
        let mut bomb = Bomb {
            coordinator_send: cgcx.coordinator_send.clone(),
            result: None,
            worker_id: cgcx.worker,
        };
        // Execute the work itself, and if it finishes successfully then flag
        // ourselves as a success as well.
        //
        // Note that we ignore any `FatalError` coming out of `execute_work_item`,
        // as a diagnostic was already sent off to the main thread - just
        // surface that there was an error in this worker.
        bomb.result = {
            let timeline = cgcx.time_graph.as_ref().map(|tg| {
                tg.start(time_graph::TimelineId(cgcx.worker),
                         LLVM_WORK_PACKAGE_KIND,
                         &work.name())
            });
            // Use `unwrap_or_else` so the no-op timeline is only constructed
            // when time-graph recording is disabled.
            let mut timeline = timeline.unwrap_or_else(Timeline::noop);
            execute_work_item(&cgcx, work, &mut timeline).ok()
        };
    });
}
pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
let (pname, mut cmd, _) = get_linker(sess);
for arg in &sess.target.target.options.asm_args {
cmd.arg(arg);
}
cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
.arg(&outputs.temp_path(OutputType::Assembly, None));
debug!("{:?}", cmd);
match cmd.output() {
Ok(prog) => {
if !prog.status.success() {
let mut note = prog.stderr.clone();
note.extend_from_slice(&prog.stdout);
sess.struct_err(&format!("linking with `{}` failed: {}",
pname.display(),
prog.status))
.note(&format!("{:?}", &cmd))
.note(str::from_utf8(¬e[..]).unwrap())
.emit();
sess.abort_if_errors();
}
},
Err(e) => {
sess.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
sess.abort_if_errors();
}
}
}
/// Creates an LLVM `PassManagerBuilder` configured from `config` and
/// `opt_level`, hands it to `f` so the caller can populate pass managers,
/// and disposes of it afterwards.
///
/// # Safety
/// Calls straight into the LLVM C API; `llmod` must be a valid module
/// reference for the duration of the call.
pub unsafe fn with_llvm_pmb(llmod: ModuleRef,
                            config: &ModuleConfig,
                            opt_level: llvm::CodeGenOptLevel,
                            f: &mut FnMut(llvm::PassManagerBuilderRef)) {
    // Create the PassManagerBuilder for LLVM. We configure it with
    // reasonable defaults and prepare it to actually populate the pass
    // manager.
    let builder = llvm::LLVMPassManagerBuilderCreate();
    let opt_size = config.opt_size.unwrap_or(llvm::CodeGenOptSizeNone);
    let inline_threshold = config.inline_threshold;
    llvm::LLVMRustConfigurePassManagerBuilder(builder,
                                              opt_level,
                                              config.merge_functions,
                                              config.vectorize_slp,
                                              config.vectorize_loop);
    llvm::LLVMPassManagerBuilderSetSizeLevel(builder, opt_size as u32);
    if opt_size != llvm::CodeGenOptSizeNone {
        llvm::LLVMPassManagerBuilderSetDisableUnrollLoops(builder, 1);
    }
    llvm::LLVMRustAddBuilderLibraryInfo(builder, llmod, config.no_builtins);
    // Here we match what clang does (kinda). For O0 we only inline
    // always-inline functions (but don't add lifetime intrinsics), at O1 we
    // inline with lifetime intrinsics, and O2+ we add an inliner with a
    // thresholds copied from clang.
    //
    // NOTE: arm order matters - an explicit `-C inline-threshold` wins over
    // everything, then -O3, then the size levels, then the remaining opt
    // levels.
    match (opt_level, opt_size, inline_threshold) {
        (.., Some(t)) => {
            llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, t as u32);
        }
        (llvm::CodeGenOptLevel::Aggressive, ..) => {
            llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 275);
        }
        (_, llvm::CodeGenOptSizeDefault, _) => {
            llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 75);
        }
        (_, llvm::CodeGenOptSizeAggressive, _) => {
            llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 25);
        }
        (llvm::CodeGenOptLevel::None, ..) => {
            llvm::LLVMRustAddAlwaysInlinePass(builder, false);
        }
        (llvm::CodeGenOptLevel::Less, ..) => {
            llvm::LLVMRustAddAlwaysInlinePass(builder, true);
        }
        (llvm::CodeGenOptLevel::Default, ..) => {
            llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 225);
        }
        (llvm::CodeGenOptLevel::Other, ..) => {
            bug!("CodeGenOptLevel::Other selected")
        }
    }
    f(builder);
    llvm::LLVMPassManagerBuilderDispose(builder);
}
/// Messages sent from worker threads' `SharedEmitter`s to the
/// `SharedEmitterMain` polled on the main thread.
enum SharedEmitterMessage {
    /// Emit a diagnostic on the main thread.
    Diagnostic(Diagnostic),
    /// Report an inline-assembly error at the expansion identified by the cookie.
    InlineAsmError(u32, String),
    /// Ask the main thread to abort compilation if any errors were reported.
    AbortIfErrors,
    /// Report a fatal error message.
    Fatal(String),
}
/// A `Clone`-able handle that lets any thread queue diagnostics for
/// emission on the main thread (the only place a `Session` is available).
#[derive(Clone)]
pub struct SharedEmitter {
    sender: Sender<SharedEmitterMessage>,
}
/// Receiving end of the `SharedEmitter` channel; drained on the main thread
/// via `check()`.
pub struct SharedEmitterMain {
    receiver: Receiver<SharedEmitterMessage>,
}
impl SharedEmitter {
    /// Creates a connected sender/receiver pair: the `SharedEmitter` goes to
    /// worker threads, the `SharedEmitterMain` stays on the main thread.
    pub fn new() -> (SharedEmitter, SharedEmitterMain) {
        let (sender, receiver) = channel();
        let emitter = SharedEmitter { sender };
        let main = SharedEmitterMain { receiver };
        (emitter, main)
    }

    /// Queues an inline-asm error for the given expansion cookie. Send
    /// failures (main thread already gone) are deliberately ignored.
    fn inline_asm_error(&self, cookie: u32, msg: String) {
        let _ = self.sender.send(SharedEmitterMessage::InlineAsmError(cookie, msg));
    }

    /// Queues a fatal error message; send failures are deliberately ignored.
    fn fatal(&self, msg: &str) {
        let _ = self.sender.send(SharedEmitterMessage::Fatal(msg.to_string()));
    }
}
impl Emitter for SharedEmitter {
    /// Forwards a diagnostic (and each of its children) to the main thread,
    /// then asks it to abort if any errors have accumulated.
    fn emit(&mut self, db: &DiagnosticBuilder) {
        let primary = Diagnostic {
            msg: db.message(),
            code: db.code.clone(),
            lvl: db.level,
        };
        let _ = self.sender.send(SharedEmitterMessage::Diagnostic(primary));
        for child in &db.children {
            // Child diagnostics (notes/helps) never carry their own code.
            let sub = Diagnostic {
                msg: child.message(),
                code: None,
                lvl: child.level,
            };
            let _ = self.sender.send(SharedEmitterMessage::Diagnostic(sub));
        }
        let _ = self.sender.send(SharedEmitterMessage::AbortIfErrors);
    }
}
impl SharedEmitterMain {
    /// Drains queued messages and replays them on `sess`'s diagnostic
    /// machinery.
    ///
    /// With `blocking == true` this waits for each message (`recv`); otherwise
    /// it only handles what is already queued (`try_recv`). In both cases the
    /// loop ends once the channel yields an error (empty or disconnected).
    pub fn check(&self, sess: &Session, blocking: bool) {
        loop {
            let message = if blocking {
                match self.receiver.recv() {
                    Ok(message) => Ok(message),
                    Err(_) => Err(()),
                }
            } else {
                match self.receiver.try_recv() {
                    Ok(message) => Ok(message),
                    Err(_) => Err(()),
                }
            };
            match message {
                Ok(SharedEmitterMessage::Diagnostic(diag)) => {
                    let handler = sess.diagnostic();
                    // Preserve the original error code when one was attached.
                    match diag.code {
                        Some(ref code) => {
                            handler.emit_with_code(&MultiSpan::new(),
                                                   &diag.msg,
                                                   code.clone(),
                                                   diag.lvl);
                        }
                        None => {
                            handler.emit(&MultiSpan::new(),
                                         &diag.msg,
                                         diag.lvl);
                        }
                    }
                }
                Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => {
                    // Try to recover the asm's span from the cookie so the
                    // error points at the offending source; otherwise report
                    // without a span.
                    match Mark::from_u32(cookie).expn_info() {
                        Some(ei) => sess.span_err(ei.call_site, &msg),
                        None => sess.err(&msg),
                    }
                }
                Ok(SharedEmitterMessage::AbortIfErrors) => {
                    sess.abort_if_errors();
                }
                Ok(SharedEmitterMessage::Fatal(msg)) => {
                    sess.fatal(&msg);
                }
                Err(_) => {
                    break;
                }
            }
        }
    }
}
/// Handle to translation/LLVM work running in the background; `join` blocks
/// until it completes and assembles the final `CrateTranslation`.
pub struct OngoingCrateTranslation {
    crate_name: Symbol,
    link: LinkMeta,
    metadata: EncodedMetadata,
    windows_subsystem: Option<String>,
    linker_info: LinkerInfo,
    // When set, `join` runs the external assembler over the emitted assembly.
    no_integrated_as: bool,
    crate_info: CrateInfo,
    // When present, dumped to "<crate>-timings" at the end of `join`.
    time_graph: Option<TimeGraph>,
    // Channel to the coordinator; used to announce `TranslationComplete`.
    coordinator_send: Sender<Box<Any + Send>>,
    // Yields `TranslateItem` tokens that pace item translation.
    trans_worker_receive: Receiver<Message>,
    // Receiving side for diagnostics produced off the main thread.
    shared_emitter_main: SharedEmitterMain,
    // Joined in `join` to obtain the compiled modules (or an error).
    future: thread::JoinHandle<Result<CompiledModules, ()>>,
    output_filenames: Arc<OutputFilenames>,
}
impl OngoingCrateTranslation {
    /// Waits for the background work to finish, forwards all pending
    /// diagnostics, produces the final output artifacts, and returns the
    /// completed `CrateTranslation`. Aborts the session if the workers
    /// reported errors, and runs the external assembler when requested.
    pub fn join(self, sess: &Session, dep_graph: &DepGraph) -> CrateTranslation {
        // Blocking drain: replay every diagnostic the workers queued.
        self.shared_emitter_main.check(sess, true);
        let compiled_modules = match self.future.join() {
            Ok(Ok(compiled_modules)) => compiled_modules,
            Ok(Err(())) => {
                // The worker signalled failure; it should already have
                // reported errors, so this abort is expected to fire.
                sess.abort_if_errors();
                panic!("expected abort due to worker thread errors")
            },
            Err(_) => {
                sess.fatal("Error during translation/LLVM phase.");
            }
        };
        sess.abort_if_errors();
        if let Some(time_graph) = self.time_graph {
            time_graph.dump(&format!("{}-timings", self.crate_name));
        }
        copy_module_artifacts_into_incr_comp_cache(sess,
                                                   dep_graph,
                                                   &compiled_modules);
        produce_final_output_artifacts(sess,
                                       &compiled_modules,
                                       &self.output_filenames);
        // FIXME: time_llvm_passes support - does this use a global context or
        // something?
        if sess.codegen_units() == 1 && sess.time_llvm_passes() {
            unsafe { llvm::LLVMRustPrintPassTimings(); }
        }
        let trans = CrateTranslation {
            crate_name: self.crate_name,
            link: self.link,
            metadata: self.metadata,
            windows_subsystem: self.windows_subsystem,
            linker_info: self.linker_info,
            crate_info: self.crate_info,
            modules: compiled_modules.modules,
            allocator_module: compiled_modules.allocator_module,
            metadata_module: compiled_modules.metadata_module,
        };
        if self.no_integrated_as {
            run_assembler(sess, &self.output_filenames);
            // HACK the linker expects the object file to be named foo.0.o but
            // `run_assembler` produces an object named just foo.o. Rename it if we
            // are going to build an executable
            if sess.opts.output_types.contains_key(&OutputType::Exe) {
                let f = self.output_filenames.path(OutputType::Object);
                rename_or_copy_remove(&f,
                    f.with_file_name(format!("{}.0.o",
                                             f.file_stem().unwrap().to_string_lossy()))).unwrap();
            }
            // Remove assembly source, unless --save-temps was specified
            if !sess.opts.cg.save_temps {
                fs::remove_file(&self.output_filenames
                                     .temp_path(OutputType::Assembly, None)).unwrap();
            }
        }
        trans
    }
    /// Forwards an already-translated module to the LLVM workers after
    /// obtaining a translation token and surfacing any pending errors.
    pub fn submit_pre_translated_module_to_llvm(&self,
                                                tcx: TyCtxt,
                                                mtrans: ModuleTranslation) {
        self.wait_for_signal_to_translate_item();
        self.check_for_errors(tcx.sess);
        // These are generally cheap and won't throw off scheduling.
        let cost = 0;
        submit_translated_module_to_llvm(tcx, mtrans, cost);
    }
    /// Announces to the coordinator that no more modules will be translated.
    pub fn translation_finished(&self, tcx: TyCtxt) {
        self.wait_for_signal_to_translate_item();
        self.check_for_errors(tcx.sess);
        drop(self.coordinator_send.send(Box::new(Message::TranslationComplete)));
    }
    /// Non-blocking drain of diagnostics queued by the worker threads.
    pub fn check_for_errors(&self, sess: &Session) {
        self.shared_emitter_main.check(sess, false);
    }
    /// Blocks until the coordinator grants permission (`TranslateItem`) to
    /// translate another item.
    pub fn wait_for_signal_to_translate_item(&self) {
        match self.trans_worker_receive.recv() {
            Ok(Message::TranslateItem) => {
                // Nothing to do
            }
            Ok(_) => panic!("unexpected message"),
            Err(_) => {
                // One of the LLVM threads must have panicked, fall through so
                // error handling can be reached.
            }
        }
    }
}
/// Wraps `mtrans` in an `Optimize` work item and queues it, with its
/// scheduling `cost`, on the channel to the LLVM workers.
pub fn submit_translated_module_to_llvm(tcx: TyCtxt,
                                        mtrans: ModuleTranslation,
                                        cost: u64) {
    let llvm_work_item = WorkItem::Optimize(mtrans);
    // `drop` discards the send `Result`; a disconnected receiver is ignored.
    drop(tcx.tx_to_llvm_workers.send(Box::new(Message::TranslationDone {
        llvm_work_item,
        cost,
    })));
}
// Whether `__imp_` stub globals must be generated: only on MSVC-like targets,
// and only when at least one of the crate types being built is an rlib.
fn msvc_imps_needed(tcx: TyCtxt) -> bool {
    if !tcx.sess.target.target.options.is_like_msvc {
        return false
    }
    tcx.sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib)
}
// Create a `__imp_<symbol> = &symbol` global for every public static `symbol`.
// This is required to satisfy `dllimport` references to static data in .rlibs
// when using MSVC linker. We do this only for data, as linker can fix up
// code references on its own.
// See #26591, #27438
fn create_msvc_imps(cgcx: &CodegenContext, llcx: ContextRef, llmod: ModuleRef) {
    if !cgcx.msvc_imps_needed {
        return
    }
    // The x86 ABI seems to require that leading underscores are added to symbol
    // names, so we need an extra underscore on 32-bit. There's also a leading
    // '\x01' here which disables LLVM's symbol mangling (e.g. no extra
    // underscores added in front).
    let prefix = if cgcx.target_pointer_width == "32" {
        "\x01__imp__"
    } else {
        "\x01__imp_"
    };
    unsafe {
        let i8p_ty = Type::i8p_llcx(llcx);
        // Materialize the candidate list into a Vec before any new globals
        // are added to the module below.
        let globals = base::iter_globals(llmod)
            .filter(|&val| {
                // Only externally-visible *definitions* get a stub;
                // declarations (LLVMIsDeclaration != 0) are skipped.
                llvm::LLVMRustGetLinkage(val) == llvm::Linkage::ExternalLinkage &&
                    llvm::LLVMIsDeclaration(val) == 0
            })
            .map(move |val| {
                // Stub name = prefix + original symbol name.
                let name = CStr::from_ptr(llvm::LLVMGetValueName(val));
                let mut imp_name = prefix.as_bytes().to_vec();
                imp_name.extend(name.to_bytes());
                let imp_name = CString::new(imp_name).unwrap();
                (imp_name, val)
            })
            .collect::<Vec<_>>();
        for (imp_name, val) in globals {
            // `__imp_<name>` is an i8* global initialized with the address of
            // the original symbol.
            let imp = llvm::LLVMAddGlobal(llmod,
                                          i8p_ty.to_ref(),
                                          imp_name.as_ptr() as *const _);
            llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty));
            llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage);
        }
    }
}
| 40.5359 | 100 | 0.56711 |
cc64d91ffb338de7af3e7b8c9187c4a636c4aeee | 3,384 | //! Implements the key expansions for the encryption and MAC algorithms.
use definitions::{algorithms::KeyExchangeHashFunction, write};
use num_bigint::BigInt;
/// References the buffers where the keys are generated.
///
/// Every field is a mutable byte slice: `expand_keys` writes the derived key
/// material directly into each buffer, and each slice's length determines how
/// many bytes are produced for it.
pub(super) struct Keys<'a> {
    /// The encryption IV used for client to server communication.
    pub(super) encryption_c2s_iv: &'a mut [u8],
    /// The encryption IV used for server to client communication.
    pub(super) encryption_s2c_iv: &'a mut [u8],
    /// The encryption key used for client to server communication.
    pub(super) encryption_c2s_key: &'a mut [u8],
    /// The encryption key used for server to client communication.
    pub(super) encryption_s2c_key: &'a mut [u8],
    /// The mac key used for client to server communication.
    pub(super) mac_c2s_key: &'a mut [u8],
    /// The mac key used for server to client communication.
    pub(super) mac_s2c_key: &'a mut [u8],
}
/// Fills all six buffers in `keys` with key material derived per RFC 4253,
/// section 7.2: each key starts as `HASH(K || H || X || session_id)` with a
/// distinct letter `X` in "A".."F", and is lengthened by `expand_key` when
/// the target buffer needs more bytes than one hash output provides.
pub(super) fn expand_keys(
    keys: &mut Keys,
    hash_fn: KeyExchangeHashFunction,
    shared_secret: &BigInt,
    exchange_hash: &[u8],
    session_id: &[u8],
) {
    // `key_vec` is a vector constructed as
    // `HASH(shared_secret || exchange_hash || X || session_id)` according to section 7.2 of
    // RFC 4253
    //
    // X is one of `"A"`, `"B"`, `"C"`, `"D"`, `"E"` or `"F"`, depending on the algorithm.
    let (letter_offset, mut initial_key_vec) = {
        let mut key_vec = Vec::new();
        write::mpint(shared_secret, &mut key_vec).expect("vec writes cannot fail");
        key_vec.reserve_exact(exchange_hash.len() + 1 + session_id.len());
        key_vec.extend(exchange_hash);
        // Record where the algorithm letter lives so it can be patched in
        // place once per key below, instead of rebuilding the whole buffer.
        let letter_offset = key_vec.len();
        // This will be replaced with the correct value for the given algorithm.
        key_vec.extend(b"X");
        key_vec.extend(session_id);
        (letter_offset, key_vec)
    };
    // Lazily-built `K || H` prefix, shared across all `expand_key` calls.
    let mut expanded_key_vec = None;
    let mut expand_into_slice = |slice: &mut [u8], letter: u8| {
        initial_key_vec[letter_offset] = letter;
        let mut vec = hash_fn(&initial_key_vec);
        // Grow the derived material until it covers the whole slice.
        expand_key(
            &mut vec,
            &mut expanded_key_vec,
            shared_secret,
            exchange_hash,
            slice.len(),
            hash_fn,
        );
        slice.copy_from_slice(&vec[..slice.len()]);
    };
    expand_into_slice(keys.encryption_c2s_iv, b'A');
    expand_into_slice(keys.encryption_s2c_iv, b'B');
    expand_into_slice(keys.encryption_c2s_key, b'C');
    expand_into_slice(keys.encryption_s2c_key, b'D');
    expand_into_slice(keys.mac_c2s_key, b'E');
    expand_into_slice(keys.mac_s2c_key, b'F');
}
/// Expands the given key to the needed size.
///
/// Repeatedly appends `HASH(K || H || key-so-far)` to `key` until it holds at
/// least `len` bytes (the extension rule of RFC 4253, section 7.2).
/// `expanded_key_vec` caches the shared `K || H` prefix so repeated calls
/// don't re-serialize the shared secret; the prefix is restored by the final
/// `truncate` before returning.
pub(super) fn expand_key(
    key: &mut Vec<u8>,
    expanded_key_vec: &mut Option<Vec<u8>>,
    shared_secret: &BigInt,
    exchange_hash: &[u8],
    len: usize,
    hash_fn: KeyExchangeHashFunction,
) {
    // Already long enough: nothing to do.
    if key.len() >= len {
        return;
    }
    let key_vec = expanded_key_vec.get_or_insert_with(|| {
        let mut vec = Vec::new();
        write::mpint(shared_secret, &mut vec).expect("vec writes cannot fail");
        vec.extend(exchange_hash);
        vec
    });
    let start_len = key_vec.len();
    key_vec.extend(&key[..]);
    while key.len() < len {
        // Each round hashes the prefix plus everything derived so far.
        let hash = hash_fn(key_vec);
        key.extend(&hash);
        key_vec.extend(&hash);
    }
    // Drop the per-call suffix, leaving only the cached `K || H` prefix.
    key_vec.truncate(start_len);
}
| 30.763636 | 92 | 0.633274 |
56e6bacf93e30eef6f0a5c05963e5fd9468e36bf | 112,971 | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Vectors
#[warn(non_camel_case_types)];
use cast::transmute;
use cast;
use container::{Container, Mutable};
use cmp;
use cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering, Less, Equal, Greater};
use clone::Clone;
use iterator::{FromIterator, Iterator, IteratorUtil};
use iter::FromIter;
use kinds::Copy;
use libc;
use num::Zero;
use option::{None, Option, Some};
use ptr::to_unsafe_ptr;
use ptr;
use ptr::RawPtr;
use sys;
use sys::size_of;
use uint;
use unstable::intrinsics;
#[cfg(stage0)]
use intrinsic::{get_tydesc};
#[cfg(not(stage0))]
use unstable::intrinsics::{get_tydesc};
use vec;
use util;
#[cfg(not(test))] use cmp::Equiv;
/// FFI declarations for the runtime's vector-reserve entry points; see
/// `reserve` below for the caller.
pub mod rustrt {
    use libc;
    use vec::raw;
    #[cfg(stage0)]
    use intrinsic::{TyDesc};
    #[cfg(not(stage0))]
    use unstable::intrinsics::{TyDesc};
    #[abi = "cdecl"]
    pub extern {
        // These names are terrible. reserve_shared applies
        // to ~[] and reserve_shared_actual applies to @[].
        #[fast_ffi]
        unsafe fn vec_reserve_shared(t: *TyDesc,
                                     v: **raw::VecRepr,
                                     n: libc::size_t);
        #[fast_ffi]
        unsafe fn vec_reserve_shared_actual(t: *TyDesc,
                                            v: **raw::VecRepr,
                                            n: libc::size_t);
    }
}
/// Returns true if the two vectors hold the same number of elements
pub fn same_length<T, U>(xs: &const [T], ys: &const [U]) -> bool {
    let (xlen, ylen) = (xs.len(), ys.len());
    xlen == ylen
}
/**
 * Reserves capacity for exactly `n` elements in the given vector.
 *
 * If the capacity for `v` is already equal to or greater than the requested
 * capacity, then no action is taken.
 *
 * # Arguments
 *
 * * v - A vector
 * * n - The number of elements to reserve space for
 */
#[inline]
pub fn reserve<T>(v: &mut ~[T], n: uint) {
    // Only make the (slow) call into the runtime if we have to
    use managed;
    if capacity(v) < n {
        unsafe {
            let ptr: **raw::VecRepr = cast::transmute(v);
            let td = get_tydesc::<T>();
            // RC_MANAGED_UNIQUE marks a vector on the managed heap; those use
            // the "actual" entry point (see the comments in `rustrt` above).
            if ((**ptr).box_header.ref_count ==
                managed::raw::RC_MANAGED_UNIQUE) {
                rustrt::vec_reserve_shared_actual(td, ptr, n as libc::size_t);
            } else {
                rustrt::vec_reserve_shared(td, ptr, n as libc::size_t);
            }
        }
    }
}
/**
 * Reserves capacity for at least `n` elements in the given vector.
 *
 * This function will over-allocate in order to amortize the allocation costs
 * in scenarios where the caller may need to repeatedly reserve additional
 * space.
 *
 * If the capacity for `v` is already equal to or greater than the requested
 * capacity, then no action is taken.
 *
 * # Arguments
 *
 * * v - A vector
 * * n - The number of elements to reserve space for
 */
pub fn reserve_at_least<T>(v: &mut ~[T], n: uint) {
    // Rounding up to a power of two is what amortizes repeated growth.
    reserve(v, uint::next_power_of_two(n));
}
/// Returns the number of elements the vector can hold without reallocating
#[inline]
pub fn capacity<T>(v: &const ~[T]) -> uint {
    unsafe {
        let repr: **raw::VecRepr = transmute(v);
        // `alloc` is a byte count, so divide by the (nonzero) element size.
        (**repr).unboxed.alloc / sys::nonzero_size_of::<T>()
    }
}
/**
 * Creates and initializes an owned vector.
 *
 * Creates an owned vector of size `n_elts` and initializes the elements
 * to the value returned by the function `op`.
 */
pub fn from_fn<T>(n_elts: uint, op: &fn(uint) -> T) -> ~[T] {
    unsafe {
        let mut v = with_capacity(n_elts);
        do as_mut_buf(v) |p, _len| {
            let mut i: uint = 0u;
            while i < n_elts {
                // Write each element in place without reading (and thus
                // dropping) the uninitialized slot first.
                intrinsics::move_val_init(&mut(*ptr::mut_offset(p, i)), op(i));
                i += 1u;
            }
        }
        // Only after every slot is initialized is the length made visible.
        raw::set_len(&mut v, n_elts);
        v
    }
}
/**
 * Creates and initializes an owned vector.
 *
 * Creates an owned vector of size `n_elts` and initializes the elements
 * to the value `t`.
 */
pub fn from_elem<T:Copy>(n_elts: uint, t: T) -> ~[T] {
    // FIXME (#7136): manually inline from_fn for 2x plus speedup (sadly very
    // important, from_elem is a bottleneck in borrowck!). Unfortunately it
    // still is substantially slower than using the unsafe
    // vec::with_capacity/ptr::set_memory for primitive types.
    unsafe {
        let mut v = with_capacity(n_elts);
        do as_mut_buf(v) |p, _len| {
            let mut i = 0u;
            while i < n_elts {
                // Each slot gets its own copy of `t`.
                intrinsics::move_val_init(&mut(*ptr::mut_offset(p, i)), copy t);
                i += 1u;
            }
        }
        raw::set_len(&mut v, n_elts);
        v
    }
}
/// Creates a new unique vector with the same contents as the slice
pub fn to_owned<T:Copy>(t: &[T]) -> ~[T] {
    from_fn(t.len(), |i| copy t[i])
}
/// Creates a new vector with a capacity of `capacity` (and length zero)
pub fn with_capacity<T>(capacity: uint) -> ~[T] {
    let mut vec = ~[];
    reserve(&mut vec, capacity);
    vec
}
/**
 * Builds a vector by calling a provided function with an argument
 * function that pushes an element to the back of a vector.
 * This version takes an initial capacity for the vector.
 *
 * # Arguments
 *
 * * size - An initial size of the vector to reserve
 * * builder - A function that will construct the vector. It receives
 *             as an argument a function that will push an element
 *             onto the vector being constructed.
 */
#[inline]
pub fn build_sized<A>(size: uint, builder: &fn(push: &fn(v: A))) -> ~[A] {
    let mut vec = with_capacity(size);
    // The closure handed to `builder` simply appends to the local vector.
    builder(|x| vec.push(x));
    vec
}
/**
 * Builds a vector by calling a provided function with an argument
 * function that pushes an element to the back of a vector.
 *
 * # Arguments
 *
 * * builder - A function that will construct the vector. It receives
 *             as an argument a function that will push an element
 *             onto the vector being constructed.
 */
#[inline]
pub fn build<A>(builder: &fn(push: &fn(v: A))) -> ~[A] {
    // 4 is the default capacity when the caller gives no size hint.
    build_sized(4, builder)
}
/**
 * Builds a vector by calling a provided function with an argument
 * function that pushes an element to the back of a vector.
 * This version takes an initial size for the vector.
 *
 * # Arguments
 *
 * * size - An option, maybe containing initial size of the vector to reserve
 * * builder - A function that will construct the vector. It receives
 *             as an argument a function that will push an element
 *             onto the vector being constructed.
 */
#[inline]
pub fn build_sized_opt<A>(size: Option<uint>,
                          builder: &fn(push: &fn(v: A)))
                       -> ~[A] {
    // Fall back to the same default capacity (4) as `build` when size is None.
    build_sized(size.get_or_default(4), builder)
}
// Accessors
/// Returns the first element of a vector; fails if the vector is empty
pub fn head<'r,T>(v: &'r [T]) -> &'r T {
    if v.len() == 0 { fail!("head: empty vector") }
    &v[0]
}
/// Returns `Some(x)` where `x` is the first element of the slice `v`,
/// or `None` if the vector is empty.
pub fn head_opt<'r,T>(v: &'r [T]) -> Option<&'r T> {
    if v.len() == 0 { None } else { Some(&v[0]) }
}
/// Returns a vector containing all but the first element of a slice
pub fn tail<'r,T>(v: &'r [T]) -> &'r [T] { slice(v, 1, v.len()) }
/// Returns a vector containing all but the first `n` elements of a slice
pub fn tailn<'r,T>(v: &'r [T], n: uint) -> &'r [T] { slice(v, n, v.len()) }
/// Returns a vector containing all but the last element of a slice
pub fn init<'r,T>(v: &'r [T]) -> &'r [T] { slice(v, 0, v.len() - 1) }
/// Returns a vector containing all but the last `n` elements of a slice
pub fn initn<'r,T>(v: &'r [T], n: uint) -> &'r [T] {
    slice(v, 0, v.len() - n)
}
/// Returns the last element of the slice `v`, failing if the slice is empty.
pub fn last<'r,T>(v: &'r [T]) -> &'r T {
    if v.len() == 0 { fail!("last: empty vector") }
    &v[v.len() - 1]
}
/// Returns `Some(x)` where `x` is the last element of the slice `v`, or
/// `None` if the vector is empty.
pub fn last_opt<'r,T>(v: &'r [T]) -> Option<&'r T> {
    if v.len() == 0 { None } else { Some(&v[v.len() - 1]) }
}
/// Return a slice that points into another slice.
/// Fails (via assert) unless `start <= end <= v.len()`.
#[inline]
pub fn slice<'r,T>(v: &'r [T], start: uint, end: uint) -> &'r [T] {
    assert!(start <= end);
    assert!(end <= v.len());
    do as_imm_buf(v) |p, _len| {
        unsafe {
            // Slices are (pointer, length-in-bytes) pairs, hence the
            // multiplication by the element size.
            transmute((ptr::offset(p, start),
                       (end - start) * sys::nonzero_size_of::<T>()))
        }
    }
}
/// Return a mutable slice that points into another slice.
/// Fails (via assert) unless `start <= end <= v.len()`.
#[inline]
pub fn mut_slice<'r,T>(v: &'r mut [T], start: uint, end: uint)
                    -> &'r mut [T] {
    assert!(start <= end);
    assert!(end <= v.len());
    do as_mut_buf(v) |p, _len| {
        unsafe {
            transmute((ptr::mut_offset(p, start),
                       (end - start) * sys::nonzero_size_of::<T>()))
        }
    }
}
/// Return a const slice that points into another slice.
/// Fails (via assert) unless `start <= end <= v.len()`.
#[inline]
pub fn const_slice<'r,T>(v: &'r const [T], start: uint, end: uint)
                      -> &'r const [T] {
    assert!(start <= end);
    assert!(end <= v.len());
    do as_const_buf(v) |p, _len| {
        unsafe {
            transmute((ptr::const_offset(p, start),
                       (end - start) * sys::nonzero_size_of::<T>()))
        }
    }
}
// Copies
/// Split the vector `v` by applying each element against the predicate `f`.
/// Elements for which `f` returns true act as separators and are dropped.
pub fn split<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> ~[~[T]] {
    let ln = v.len();
    if (ln == 0u) { return ~[] }
    let mut start = 0u;
    let mut result = ~[];
    while start < ln {
        match position_between(v, start, ln, f) {
            None => break,
            Some(i) => {
                result.push(slice(v, start, i).to_owned());
                // Skip past the separator element itself.
                start = i + 1u;
            }
        }
    }
    // Whatever remains after the last separator forms the final piece.
    result.push(slice(v, start, ln).to_owned());
    result
}
/**
 * Split the vector `v` by applying each element against the predicate `f` up
 * to `n` times.
 */
pub fn splitn<T:Copy>(v: &[T], n: uint, f: &fn(t: &T) -> bool) -> ~[~[T]] {
    let ln = v.len();
    if (ln == 0u) { return ~[] }
    let mut start = 0u;
    let mut count = n;
    let mut result = ~[];
    while start < ln && count > 0u {
        match position_between(v, start, ln, f) {
            None => break,
            Some(i) => {
                result.push(slice(v, start, i).to_owned());
                // Make sure to skip the separator.
                start = i + 1u;
                count -= 1u;
            }
        }
    }
    result.push(slice(v, start, ln).to_owned());
    result
}
/**
 * Reverse split the vector `v` by applying each element against the predicate
 * `f`, scanning from the end toward the start.
 */
pub fn rsplit<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> ~[~[T]] {
    let ln = v.len();
    if (ln == 0) { return ~[] }
    let mut end = ln;
    let mut result = ~[];
    while end > 0 {
        match rposition_between(v, 0, end, f) {
            None => break,
            Some(i) => {
                result.push(slice(v, i + 1, end).to_owned());
                end = i;
            }
        }
    }
    result.push(slice(v, 0u, end).to_owned());
    // Pieces were collected back-to-front; restore original order.
    reverse(result);
    result
}
/**
 * Reverse split the vector `v` by applying each element against the predicate
 * `f` up to `n` times.
 */
pub fn rsplitn<T:Copy>(v: &[T], n: uint, f: &fn(t: &T) -> bool) -> ~[~[T]] {
    let ln = v.len();
    if (ln == 0u) { return ~[] }
    let mut end = ln;
    let mut count = n;
    let mut result = ~[];
    while end > 0u && count > 0u {
        match rposition_between(v, 0u, end, f) {
            None => break,
            Some(i) => {
                result.push(slice(v, i + 1u, end).to_owned());
                // Make sure to skip the separator.
                end = i;
                count -= 1u;
            }
        }
    }
    result.push(slice(v, 0u, end).to_owned());
    reverse(result);
    result
}
/**
 * Partitions a vector into two new vectors: those that satisfies the
 * predicate, and those that do not. Consumes the input vector.
 */
pub fn partition<T>(v: ~[T], f: &fn(&T) -> bool) -> (~[T], ~[T]) {
    let mut lefts = ~[];
    let mut rights = ~[];
    // FIXME (#4355 maybe): using v.consume here crashes
    // do v.consume |_, elt| {
    do consume(v) |_, elt| {
        if f(&elt) {
            lefts.push(elt);
        } else {
            rights.push(elt);
        }
    }
    (lefts, rights)
}
/**
 * Copies each element of `v` into one of two new vectors: the first holds
 * the elements satisfying the predicate, the second holds the rest.
 * The input slice is left untouched.
 */
pub fn partitioned<T:Copy>(v: &[T], f: &fn(&T) -> bool) -> (~[T], ~[T]) {
    let mut accepted = ~[];
    let mut rejected = ~[];
    for v.iter().advance |elt| {
        if !f(elt) {
            rejected.push(copy *elt);
        } else {
            accepted.push(copy *elt);
        }
    }
    (accepted, rejected)
}
// Mutators
/// Removes the first element from a vector and return it.
/// Fails (via assert) if the vector is empty. Preserves the order of the
/// remaining elements.
pub fn shift<T>(v: &mut ~[T]) -> T {
    unsafe {
        assert!(!v.is_empty());
        if v.len() == 1 { return v.pop() }
        if v.len() == 2 {
            // Two elements: swap via two pops and one push.
            let last = v.pop();
            let first = v.pop();
            v.push(last);
            return first;
        }
        let ln = v.len();
        let next_ln = v.len() - 1;
        // Save the last element. We're going to overwrite its position
        let work_elt = v.pop();
        // We still should have room to work where what last element was
        assert!(capacity(v) >= ln);
        // Pretend like we have the original length so we can use
        // the vector copy_memory to overwrite the hole we just made
        raw::set_len(&mut *v, ln);
        // Memcopy the head element (the one we want) to the location we just
        // popped. For the moment it unsafely exists at both the head and last
        // positions
        {
            let first_slice = slice(*v, 0, 1);
            let last_slice = slice(*v, next_ln, ln);
            raw::copy_memory(transmute(last_slice), first_slice, 1);
        }
        // Memcopy everything to the left one element
        {
            let init_slice = slice(*v, 0, next_ln);
            let tail_slice = slice(*v, 1, ln);
            raw::copy_memory(transmute(init_slice),
                             tail_slice,
                             next_ln);
        }
        // Set the new length. Now the vector is back to normal
        raw::set_len(&mut *v, next_ln);
        // Swap out the element we want from the end
        let vp = raw::to_mut_ptr(*v);
        let vp = ptr::mut_offset(vp, next_ln - 1);
        ptr::replace_ptr(vp, work_elt)
    }
}
/// Prepend an element to the vector
pub fn unshift<T>(v: &mut ~[T], x: T) {
    // Swap in a fresh vector containing only `x`, then move the old
    // contents back in after it.
    let vv = util::replace(v, ~[x]);
    v.push_all_move(vv);
}
/// Insert an element at position i within v, shifting all
/// elements after position i one position to the right.
/// Fails (via assert) if `i > v.len()`.
pub fn insert<T>(v: &mut ~[T], i: uint, x: T) {
    let len = v.len();
    assert!(i <= len);
    // Push at the end, then bubble the new element down to position `i`.
    v.push(x);
    let mut j = len;
    while j > i {
        swap(*v, j, j - 1);
        j -= 1;
    }
}
/// Remove and return the element at position i within v, shifting
/// all elements after position i one position to the left.
/// Fails (via assert) if `i >= v.len()`.
pub fn remove<T>(v: &mut ~[T], i: uint) -> T {
    let len = v.len();
    assert!(i < len);
    // Bubble the doomed element up to the end, then pop it off.
    let mut j = i;
    while j < len - 1 {
        swap(*v, j, j + 1);
        j += 1;
    }
    v.pop()
}
/// Consumes all elements, in a vector, moving them out into the closure
/// provided. The vector is traversed from the start to the end.
///
/// This method does not impose any requirements on the type of the vector being
/// consumed, but it prevents any usage of the vector after this function is
/// called.
///
/// # Examples
///
/// ~~~ {.rust}
/// let v = ~[~"a", ~"b"];
/// do vec::consume(v) |i, s| {
///   // s has type ~str, not &~str
///   io::println(s + fmt!(" %d", i));
/// }
/// ~~~
pub fn consume<T>(mut v: ~[T], f: &fn(uint, v: T)) {
    unsafe {
        do as_mut_buf(v) |p, ln| {
            for uint::range(0, ln) |i| {
                // NB: This unsafe operation counts on init writing 0s to the
                // holes we create in the vector. That ensures that, if the
                // iterator fails then we won't try to clean up the consumed
                // elements during unwinding
                let x = intrinsics::init();
                let p = ptr::mut_offset(p, i);
                f(i, ptr::replace_ptr(p, x));
            }
        }
        // All elements were moved out, so the vector is now logically empty.
        raw::set_len(&mut v, 0);
    }
}
/// Consumes all elements, in a vector, moving them out into the closure
/// provided. The vectors is traversed in reverse order (from end to start).
///
/// This method does not impose any requirements on the type of the vector being
/// consumed, but it prevents any usage of the vector after this function is
/// called.
pub fn consume_reverse<T>(mut v: ~[T], f: &fn(uint, v: T)) {
    unsafe {
        do as_mut_buf(v) |p, ln| {
            let mut i = ln;
            while i > 0 {
                i -= 1;
                // NB: This unsafe operation counts on init writing 0s to the
                // holes we create in the vector. That ensures that, if the
                // iterator fails then we won't try to clean up the consumed
                // elements during unwinding
                let x = intrinsics::init();
                let p = ptr::mut_offset(p, i);
                f(i, ptr::replace_ptr(p, x));
            }
        }
        raw::set_len(&mut v, 0);
    }
}
/// Remove the last element from a vector and return it.
/// Fails if the vector is empty.
pub fn pop<T>(v: &mut ~[T]) -> T {
    let ln = v.len();
    if ln == 0 {
        fail!("sorry, cannot vec::pop an empty vector")
    }
    let valptr = ptr::to_mut_unsafe_ptr(&mut v[ln - 1u]);
    unsafe {
        // Move the value out and shrink the length so the slot is never
        // observed (or dropped) twice.
        let val = ptr::replace_ptr(valptr, intrinsics::init());
        raw::set_len(v, ln - 1u);
        val
    }
}
/**
 * Remove an element from anywhere in the vector and return it, replacing it
 * with the last element. This does not preserve ordering, but is O(1).
 *
 * Fails if index >= length.
 */
pub fn swap_remove<T>(v: &mut ~[T], index: uint) -> T {
    let ln = v.len();
    if index >= ln {
        fail!("vec::swap_remove - index %u >= length %u", index, ln);
    }
    // Move the victim to the end (unless it already is the end), then pop.
    if index < ln - 1 {
        swap(*v, index, ln - 1);
    }
    v.pop()
}
/// Append an element to a vector
#[inline]
pub fn push<T>(v: &mut ~[T], initval: T) {
    unsafe {
        let repr: **raw::VecRepr = transmute(&mut *v);
        let fill = (**repr).unboxed.fill;
        // Fast path: spare capacity exists, write in place; otherwise grow.
        if (**repr).unboxed.alloc > fill {
            push_fast(v, initval);
        }
        else {
            push_slow(v, initval);
        }
    }
}
// This doesn't bother to make sure we have space.
#[inline] // really pretty please
unsafe fn push_fast<T>(v: &mut ~[T], initval: T) {
    let repr: **mut raw::VecRepr = transmute(v);
    let fill = (**repr).unboxed.fill;
    // `fill` is tracked in bytes; bump it by one element's size.
    (**repr).unboxed.fill += sys::nonzero_size_of::<T>();
    let p = to_unsafe_ptr(&((**repr).unboxed.data));
    let p = ptr::offset(p, fill) as *mut T;
    intrinsics::move_val_init(&mut(*p), initval);
}
// Out-of-line slow path: grow the allocation, then do the fast push.
#[inline(never)]
fn push_slow<T>(v: &mut ~[T], initval: T) {
    let new_len = v.len() + 1;
    reserve_at_least(&mut *v, new_len);
    unsafe { push_fast(v, initval) }
}
/// Iterates over the slice `rhs`, copies each element, and then appends it to
/// the vector provided `v`. The `rhs` vector is traversed in-order.
///
/// # Example
///
/// ~~~ {.rust}
/// let mut a = ~[1];
/// vec::push_all(&mut a, [2, 3, 4]);
/// assert!(a == ~[1, 2, 3, 4]);
/// ~~~
#[inline]
pub fn push_all<T:Copy>(v: &mut ~[T], rhs: &const [T]) {
    // Reserve the full amount up front so each push hits the fast path.
    let new_len = v.len() + rhs.len();
    reserve(&mut *v, new_len);
    for uint::range(0u, rhs.len()) |i| {
        push(&mut *v, unsafe { raw::get(rhs, i) })
    }
}
/// Takes ownership of the vector `rhs`, moving all elements into the specified
/// vector `v`. This does not copy any elements, and it is illegal to use the
/// `rhs` vector after calling this method (because it is moved here).
///
/// # Example
///
/// ~~~ {.rust}
/// let mut a = ~[~1];
/// vec::push_all_move(&mut a, ~[~2, ~3, ~4]);
/// assert!(a == ~[~1, ~2, ~3, ~4]);
/// ~~~
#[inline]
pub fn push_all_move<T>(v: &mut ~[T], mut rhs: ~[T]) {
    let new_len = v.len() + rhs.len();
    reserve(&mut *v, new_len);
    unsafe {
        do as_mut_buf(rhs) |p, len| {
            for uint::range(0, len) |i| {
                // Move each element out of `rhs`, leaving uninitialized
                // slots behind; `rhs`'s length is zeroed below so they are
                // never dropped.
                let x = ptr::replace_ptr(ptr::mut_offset(p, i),
                                         intrinsics::uninit());
                push(&mut *v, x);
            }
        }
        raw::set_len(&mut rhs, 0);
    }
}
/// Shorten a vector, dropping excess elements.
pub fn truncate<T>(v: &mut ~[T], newlen: uint) {
    do as_mut_buf(*v) |p, oldlen| {
        assert!(newlen <= oldlen);
        unsafe {
            // This loop is optimized out for non-drop types.
            for uint::range(newlen, oldlen) |i| {
                ptr::replace_ptr(ptr::mut_offset(p, i), intrinsics::uninit());
            }
        }
    }
    unsafe { raw::set_len(&mut *v, newlen); }
}
/**
 * Remove consecutive repeated elements from a vector; if the vector is
 * sorted, this removes all duplicates. Operates in place in a single pass.
 */
pub fn dedup<T:Eq>(v: &mut ~[T]) {
    unsafe {
        // Empty vectors have nothing to deduplicate (and the index math
        // below assumes at least one element).
        if v.len() < 1 { return; }
        let mut (last_written, next_to_read) = (0, 1);
        do as_const_buf(*v) |p, ln| {
            // We have a mutable reference to v, so we can make arbitrary
            // changes. (cf. push and pop)
            let p = p as *mut T;
            // last_written < next_to_read <= ln
            while next_to_read < ln {
                // last_written < next_to_read < ln
                if *ptr::mut_offset(p, next_to_read) ==
                    *ptr::mut_offset(p, last_written) {
                    // Duplicate: destroy it in place (replace with an
                    // uninitialized hole that set_len below will hide).
                    ptr::replace_ptr(ptr::mut_offset(p, next_to_read),
                                     intrinsics::uninit());
                } else {
                    last_written += 1;
                    // last_written <= next_to_read < ln
                    if next_to_read != last_written {
                        ptr::swap_ptr(ptr::mut_offset(p, last_written),
                                      ptr::mut_offset(p, next_to_read));
                    }
                }
                // last_written <= next_to_read < ln
                next_to_read += 1;
                // last_written < next_to_read <= ln
            }
        }
        // last_written < next_to_read == ln
        raw::set_len(v, last_written + 1);
    }
}
// Appending
/// Returns `lhs` extended with a copy of every element of `rhs`,
/// preserving order.
#[inline]
pub fn append<T:Copy>(lhs: ~[T], rhs: &const [T]) -> ~[T] {
    let mut result = lhs;
    result.push_all(rhs);
    result
}
/// Returns `lhs` with the single element `x` appended at the end.
#[inline]
pub fn append_one<T>(lhs: ~[T], x: T) -> ~[T] {
    let mut result = lhs;
    result.push(x);
    result
}
/**
 * Expands a vector in place, initializing the new elements to a given value
 *
 * # Arguments
 *
 * * v - The vector to grow
 * * n - The number of elements to add
 * * initval - The value for the new elements
 */
pub fn grow<T:Copy>(v: &mut ~[T], n: uint, initval: &T) {
    // Reserve once so each push hits the fast path.
    let new_len = v.len() + n;
    reserve_at_least(&mut *v, new_len);
    let mut i: uint = 0u;
    while i < n {
        v.push(copy *initval);
        i += 1u;
    }
}
/**
 * Expands a vector in place, initializing the new elements to the result of
 * a function
 *
 * Function `init_op` is called `n` times with the values [0..`n`)
 *
 * # Arguments
 *
 * * v - The vector to grow
 * * n - The number of elements to add
 * * init_op - A function to call to retrieve each appended element's
 *             value
 */
pub fn grow_fn<T>(v: &mut ~[T], n: uint, op: &fn(uint) -> T) {
    let new_len = v.len() + n;
    reserve_at_least(&mut *v, new_len);
    let mut i: uint = 0u;
    while i < n {
        v.push(op(i));
        i += 1u;
    }
}
/**
 * Sets the value of a vector element at a given index, growing the vector as
 * needed
 *
 * Sets the element at position `index` to `val`. If `index` is past the end
 * of the vector, expands the vector by replicating `initval` to fill the
 * intervening space.
 */
pub fn grow_set<T:Copy>(v: &mut ~[T], index: uint, initval: &T, val: T) {
    let l = v.len();
    // Ensure index is in bounds before the assignment below.
    if index >= l { grow(&mut *v, index - l + 1u, initval); }
    v[index] = val;
}
// Functional utilities
/// Apply a function to each element of a vector and return the results
pub fn map<T, U>(v: &[T], f: &fn(t: &T) -> U) -> ~[U] {
    let mut result = with_capacity(v.len());
    for v.iter().advance |elem| {
        result.push(f(elem));
    }
    result
}
/// Consumes a vector, mapping it into a different vector. This function takes
/// ownership of the supplied vector `v`, moving each element into the closure
/// provided to generate a new element. The vector of new elements is then
/// returned.
///
/// The original vector `v` cannot be used after this function call (it is moved
/// inside), but there are no restrictions on the type of the vector.
pub fn map_consume<T, U>(v: ~[T], f: &fn(v: T) -> U) -> ~[U] {
    let mut result = ~[];
    do consume(v) |_i, x| {
        result.push(f(x));
    }
    result
}
/// Apply a function to each element of a vector, together with its index,
/// and return the results
pub fn mapi<T, U>(v: &[T], f: &fn(uint, t: &T) -> U) -> ~[U] {
    // The counter is captured by the closure passed to `map`; it is
    // incremented before `f` runs, so `i - 1` is the current index.
    let mut i = 0;
    do map(v) |e| {
        i += 1;
        f(i - 1, e)
    }
}
/**
 * Apply a function to each element of a vector and return a concatenation
 * of each result vector
 */
pub fn flat_map<T, U>(v: &[T], f: &fn(t: &T) -> ~[U]) -> ~[U] {
    let mut result = ~[];
    // Each per-element vector is moved wholesale into the result.
    for v.iter().advance |elem| { result.push_all_move(f(elem)); }
    result
}
/**
 * Apply a function to each pair of elements and return the results.
 * Equivalent to `map(zip(v0, v1), f)`. Fails if the lengths differ.
 */
pub fn map_zip<T:Copy,U:Copy,V>(v0: &[T], v1: &[U],
                                f: &fn(t: &T, v: &U) -> V) -> ~[V] {
    let v0_len = v0.len();
    if v0_len != v1.len() { fail!(); }
    let mut u: ~[V] = ~[];
    let mut i = 0u;
    while i < v0_len {
        u.push(f(&v0[i], &v1[i]));
        i += 1u;
    }
    u
}
pub fn filter_map<T, U>(
    v: ~[T],
    f: &fn(t: T) -> Option<U>) -> ~[U]
{
    /*!
     *
     * Apply a function to each element of a vector and return the results.
     * Consumes the input vector. If function `f` returns `None` then that
     * element is excluded from the resulting vector.
     */
    let mut result = ~[];
    // Each element is moved into `f`; only `Some` results are kept.
    do consume(v) |_, elem| {
        match f(elem) {
            None => {}
            Some(result_elem) => { result.push(result_elem); }
        }
    }
    result
}
pub fn filter_mapped<T, U: Copy>(
    v: &[T],
    f: &fn(t: &T) -> Option<U>) -> ~[U]
{
    /*!
     *
     * Like `filter_map()`, but operates on a borrowed slice
     * and does not consume the input.
     */
    let mut result = ~[];
    for v.iter().advance |elem| {
        // Only `Some` results contribute to the output.
        match f(elem) {
            None => {/* no-op */ }
            Some(result_elem) => { result.push(result_elem); }
        }
    }
    result
}
/**
 * Construct a new vector from the elements of a vector for which some
 * predicate holds.
 *
 * Apply function `f` to each element of `v` and return a vector containing
 * only those elements for which `f` returned true.
 */
pub fn filter<T>(v: ~[T], f: &fn(t: &T) -> bool) -> ~[T] {
    let mut result = ~[];
    // FIXME (#4355 maybe): using v.consume here crashes
    // do v.consume |_, elem| {
    do consume(v) |_, elem| {
        // The predicate only borrows; the element itself is moved into
        // `result` when it is kept, and dropped otherwise.
        if f(&elem) { result.push(elem); }
    }
    result
}
/**
 * Construct a new vector from the elements of a vector for which some
 * predicate holds.
 *
 * Apply function `f` to each element of `v` and return a vector containing
 * only those elements for which `f` returned true.
 */
pub fn filtered<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> ~[T] {
    let mut result = ~[];
    for v.iter().advance |elem| {
        // Non-consuming variant of `filter`: kept elements are copied.
        if f(elem) { result.push(copy *elem); }
    }
    result
}
/**
 * Like `filter()`, but in place. Preserves order of `v`. Linear time.
 */
pub fn retain<T>(v: &mut ~[T], f: &fn(t: &T) -> bool) {
    let len = v.len();
    // Number of rejected elements seen so far; each kept element is
    // shifted left by exactly this amount, compacting in one pass.
    let mut deleted: uint = 0;
    for uint::range(0, len) |i| {
        if !f(&v[i]) {
            deleted += 1;
        } else if deleted > 0 {
            swap(*v, i - deleted, i);
        }
    }
    // The rejected elements have all been swapped to the tail; drop them.
    if deleted > 0 {
        v.truncate(len - deleted);
    }
}
/// Flattens a vector of vectors of T into a single vector of T.
// Thin forwarding wrapper around the `VectorVector` method below.
pub fn concat<T:Copy>(v: &[~[T]]) -> ~[T] { v.concat_vec() }
/// Concatenate a vector of vectors, placing a given separator between each
// Thin forwarding wrapper around the `VectorVector` method below.
pub fn connect<T:Copy>(v: &[~[T]], sep: &T) -> ~[T] { v.connect_vec(sep) }
/// Flattens a vector of vectors of T into a single vector of T.
// Same as `concat`, but the inner vectors are borrowed slices.
pub fn concat_slices<T:Copy>(v: &[&[T]]) -> ~[T] { v.concat_vec() }
/// Concatenate a vector of vectors, placing a given separator between each
// Same as `connect`, but the inner vectors are borrowed slices.
pub fn connect_slices<T:Copy>(v: &[&[T]], sep: &T) -> ~[T] { v.connect_vec(sep) }
#[allow(missing_doc)]
/// Flattening/joining operations on vectors whose elements are themselves
/// vectors or slices.
pub trait VectorVector<T> {
    // FIXME #5898: calling these .concat and .connect conflicts with
    // StrVector::con{cat,nect}, since they have generic contents.
    /// Flatten into a single vector.
    pub fn concat_vec(&self) -> ~[T];
    /// Flatten, inserting a copy of `*sep` between consecutive inner vectors.
    pub fn connect_vec(&self, sep: &T) -> ~[T];
}
impl<'self, T:Copy> VectorVector<T> for &'self [~[T]] {
    /// Flattens a vector of slices of T into a single vector of T.
    pub fn concat_vec(&self) -> ~[T] {
        // Each inner ~[T] is moved out whole and spliced into the result.
        self.flat_map(|&inner| inner)
    }
    /// Concatenate a vector of vectors, placing a given separator between each.
    pub fn connect_vec(&self, sep: &T) -> ~[T] {
        let mut r = ~[];
        // `first` suppresses the separator before the very first inner vector.
        let mut first = true;
        for self.iter().advance |&inner| {
            if first { first = false; } else { r.push(copy *sep); }
            r.push_all(inner);
        }
        r
    }
}
impl<'self, T:Copy> VectorVector<T> for &'self [&'self [T]] {
    /// Flattens a vector of slices of T into a single vector of T.
    pub fn concat_vec(&self) -> ~[T] {
        // Inner slices are borrowed, so each must be copied to an owned
        // vector before being spliced in.
        self.flat_map(|&inner| inner.to_owned())
    }
    /// Concatenate a vector of slices, placing a given separator between each.
    pub fn connect_vec(&self, sep: &T) -> ~[T] {
        let mut r = ~[];
        // `first` suppresses the separator before the very first inner slice.
        let mut first = true;
        for self.iter().advance |&inner| {
            if first { first = false; } else { r.push(copy *sep); }
            r.push_all(inner);
        }
        r
    }
}
/// Return true if a vector contains an element with the given value
pub fn contains<T:Eq>(v: &[T], x: &T) -> bool {
for v.iter().advance |elt| { if *x == *elt { return true; } }
false
}
/**
 * Search for the first element that matches a given predicate within a range
 *
 * Apply function `f` to each element of `v` within the range
 * [`start`, `end`). When function `f` returns true then an option containing
 * the element is returned. If `f` matches no elements then none is returned.
 */
pub fn find_between<T:Copy>(v: &[T], start: uint, end: uint,
                      f: &fn(t: &T) -> bool) -> Option<T> {
    // Locate the index first, then copy the matching element out.
    position_between(v, start, end, f).map(|i| copy v[*i])
}
/**
 * Search for the last element that matches a given predicate
 *
 * Apply function `f` to each element of `v` in reverse order. When function
 * `f` returns true then an option containing the element is returned. If `f`
 * matches no elements then none is returned.
 */
pub fn rfind<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> Option<T> {
    // Whole-vector convenience form of `rfind_between`.
    rfind_between(v, 0u, v.len(), f)
}
/**
 * Search for the last element that matches a given predicate within a range
 *
 * Apply function `f` to each element of `v` in reverse order within the range
 * [`start`, `end`). When function `f` returns true then an option containing
 * the element is returned. If `f` matches no elements then none is return.
 */
pub fn rfind_between<T:Copy>(v: &[T],
                             start: uint,
                             end: uint,
                             f: &fn(t: &T) -> bool)
                          -> Option<T> {
    // Locate the index of the last match, then copy the element out.
    rposition_between(v, start, end, f).map(|i| copy v[*i])
}
/// Find the first index containing a matching value
pub fn position_elem<T:Eq>(v: &[T], x: &T) -> Option<uint> {
    // Delegate the scan to the iterator's position predicate.
    v.iter().position_(|y| *x == *y)
}
/**
* Find the first index matching some predicate within a range
*
* Apply function `f` to each element of `v` between the range
* [`start`, `end`). When function `f` returns true then an option containing
* the index is returned. If `f` matches no elements then none is returned.
*/
pub fn position_between<T>(v: &[T],
start: uint,
end: uint,
f: &fn(t: &T) -> bool)
-> Option<uint> {
assert!(start <= end);
assert!(end <= v.len());
let mut i = start;
while i < end { if f(&v[i]) { return Some::<uint>(i); } i += 1u; }
None
}
/// Find the last index containing a matching value
pub fn rposition_elem<T:Eq>(v: &[T], x: &T) -> Option<uint> {
    // Reverse search using equality as the predicate.
    rposition(v, |y| *x == *y)
}
/**
 * Find the last index matching some predicate
 *
 * Apply function `f` to each element of `v` in reverse order. When function
 * `f` returns true then an option containing the index is returned. If `f`
 * matches no elements then none is returned.
 */
pub fn rposition<T>(v: &[T], f: &fn(t: &T) -> bool) -> Option<uint> {
    // Whole-vector convenience form of `rposition_between`.
    rposition_between(v, 0u, v.len(), f)
}
/**
 * Find the last index matching some predicate within a range
 *
 * Apply function `f` to each element of `v` in reverse order between the
 * range [`start`, `end`). When function `f` returns true then an option
 * containing the index is returned. If `f` matches no elements then none is
 * returned.
 */
pub fn rposition_between<T>(v: &[T], start: uint, end: uint,
                             f: &fn(t: &T) -> bool) -> Option<uint> {
    // The range must be well-formed and must lie within the vector.
    assert!(start <= end);
    assert!(end <= v.len());
    // Walk backward from `end`; `i - 1` is the index actually tested, so
    // the half-open upper bound is never dereferenced.
    let mut i = end;
    while i > start {
        if f(&v[i - 1u]) { return Some::<uint>(i - 1u); }
        i -= 1u;
    }
    None
}
/**
 * Binary search a sorted vector with a comparator function.
 *
 * The comparator should implement an order consistent with the sort
 * order of the underlying vector, returning an order code that indicates
 * whether its argument is `Less`, `Equal` or `Greater` the desired target.
 *
 * Returns the index where the comparator returned `Equal`, or `None` if
 * not found.
 */
pub fn bsearch<T>(v: &[T], f: &fn(&T) -> Ordering) -> Option<uint> {
    // `base` is the start of the live window, `lim` its length; each
    // iteration probes the middle of the window and halves it.
    let mut base : uint = 0;
    let mut lim : uint = v.len();
    while lim != 0 {
        let ix = base + (lim >> 1);
        match f(&v[ix]) {
            Equal => return Some(ix),
            Less => {
                // Probe is too small: keep the upper half, excluding `ix`.
                base = ix + 1;
                lim -= 1;
            }
            // Probe is too big: keep the lower half (window shrinks below).
            Greater => ()
        }
        lim >>= 1;
    }
    return None;
}
/**
 * Binary search a sorted vector for a given element.
 *
 * Returns the index of the element or None if not found.
 */
pub fn bsearch_elem<T:TotalOrd>(v: &[T], x: &T) -> Option<uint> {
    // Probe compares against the target, so `p.cmp(x)` reports the probe's
    // position relative to `x`, matching `bsearch`'s contract.
    bsearch(v, |p| p.cmp(x))
}
// FIXME: if issue #586 gets implemented, could have a postcondition
// saying the two result lists have the same length -- or, could
// return a nominal record with a constraint saying that, instead of
// returning a tuple (contingent on issue #869)
/**
 * Convert a vector of pairs into a pair of vectors, by reference. As unzip().
 */
pub fn unzip_slice<T:Copy,U:Copy>(v: &[(T, U)]) -> (~[T], ~[U]) {
    let mut (ts, us) = (~[], ~[]);
    for v.iter().advance |p| {
        // Borrowed input: each pair is copied, then split into its halves.
        let (t, u) = copy *p;
        ts.push(t);
        us.push(u);
    }
    (ts, us)
}
/**
 * Convert a vector of pairs into a pair of vectors.
 *
 * Returns a tuple containing two vectors where the i-th element of the first
 * vector contains the first element of the i-th tuple of the input vector,
 * and the i-th element of the second vector contains the second element
 * of the i-th tuple of the input vector.
 */
pub fn unzip<T,U>(v: ~[(T, U)]) -> (~[T], ~[U]) {
    let mut (ts, us) = (~[], ~[]);
    // Consuming form: pairs are moved out of `v`, so no copies are made.
    do consume(v) |_i, p| {
        let (t, u) = p;
        ts.push(t);
        us.push(u);
    }
    (ts, us)
}
/**
 * Convert two vectors to a vector of pairs, by reference. As zip().
 */
pub fn zip_slice<T:Copy,U:Copy>(v: &const [T], u: &const [U])
        -> ~[(T, U)] {
    let mut zipped = ~[];
    let sz = v.len();
    let mut i = 0u;
    // Unlike `zip`, mismatched lengths are an error rather than truncation.
    assert_eq!(sz, u.len());
    while i < sz {
        zipped.push((copy v[i], copy u[i]));
        i += 1u;
    }
    zipped
}
/**
 * Convert two vectors to a vector of pairs.
 *
 * Returns a tuple containing two vectors where the i-th tuple contains the
 * i-th elements from each of the input vectors.
 */
pub fn zip<T, U>(mut v: ~[T], mut u: ~[U]) -> ~[(T, U)] {
    let mut i = v.len();
    // Both inputs must be the same length (consumed pairwise below).
    assert_eq!(i, u.len());
    let mut w = with_capacity(i);
    // Popping moves elements out from the back without copying, so the
    // pairs are produced in reverse order...
    while i > 0 {
        w.push((v.pop(),u.pop()));
        i -= 1;
    }
    // ...and one final reversal restores the original order.
    reverse(w);
    w
}
/**
 * Swaps two elements in a vector
 *
 * # Arguments
 *
 * * v The input vector
 * * a - The index of the first element
 * * b - The index of the second element
 */
#[inline]
pub fn swap<T>(v: &mut [T], a: uint, b: uint) {
    unsafe {
        // Can't take two mutable loans from one vector, so instead just cast
        // them to their raw pointers to do the swap.
        // The `&mut v[..]` indexing here is still bounds-checked, so `a`
        // and `b` out of range fail before any raw-pointer work happens.
        let pa: *mut T = &mut v[a];
        let pb: *mut T = &mut v[b];
        ptr::swap_ptr(pa, pb);
    }
}
/// Reverse the order of elements in a vector, in place
pub fn reverse<T>(v: &mut [T]) {
    // Walk two cursors toward each other, exchanging the elements they
    // pass; with an odd length the middle element stays where it is.
    let mut lo: uint = 0;
    let mut hi: uint = v.len();
    while lo + 1 < hi {
        hi -= 1;
        swap(v, lo, hi);
        lo += 1;
    }
}
/// Returns a vector with the order of elements reversed
pub fn reversed<T:Copy>(v: &const [T]) -> ~[T] {
    let mut rs: ~[T] = ~[];
    let mut i = v.len();
    // Empty input short-circuits; otherwise `i` becomes the last index.
    if i == 0 { return (rs); } else { i -= 1; }
    // Copy indices len-1 down to 1; index 0 is appended separately below
    // because the `uint` counter cannot go below zero.
    while i != 0 { rs.push(copy v[i]); i -= 1; }
    rs.push(copy v[0]);
    rs
}
/**
 * Iterate over all permutations of vector `v`.
 *
 * Permutations are produced in lexicographic order with respect to the order
 * of elements in `v` (so if `v` is sorted then the permutations are
 * lexicographically sorted).
 *
 * The total number of permutations produced is `v.len()!`. If `v` contains
 * repeated elements, then some permutations are repeated.
 *
 * See [Algorithms to generate
 * permutations](http://en.wikipedia.org/wiki/Permutation).
 *
 * # Arguments
 *
 * * `values` - A vector of values from which the permutations are
 *   chosen
 *
 * * `fun` - The function to iterate over the combinations
 */
pub fn each_permutation<T:Copy>(values: &[T], fun: &fn(perm : &[T]) -> bool) -> bool {
    let length = values.len();
    // Working copy of the input that is permuted in place via `indices`.
    let mut permutation = vec::from_fn(length, |i| copy values[i]);
    if length <= 1 {
        // NOTE(review): `fun`'s result is discarded here, and the main loop
        // below returns `true` even when `fun` requests a break; other
        // iterators in this file (e.g. `windowed`) return `false` on early
        // break — confirm whether this protocol mismatch is intended.
        fun(permutation);
        return true;
    }
    let mut indices = vec::from_fn(length, |i| i);
    loop {
        if !fun(permutation) { return true; }
        // find largest k such that indices[k] < indices[k+1]
        // if no such k exists, all permutations have been generated
        let mut k = length - 2;
        while k > 0 && indices[k] >= indices[k+1] {
            k -= 1;
        }
        if k == 0 && indices[0] > indices[1] { return true; }
        // find largest l such that indices[k] < indices[l]
        // k+1 is guaranteed to be such
        let mut l = length - 1;
        while indices[k] >= indices[l] {
            l -= 1;
        }
        // swap indices[k] and indices[l]; sort indices[k+1..]
        // (they're just reversed)
        vec::swap(indices, k, l);
        reverse(indices.mut_slice(k+1, length));
        // fixup permutation based on indices
        for uint::range(k, length) |i| {
            permutation[i] = copy values[indices[i]];
        }
    }
}
/**
 * Iterate over all contiguous windows of length `n` of the vector `v`.
 *
 * Returns `false` if the closure requested an early break, `true` if the
 * iteration ran to completion.
 *
 * # Example
 *
 * Print the adjacent pairs of a vector (i.e. `[1,2]`, `[2,3]`, `[3,4]`)
 *
 * ~~~ {.rust}
 * for windowed(2, &[1,2,3,4]) |v| {
 *     io::println(fmt!("%?", v));
 * }
 * ~~~
 *
 */
pub fn windowed<'r, T>(n: uint, v: &'r [T], it: &fn(&'r [T]) -> bool) -> bool {
    // Zero-width windows are meaningless; a window wider than the vector
    // simply yields nothing.
    assert!(1u <= n);
    if n > v.len() { return true; }
    for uint::range(0, v.len() - n + 1) |i| {
        if !it(v.slice(i, i + n)) { return false; }
    }
    return true;
}
/**
 * Work with the buffer of a vector.
 *
 * Allows for unsafe manipulation of vector contents, which is useful for
 * foreign interop.
 */
#[inline]
pub fn as_imm_buf<T,U>(s: &[T],
                       /* NB---this CANNOT be const, see below */
                       f: &fn(*T, uint) -> U) -> U {
    // NB---Do not change the type of s to `&const [T]`.  This is
    // unsound.  The reason is that we are going to create immutable pointers
    // into `s` and pass them to `f()`, but in fact they are potentially
    // pointing at *mutable memory*.  Use `as_const_buf` or `as_mut_buf`
    // instead!
    unsafe {
        // A slice is represented as a (pointer, byte-length) pair; the byte
        // length is divided back down to an element count before calling `f`.
        let v : *(*T,uint) = transmute(&s);
        let (buf,len) = *v;
        f(buf, len / sys::nonzero_size_of::<T>())
    }
}
/// Similar to `as_imm_buf` but passing a `*const T`
#[inline]
pub fn as_const_buf<T,U>(s: &const [T], f: &fn(*const T, uint) -> U) -> U {
    unsafe {
        // Same (pointer, byte-length) decomposition as `as_imm_buf`.
        let v : *(*const T,uint) = transmute(&s);
        let (buf,len) = *v;
        f(buf, len / sys::nonzero_size_of::<T>())
    }
}
/// Similar to `as_imm_buf` but passing a `*mut T`
#[inline]
pub fn as_mut_buf<T,U>(s: &mut [T], f: &fn(*mut T, uint) -> U) -> U {
    unsafe {
        // Same (pointer, byte-length) decomposition as `as_imm_buf`.
        let v : *(*mut T,uint) = transmute(&s);
        let (buf,len) = *v;
        f(buf, len / sys::nonzero_size_of::<T>())
    }
}
// Equality
/// Tests whether two slices are equal to one another. This is only true if both
/// slices are of the same length, and each of the corresponding elements return
/// true when queried via the `eq` function.
fn eq<T: Eq>(a: &[T], b: &[T]) -> bool {
let (a_len, b_len) = (a.len(), b.len());
if a_len != b_len { return false; }
let mut i = 0;
while i < a_len {
if a[i] != b[i] { return false; }
i += 1;
}
true
}
/// Similar to the `vec::eq` function, but this is defined for types which
/// implement `TotalEq` as opposed to types which implement `Eq`. Equality
/// comparisons are done via the `equals` function instead of `eq`.
fn equals<T: TotalEq>(a: &[T], b: &[T]) -> bool {
let (a_len, b_len) = (a.len(), b.len());
if a_len != b_len { return false; }
let mut i = 0;
while i < a_len {
if !a[i].equals(&b[i]) { return false; }
i += 1;
}
true
}
#[cfg(not(test))]
impl<'self,T:Eq> Eq for &'self [T] {
    // Forward to the free `eq` helper; `ne` is defined as its negation.
    #[inline]
    fn eq(&self, other: & &'self [T]) -> bool { eq(*self, *other) }
    #[inline]
    fn ne(&self, other: & &'self [T]) -> bool { !self.eq(other) }
}
#[cfg(not(test))]
impl<T:Eq> Eq for ~[T] {
    // Owned vectors auto-coerce to slices for the free `eq` helper.
    #[inline]
    fn eq(&self, other: &~[T]) -> bool { eq(*self, *other) }
    #[inline]
    fn ne(&self, other: &~[T]) -> bool { !self.eq(other) }
}
#[cfg(not(test))]
impl<T:Eq> Eq for @[T] {
    // Managed vectors auto-coerce to slices for the free `eq` helper.
    #[inline]
    fn eq(&self, other: &@[T]) -> bool { eq(*self, *other) }
    #[inline]
    fn ne(&self, other: &@[T]) -> bool { !self.eq(other) }
}
#[cfg(not(test))]
impl<'self,T:TotalEq> TotalEq for &'self [T] {
    // Forward to the free `equals` helper.
    #[inline]
    fn equals(&self, other: & &'self [T]) -> bool { equals(*self, *other) }
}
#[cfg(not(test))]
impl<T:TotalEq> TotalEq for ~[T] {
    // Owned vectors auto-coerce to slices for the free `equals` helper.
    #[inline]
    fn equals(&self, other: &~[T]) -> bool { equals(*self, *other) }
}
#[cfg(not(test))]
impl<T:TotalEq> TotalEq for @[T] {
    // Managed vectors auto-coerce to slices for the free `equals` helper.
    #[inline]
    fn equals(&self, other: &@[T]) -> bool { equals(*self, *other) }
}
#[cfg(not(test))]
impl<'self,T:Eq> Equiv<~[T]> for &'self [T] {
    // Cross-type equality: lets a slice be compared against an owned
    // vector without allocating.
    #[inline]
    fn equiv(&self, other: &~[T]) -> bool { eq(*self, *other) }
}
// Lexicographical comparison
/// Lexicographic three-way comparison of two slices: the first unequal
/// element pair decides; if one slice is a prefix of the other, the
/// shorter slice orders first.
fn cmp<T: TotalOrd>(a: &[T], b: &[T]) -> Ordering {
    let low = uint::min(a.len(), b.len());
    for uint::range(0, low) |idx| {
        match a[idx].cmp(&b[idx]) {
            Greater => return Greater,
            Less => return Less,
            Equal => ()
        }
    }
    // All shared elements equal: the lengths break the tie.
    a.len().cmp(&b.len())
}
#[cfg(not(test))]
impl<'self,T:TotalOrd> TotalOrd for &'self [T] {
    // Forward to the free lexicographic `cmp` helper.
    #[inline]
    fn cmp(&self, other: & &'self [T]) -> Ordering { cmp(*self, *other) }
}
#[cfg(not(test))]
impl<T: TotalOrd> TotalOrd for ~[T] {
    // Owned vectors auto-coerce to slices for the free `cmp` helper.
    #[inline]
    fn cmp(&self, other: &~[T]) -> Ordering { cmp(*self, *other) }
}
#[cfg(not(test))]
impl<T: TotalOrd> TotalOrd for @[T] {
    // Managed vectors auto-coerce to slices for the free `cmp` helper.
    #[inline]
    fn cmp(&self, other: &@[T]) -> Ordering { cmp(*self, *other) }
}
/// Lexicographic less-than for slices: the first unequal element pair
/// decides; a strict prefix orders before the longer slice.
fn lt<T:Ord>(a: &[T], b: &[T]) -> bool {
    let (a_len, b_len) = (a.len(), b.len());
    let end = uint::min(a_len, b_len);
    let mut i = 0;
    while i < end {
        let (c_a, c_b) = (&a[i], &b[i]);
        if *c_a < *c_b { return true; }
        if *c_a > *c_b { return false; }
        i += 1;
    }
    // All shared elements equal: the shorter slice is the lesser.
    a_len < b_len
}
// a <= b  ⇔  !(b < a); defined solely in terms of `lt` for consistency.
fn le<T:Ord>(a: &[T], b: &[T]) -> bool { !lt(b, a) }
// a >= b  ⇔  !(a < b); defined solely in terms of `lt` for consistency.
fn ge<T:Ord>(a: &[T], b: &[T]) -> bool { !lt(a, b) }
// a > b  ⇔  b < a; defined solely in terms of `lt` for consistency.
fn gt<T:Ord>(a: &[T], b: &[T]) -> bool { lt(b, a) }
#[cfg(not(test))]
impl<'self,T:Ord> Ord for &'self [T] {
    // All four comparisons forward to the `lt`-derived free helpers above.
    #[inline]
    fn lt(&self, other: & &'self [T]) -> bool { lt((*self), (*other)) }
    #[inline]
    fn le(&self, other: & &'self [T]) -> bool { le((*self), (*other)) }
    #[inline]
    fn ge(&self, other: & &'self [T]) -> bool { ge((*self), (*other)) }
    #[inline]
    fn gt(&self, other: & &'self [T]) -> bool { gt((*self), (*other)) }
}
#[cfg(not(test))]
impl<T:Ord> Ord for ~[T] {
    // Owned vectors auto-coerce to slices for the free helpers above.
    #[inline]
    fn lt(&self, other: &~[T]) -> bool { lt((*self), (*other)) }
    #[inline]
    fn le(&self, other: &~[T]) -> bool { le((*self), (*other)) }
    #[inline]
    fn ge(&self, other: &~[T]) -> bool { ge((*self), (*other)) }
    #[inline]
    fn gt(&self, other: &~[T]) -> bool { gt((*self), (*other)) }
}
#[cfg(not(test))]
impl<T:Ord> Ord for @[T] {
    // Managed vectors auto-coerce to slices for the free helpers above.
    #[inline]
    fn lt(&self, other: &@[T]) -> bool { lt((*self), (*other)) }
    #[inline]
    fn le(&self, other: &@[T]) -> bool { le((*self), (*other)) }
    #[inline]
    fn ge(&self, other: &@[T]) -> bool { ge((*self), (*other)) }
    #[inline]
    fn gt(&self, other: &@[T]) -> bool { gt((*self), (*other)) }
}
#[cfg(not(test))]
/// Operator-trait implementations for vectors (kept in a submodule so they
/// can be excluded from the test build).
pub mod traits {
    use kinds::Copy;
    use ops::Add;
    use vec::append;
    impl<'self,T:Copy> Add<&'self const [T],~[T]> for ~[T] {
        // `owned + slice` produces a fresh owned vector: a copy of the
        // left operand with the right operand's elements appended.
        #[inline]
        fn add(&self, rhs: & &'self const [T]) -> ~[T] {
            append(copy *self, (*rhs))
        }
    }
}
impl<'self, T> Container for &'self const [T] {
    /// Returns true if a vector contains no elements
    #[inline]
    fn is_empty(&self) -> bool {
        // Read the length out of the slice representation via the raw
        // (pointer, length) pair.
        as_const_buf(*self, |_p, len| len == 0u)
    }
    /// Returns the length of a vector
    #[inline]
    fn len(&self) -> uint {
        as_const_buf(*self, |_p, len| len)
    }
}
impl<T> Container for ~[T] {
    /// Returns true if a vector contains no elements
    #[inline]
    fn is_empty(&self) -> bool {
        // Owned vectors coerce to slices; reuse the slice-length machinery.
        as_const_buf(*self, |_p, len| len == 0u)
    }
    /// Returns the length of a vector
    #[inline]
    fn len(&self) -> uint {
        as_const_buf(*self, |_p, len| len)
    }
}
#[allow(missing_doc)]
/// Conversion of a borrowed vector into an owned copy.
pub trait CopyableVector<T> {
    /// Returns a freshly-allocated owned copy of the receiver.
    fn to_owned(&self) -> ~[T];
}
/// Extension methods for vectors
impl<'self,T:Copy> CopyableVector<T> for &'self [T] {
    /// Returns a copy of `v`.
    #[inline]
    fn to_owned(&self) -> ~[T] {
        let mut result = ~[];
        // Reserve up front so the element loop never reallocates.
        reserve(&mut result, self.len());
        for self.iter().advance |e| {
            result.push(copy *e);
        }
        result
    }
}
#[allow(missing_doc)]
/// Non-mutating extension methods available on any borrowed vector; the
/// `'self` lifetime ties returned borrows to the receiver slice.
pub trait ImmutableVector<'self, T> {
    fn slice(&self, start: uint, end: uint) -> &'self [T];
    fn iter(self) -> VecIterator<'self, T>;
    fn rev_iter(self) -> VecRevIterator<'self, T>;
    fn head(&self) -> &'self T;
    fn head_opt(&self) -> Option<&'self T>;
    fn tail(&self) -> &'self [T];
    fn tailn(&self, n: uint) -> &'self [T];
    fn init(&self) -> &'self [T];
    fn initn(&self, n: uint) -> &'self [T];
    fn last(&self) -> &'self T;
    fn last_opt(&self) -> Option<&'self T>;
    fn rposition(&self, f: &fn(t: &T) -> bool) -> Option<uint>;
    fn map<U>(&self, f: &fn(t: &T) -> U) -> ~[U];
    fn mapi<U>(&self, f: &fn(uint, t: &T) -> U) -> ~[U];
    fn map_r<U>(&self, f: &fn(x: &T) -> U) -> ~[U];
    fn flat_map<U>(&self, f: &fn(t: &T) -> ~[U]) -> ~[U];
    fn filter_mapped<U:Copy>(&self, f: &fn(t: &T) -> Option<U>) -> ~[U];
    unsafe fn unsafe_ref(&self, index: uint) -> *T;
}
/// Extension methods for vectors
// Most methods here are thin forwarders to the free functions earlier in
// this module; only the iterator constructors and `map_r`/`unsafe_ref`
// carry their own logic.
impl<'self,T> ImmutableVector<'self, T> for &'self [T] {
    /// Return a slice that points into another slice.
    #[inline]
    fn slice(&self, start: uint, end: uint) -> &'self [T] {
        slice(*self, start, end)
    }
    #[inline]
    fn iter(self) -> VecIterator<'self, T> {
        unsafe {
            // The iterator holds a raw [ptr, end) pointer pair; `lifetime`
            // is a phantom borrow that pins it to the slice's lifetime.
            let p = vec::raw::to_ptr(self);
            VecIterator{ptr: p, end: p.offset(self.len()),
                        lifetime: cast::transmute(p)}
        }
    }
    #[inline]
    fn rev_iter(self) -> VecRevIterator<'self, T> {
        unsafe {
            // Reverse iteration starts at the last element and walks down
            // to the one-before-the-start sentinel.
            let p = vec::raw::to_ptr(self);
            VecRevIterator{ptr: p.offset(self.len() - 1),
                           end: p.offset(-1),
                           lifetime: cast::transmute(p)}
        }
    }
    /// Returns the first element of a vector, failing if the vector is empty.
    #[inline]
    fn head(&self) -> &'self T { head(*self) }
    /// Returns the first element of a vector
    #[inline]
    fn head_opt(&self) -> Option<&'self T> { head_opt(*self) }
    /// Returns all but the first element of a vector
    #[inline]
    fn tail(&self) -> &'self [T] { tail(*self) }
    /// Returns all but the first `n' elements of a vector
    #[inline]
    fn tailn(&self, n: uint) -> &'self [T] { tailn(*self, n) }
    /// Returns all but the last elemnt of a vector
    #[inline]
    fn init(&self) -> &'self [T] { init(*self) }
    /// Returns all but the last `n' elemnts of a vector
    #[inline]
    fn initn(&self, n: uint) -> &'self [T] { initn(*self, n) }
    /// Returns the last element of a `v`, failing if the vector is empty.
    #[inline]
    fn last(&self) -> &'self T { last(*self) }
    /// Returns the last element of a `v`, failing if the vector is empty.
    #[inline]
    fn last_opt(&self) -> Option<&'self T> { last_opt(*self) }
    /**
     * Find the last index matching some predicate
     *
     * Apply function `f` to each element of `v` in reverse order. When
     * function `f` returns true then an option containing the index is
     * returned. If `f` matches no elements then none is returned.
     */
    #[inline]
    fn rposition(&self, f: &fn(t: &T) -> bool) -> Option<uint> {
        rposition(*self, f)
    }
    /// Apply a function to each element of a vector and return the results
    #[inline]
    fn map<U>(&self, f: &fn(t: &T) -> U) -> ~[U] { map(*self, f) }
    /**
     * Apply a function to the index and value of each element in the vector
     * and return the results
     */
    fn mapi<U>(&self, f: &fn(uint, t: &T) -> U) -> ~[U] {
        mapi(*self, f)
    }
    // Like `map`, but the closure receives a reference with the receiver's
    // lifetime (elements borrowed by index rather than via the iterator).
    #[inline]
    fn map_r<U>(&self, f: &fn(x: &T) -> U) -> ~[U] {
        let mut r = ~[];
        let mut i = 0;
        while i < self.len() {
            r.push(f(&self[i]));
            i += 1;
        }
        r
    }
    /**
     * Apply a function to each element of a vector and return a concatenation
     * of each result vector
     */
    #[inline]
    fn flat_map<U>(&self, f: &fn(t: &T) -> ~[U]) -> ~[U] {
        flat_map(*self, f)
    }
    /**
     * Apply a function to each element of a vector and return the results
     *
     * If function `f` returns `none` then that element is excluded from
     * the resulting vector.
     */
    #[inline]
    fn filter_mapped<U:Copy>(&self, f: &fn(t: &T) -> Option<U>) -> ~[U] {
        filter_mapped(*self, f)
    }
    /// Returns a pointer to the element at the given index, without doing
    /// bounds checking.
    #[inline]
    unsafe fn unsafe_ref(&self, index: uint) -> *T {
        // Decompose the slice into its raw (pointer, length) pair and
        // offset the pointer directly; no bounds check is performed.
        let (ptr, _): (*T, uint) = transmute(*self);
        ptr.offset(index)
    }
}
#[allow(missing_doc)]
/// Search methods that only require element equality.
pub trait ImmutableEqVector<T:Eq> {
    /// First index whose element equals `*t`, if any.
    fn position_elem(&self, t: &T) -> Option<uint>;
    /// Last index whose element equals `*t`, if any.
    fn rposition_elem(&self, t: &T) -> Option<uint>;
}
// Thin forwarders to the free `position_elem`/`rposition_elem` functions.
impl<'self,T:Eq> ImmutableEqVector<T> for &'self [T] {
    /// Find the first index containing a matching value
    #[inline]
    fn position_elem(&self, x: &T) -> Option<uint> {
        position_elem(*self, x)
    }
    /// Find the last index containing a matching value
    #[inline]
    fn rposition_elem(&self, t: &T) -> Option<uint> {
        rposition_elem(*self, t)
    }
}
#[allow(missing_doc)]
/// Non-mutating methods that need to copy elements out of the vector.
pub trait ImmutableCopyableVector<T> {
    fn filtered(&self, f: &fn(&T) -> bool) -> ~[T];
    fn rfind(&self, f: &fn(t: &T) -> bool) -> Option<T>;
    fn partitioned(&self, f: &fn(&T) -> bool) -> (~[T], ~[T]);
    unsafe fn unsafe_get(&self, elem: uint) -> T;
}
/// Extension methods for vectors
// Thin forwarders to the corresponding free functions, plus `unsafe_get`,
// which copies through an unchecked pointer.
impl<'self,T:Copy> ImmutableCopyableVector<T> for &'self [T] {
    /**
     * Construct a new vector from the elements of a vector for which some
     * predicate holds.
     *
     * Apply function `f` to each element of `v` and return a vector
     * containing only those elements for which `f` returned true.
     */
    #[inline]
    fn filtered(&self, f: &fn(t: &T) -> bool) -> ~[T] {
        filtered(*self, f)
    }
    /**
     * Search for the last element that matches a given predicate
     *
     * Apply function `f` to each element of `v` in reverse order. When
     * function `f` returns true then an option containing the element is
     * returned. If `f` matches no elements then none is returned.
     */
    #[inline]
    fn rfind(&self, f: &fn(t: &T) -> bool) -> Option<T> {
        rfind(*self, f)
    }
    /**
     * Partitions the vector into those that satisfies the predicate, and
     * those that do not.
     */
    #[inline]
    fn partitioned(&self, f: &fn(&T) -> bool) -> (~[T], ~[T]) {
        partitioned(*self, f)
    }
    /// Returns the element at the given index, without doing bounds checking.
    #[inline]
    unsafe fn unsafe_get(&self, index: uint) -> T {
        copy *self.unsafe_ref(index)
    }
}
#[allow(missing_doc)]
/// Methods that require ownership of the vector (mutation of length or
/// consumption of the whole vector).
pub trait OwnedVector<T> {
    fn push(&mut self, t: T);
    fn push_all_move(&mut self, rhs: ~[T]);
    fn pop(&mut self) -> T;
    fn shift(&mut self) -> T;
    fn unshift(&mut self, x: T);
    fn insert(&mut self, i: uint, x:T);
    fn remove(&mut self, i: uint) -> T;
    fn swap_remove(&mut self, index: uint) -> T;
    fn truncate(&mut self, newlen: uint);
    fn retain(&mut self, f: &fn(t: &T) -> bool);
    fn consume(self, f: &fn(uint, v: T));
    fn consume_reverse(self, f: &fn(uint, v: T));
    fn filter(self, f: &fn(t: &T) -> bool) -> ~[T];
    fn partition(self, f: &fn(&T) -> bool) -> (~[T], ~[T]);
    fn grow_fn(&mut self, n: uint, op: &fn(uint) -> T);
}
// Every method is a thin forwarder to the free function of the same name
// defined earlier in this module.
impl<T> OwnedVector<T> for ~[T] {
    #[inline]
    fn push(&mut self, t: T) {
        push(self, t);
    }
    #[inline]
    fn push_all_move(&mut self, rhs: ~[T]) {
        push_all_move(self, rhs);
    }
    #[inline]
    fn pop(&mut self) -> T {
        pop(self)
    }
    #[inline]
    fn shift(&mut self) -> T {
        shift(self)
    }
    #[inline]
    fn unshift(&mut self, x: T) {
        unshift(self, x)
    }
    #[inline]
    fn insert(&mut self, i: uint, x:T) {
        insert(self, i, x)
    }
    #[inline]
    fn remove(&mut self, i: uint) -> T {
        remove(self, i)
    }
    #[inline]
    fn swap_remove(&mut self, index: uint) -> T {
        swap_remove(self, index)
    }
    #[inline]
    fn truncate(&mut self, newlen: uint) {
        truncate(self, newlen);
    }
    #[inline]
    fn retain(&mut self, f: &fn(t: &T) -> bool) {
        retain(self, f);
    }
    #[inline]
    fn consume(self, f: &fn(uint, v: T)) {
        consume(self, f)
    }
    #[inline]
    fn consume_reverse(self, f: &fn(uint, v: T)) {
        consume_reverse(self, f)
    }
    #[inline]
    fn filter(self, f: &fn(&T) -> bool) -> ~[T] {
        filter(self, f)
    }
    /**
     * Partitions the vector into those that satisfies the predicate, and
     * those that do not.
     */
    #[inline]
    fn partition(self, f: &fn(&T) -> bool) -> (~[T], ~[T]) {
        partition(self, f)
    }
    #[inline]
    fn grow_fn(&mut self, n: uint, op: &fn(uint) -> T) {
        grow_fn(self, n, op);
    }
}
impl<T> Mutable for ~[T] {
    /// Clear the vector, removing all values.
    // Truncating to zero drops every element but keeps the allocation.
    fn clear(&mut self) { self.truncate(0) }
}
#[allow(missing_doc)]
/// Growth methods that need to copy elements into the vector.
pub trait OwnedCopyableVector<T:Copy> {
    /// Append copies of all of `rhs`'s elements.
    fn push_all(&mut self, rhs: &const [T]);
    /// Append `n` copies of `*initval`.
    fn grow(&mut self, n: uint, initval: &T);
    /// Set `self[index] = val`, growing with `*initval` copies if needed.
    fn grow_set(&mut self, index: uint, initval: &T, val: T);
}
// Thin forwarders to the corresponding free functions.
impl<T:Copy> OwnedCopyableVector<T> for ~[T] {
    #[inline]
    fn push_all(&mut self, rhs: &const [T]) {
        push_all(self, rhs);
    }
    #[inline]
    fn grow(&mut self, n: uint, initval: &T) {
        grow(self, n, initval);
    }
    #[inline]
    fn grow_set(&mut self, index: uint, initval: &T, val: T) {
        grow_set(self, index, initval, val);
    }
}
#[allow(missing_doc)]
/// Methods on owned vectors that only require element equality.
trait OwnedEqVector<T:Eq> {
    /// Remove consecutive duplicate elements in place.
    fn dedup(&mut self);
}
// Thin forwarder to the free `dedup` function.
impl<T:Eq> OwnedEqVector<T> for ~[T] {
    #[inline]
    fn dedup(&mut self) {
        dedup(self)
    }
}
#[allow(missing_doc)]
/// In-place mutation methods on mutable slices.
pub trait MutableVector<'self, T> {
    fn mut_slice(self, start: uint, end: uint) -> &'self mut [T];
    fn mut_iter(self) -> VecMutIterator<'self, T>;
    fn mut_rev_iter(self) -> VecMutRevIterator<'self, T>;
    /**
     * Consumes `src` and moves as many elements as it can into `self`
     * from the range [start,end).
     *
     * Returns the number of elements copied (the shorter of self.len()
     * and end - start).
     *
     * # Arguments
     *
     * * src - A mutable vector of `T`
     * * start - The index into `src` to start copying from
     * * end - The index into `str` to stop copying from
     */
    fn move_from(self, src: ~[T], start: uint, end: uint) -> uint;
    unsafe fn unsafe_mut_ref(&self, index: uint) -> *mut T;
    unsafe fn unsafe_set(&self, index: uint, val: T);
}
impl<'self,T> MutableVector<'self, T> for &'self mut [T] {
    #[inline]
    fn mut_slice(self, start: uint, end: uint) -> &'self mut [T] {
        mut_slice(self, start, end)
    }
    #[inline]
    fn mut_iter(self) -> VecMutIterator<'self, T> {
        unsafe {
            // Raw [ptr, end) pair; `lifetime` is a phantom borrow pinning
            // the iterator to the slice's lifetime.
            let p = vec::raw::to_mut_ptr(self);
            VecMutIterator{ptr: p, end: p.offset(self.len()),
                           lifetime: cast::transmute(p)}
        }
    }
    fn mut_rev_iter(self) -> VecMutRevIterator<'self, T> {
        unsafe {
            // Reverse iteration runs from the last element down to the
            // one-before-the-start sentinel.
            let p = vec::raw::to_mut_ptr(self);
            VecMutRevIterator{ptr: p.offset(self.len() - 1),
                              end: p.offset(-1),
                              lifetime: cast::transmute(p)}
        }
    }
    #[inline]
    fn move_from(self, mut src: ~[T], start: uint, end: uint) -> uint {
        // Swap elements between destination and source; `src` is dropped
        // afterwards, taking the swapped-out originals with it.
        for self.mut_iter().zip(src.mut_slice(start, end).mut_iter()).advance |(a, b)| {
            util::swap(a, b);
        }
        cmp::min(self.len(), end-start)
    }
    #[inline]
    unsafe fn unsafe_mut_ref(&self, index: uint) -> *mut T {
        // View the slice as its raw (pointer, length) pair and offset the
        // pointer directly; no bounds check is performed.
        let pair_ptr: &(*mut T, uint) = transmute(self);
        let (ptr, _) = *pair_ptr;
        ptr.offset(index)
    }
    #[inline]
    unsafe fn unsafe_set(&self, index: uint, val: T) {
        // NOTE: this overwrites through a raw pointer; the previous value
        // at `index` is dropped by the normal assignment semantics.
        *self.unsafe_mut_ref(index) = val;
    }
}
/// Trait for ~[T] where T is Cloneable
pub trait MutableCloneableVector<T> {
    /// Copies as many elements from `src` as it can into `self`
    /// (the shorter of self.len() and src.len()). Returns the number of elements copied.
    fn copy_from(self, &[T]) -> uint;
}
impl<'self, T:Clone> MutableCloneableVector<T> for &'self mut [T] {
    #[inline]
    fn copy_from(self, src: &[T]) -> uint {
        // `zip` stops at the shorter of the two, so only the overlapping
        // prefix is overwritten.
        for self.mut_iter().zip(src.iter()).advance |(a, b)| {
            *a = b.clone();
        }
        cmp::min(self.len(), src.len())
    }
}
/**
 * Constructs a vector from an unsafe pointer to a buffer
 *
 * # Arguments
 *
 * * ptr - An unsafe pointer to a buffer of `T`
 * * elts - The number of elements in the buffer
 */
// Wrapper for fn in raw: needs to be called by net_tcp::on_tcp_read_cb
pub unsafe fn from_buf<T>(ptr: *T, elts: uint) -> ~[T] {
    // Caller must guarantee `ptr` points to at least `elts` valid elements.
    raw::from_buf_raw(ptr, elts)
}
/// The internal 'unboxed' representation of a vector
#[allow(missing_doc)]
pub struct UnboxedVecRepr {
    // Number of bytes currently in use (length * element size).
    fill: uint,
    // Number of bytes allocated.
    alloc: uint,
    // First byte of the element storage; the rest follows inline.
    data: u8
}
/// Unsafe operations
pub mod raw {
use cast::transmute;
use kinds::Copy;
use managed;
use option::{None, Some};
use ptr;
use sys;
use unstable::intrinsics;
use vec::{UnboxedVecRepr, as_const_buf, as_mut_buf, with_capacity};
use util;
/// The internal representation of a (boxed) vector
#[allow(missing_doc)]
pub struct VecRepr {
box_header: managed::raw::BoxHeaderRepr,
unboxed: UnboxedVecRepr
}
/// The internal representation of a slice
pub struct SliceRepr {
/// Pointer to the base of this slice
data: *u8,
/// The length of the slice
len: uint
}
/**
* Sets the length of a vector
*
* This will explicitly set the size of the vector, without actually
* modifing its buffers, so it is up to the caller to ensure that
* the vector is actually the specified size.
*/
#[inline]
pub unsafe fn set_len<T>(v: &mut ~[T], new_len: uint) {
let repr: **mut VecRepr = transmute(v);
(**repr).unboxed.fill = new_len * sys::nonzero_size_of::<T>();
}
/**
* Returns an unsafe pointer to the vector's buffer
*
* The caller must ensure that the vector outlives the pointer this
* function returns, or else it will end up pointing to garbage.
*
* Modifying the vector may cause its buffer to be reallocated, which
* would also make any pointers to it invalid.
*/
#[inline]
pub fn to_ptr<T>(v: &[T]) -> *T {
unsafe {
let repr: **SliceRepr = transmute(&v);
transmute(&((**repr).data))
}
}
/** see `to_ptr()` */
#[inline]
pub fn to_const_ptr<T>(v: &const [T]) -> *const T {
unsafe {
let repr: **SliceRepr = transmute(&v);
transmute(&((**repr).data))
}
}
/** see `to_ptr()` */
#[inline]
pub fn to_mut_ptr<T>(v: &mut [T]) -> *mut T {
unsafe {
let repr: **SliceRepr = transmute(&v);
transmute(&((**repr).data))
}
}
/**
* Form a slice from a pointer and length (as a number of units,
* not bytes).
*/
#[inline]
pub unsafe fn buf_as_slice<T,U>(p: *T,
len: uint,
f: &fn(v: &[T]) -> U) -> U {
let pair = (p, len * sys::nonzero_size_of::<T>());
let v : *(&'blk [T]) = transmute(&pair);
f(*v)
}
/**
 * Form a slice from a pointer and length (as a number of units,
 * not bytes).
 *
 * Mutable variant of `buf_as_slice`; same validity requirements apply.
 */
#[inline]
pub unsafe fn mut_buf_as_slice<T,U>(p: *mut T,
                                    len: uint,
                                    f: &fn(v: &mut [T]) -> U) -> U {
    // Same trick as `buf_as_slice`, but yielding a mutable slice.
    let pair = (p, len * sys::nonzero_size_of::<T>());
    let v : *(&'blk mut [T]) = transmute(&pair);
    f(*v)
}
/**
 * Unchecked vector indexing.
 *
 * No bounds check is performed; the caller must guarantee `i < v.len()`.
 */
#[inline]
pub unsafe fn get<T:Copy>(v: &const [T], i: uint) -> T {
    // Offset into the raw buffer and copy the element out.
    as_const_buf(v, |p, _len| copy *ptr::const_offset(p, i))
}
/**
 * Unchecked vector index assignment. Does not drop the
 * old value and hence is only suitable when the vector
 * is newly allocated.
 */
#[inline]
pub unsafe fn init_elem<T>(v: &mut [T], i: uint, val: T) {
    // Stash `val` in an Option so it can be moved out of the enclosing
    // closure (old closures cannot move captured values directly).
    let mut box = Some(val);
    do as_mut_buf(v) |p, _len| {
        let box2 = util::replace(&mut box, None);
        // `move_val_init` writes into the slot without reading or
        // dropping whatever bytes were there before.
        intrinsics::move_val_init(&mut(*ptr::mut_offset(p, i)),
                                  box2.unwrap());
    }
}
/**
 * Constructs a vector from an unsafe pointer to a buffer
 *
 * # Arguments
 *
 * * ptr - An unsafe pointer to a buffer of `T`
 * * elts - The number of elements in the buffer
 */
// Was in raw, but needs to be called by net_tcp::on_tcp_read_cb
#[inline]
pub unsafe fn from_buf_raw<T>(ptr: *T, elts: uint) -> ~[T] {
    // Allocate exactly `elts` slots, mark them as filled, then bulk-copy
    // the source buffer into place.
    let mut dst = with_capacity(elts);
    set_len(&mut dst, elts);
    as_mut_buf(dst, |p_dst, _len_dst| ptr::copy_memory(p_dst, ptr, elts));
    dst
}
/**
 * Copies data from one vector to another.
 *
 * Copies `count` bytes from `src` to `dst`. The source and destination
 * may overlap.
 */
#[inline]
pub unsafe fn copy_memory<T>(dst: &mut [T], src: &const [T],
                             count: uint) {
    // Bounds-check both slices up front; the raw copy below does not.
    assert!(dst.len() >= count);
    assert!(src.len() >= count);
    do as_mut_buf(dst) |p_dst, _len_dst| {
        do as_const_buf(src) |p_src, _len_src| {
            ptr::copy_memory(p_dst, p_src, count)
        }
    }
}
}
/// Operations on `[u8]`
pub mod bytes {
    use libc;
    use uint;
    use vec::raw;
    use vec;
    use ptr;

    /// A trait for mutable operations on `[u8]`
    pub trait MutableByteVector {
        /// Sets all bytes of the receiver to the given value.
        pub fn set_memory(self, value: u8);
    }

    impl<'self> MutableByteVector for &'self mut [u8] {
        #[inline]
        fn set_memory(self, value: u8) {
            // Delegates to `ptr::set_memory` over the raw buffer.
            do vec::as_mut_buf(self) |p, len| {
                unsafe { ptr::set_memory(p, value, len) };
            }
        }
    }

    /// Bytewise string comparison
    pub fn memcmp(a: &~[u8], b: &~[u8]) -> int {
        let a_len = a.len();
        let b_len = b.len();
        // Only compare the common prefix with libc's memcmp.
        let n = uint::min(a_len, b_len) as libc::size_t;
        let r = unsafe {
            libc::memcmp(raw::to_ptr(*a) as *libc::c_void,
                         raw::to_ptr(*b) as *libc::c_void, n) as int
        };
        // If the common prefix is equal, the shorter vector orders first.
        if r != 0 { r } else {
            if a_len == b_len {
                0
            } else if a_len < b_len {
                -1
            } else {
                1
            }
        }
    }

    /// Bytewise less than
    pub fn lt(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) < 0 }
    /// Bytewise less than or equal
    pub fn le(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) <= 0 }
    /// Bytewise equality
    pub fn eq(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) == 0 }
    /// Bytewise inequality
    pub fn ne(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) != 0 }
    /// Bytewise greater than or equal
    pub fn ge(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) >= 0 }
    /// Bytewise greater than
    pub fn gt(a: &~[u8], b: &~[u8]) -> bool { memcmp(a, b) > 0 }

    /**
     * Copies data from one vector to another.
     *
     * Copies `count` bytes from `src` to `dst`. The source and destination
     * may overlap.
     */
    #[inline]
    pub fn copy_memory(dst: &mut [u8], src: &const [u8], count: uint) {
        // Bound checks are done at vec::raw::copy_memory.
        unsafe { vec::raw::copy_memory(dst, src, count) }
    }
}
// Deep clone for owned vectors: builds a new vector by cloning each element.
impl<A:Clone> Clone for ~[A] {
    #[inline]
    fn clone(&self) -> ~[A] {
        self.map(|item| item.clone())
    }
}
// This works because every lifetime is a sub-lifetime of 'static
// Zero value for borrowed slices: the empty slice.
impl<'self, A> Zero for &'self [A] {
    fn zero() -> &'self [A] { &'self [] }
    fn is_zero(&self) -> bool { self.is_empty() }
}
// Zero value for owned vectors: the empty vector.
impl<A> Zero for ~[A] {
    fn zero() -> ~[A] { ~[] }
    fn is_zero(&self) -> bool { self.len() == 0 }
}
// Zero value for managed vectors: the empty vector.
impl<A> Zero for @[A] {
    fn zero() -> @[A] { @[] }
    fn is_zero(&self) -> bool { self.len() == 0 }
}
// Generates the `Iterator` impl shared by the four pointer-based vector
// iterators below. `$step` is +1 for forward iteration, -1 for reverse.
macro_rules! iterator {
    /* FIXME: #4375 Cannot attach documentation/attributes to a macro generated struct.
    (struct $name:ident -> $ptr:ty, $elem:ty) => {
        pub struct $name<'self, T> {
            priv ptr: $ptr,
            priv end: $ptr,
            priv lifetime: $elem // FIXME: #5922
        }
    };*/
    (impl $name:ident -> $elem:ty, $step:expr) => {
        // could be implemented with &[T] with .slice(), but this avoids bounds checks
        impl<'self, T> Iterator<$elem> for $name<'self, T> {
            #[inline]
            fn next(&mut self) -> Option<$elem> {
                unsafe {
                    // Iteration ends when the cursor meets the end pointer.
                    if self.ptr == self.end {
                        None
                    } else {
                        let old = self.ptr;
                        self.ptr = self.ptr.offset($step);
                        Some(cast::transmute(old))
                    }
                }
            }
            #[inline]
            fn size_hint(&self) -> (Option<uint>, Option<uint>) {
                // Pointer arithmetic gives the exact remaining count.
                let exact = Some(((self.end as uint) - (self.ptr as uint)) / size_of::<$elem>());
                (exact, exact)
            }
        }
    }
}
//iterator!{struct VecIterator -> *T, &'self T}
/// An iterator for iterating over a vector
// Struct written out by hand instead of via the macro; see FIXME #4375.
pub struct VecIterator<'self, T> {
    priv ptr: *T,
    priv end: *T,
    priv lifetime: &'self T // FIXME: #5922
}
iterator!{impl VecIterator -> &'self T, 1}
//iterator!{struct VecRevIterator -> *T, &'self T}
/// An iterator for iterating over a vector in reverse
// Same shape as VecIterator; the macro's -1 step walks backwards.
pub struct VecRevIterator<'self, T> {
    priv ptr: *T,
    priv end: *T,
    priv lifetime: &'self T // FIXME: #5922
}
iterator!{impl VecRevIterator -> &'self T, -1}
//iterator!{struct VecMutIterator -> *mut T, &'self mut T}
/// An iterator for mutating the elements of a vector
pub struct VecMutIterator<'self, T> {
    priv ptr: *mut T,
    priv end: *mut T,
    priv lifetime: &'self mut T // FIXME: #5922
}
iterator!{impl VecMutIterator -> &'self mut T, 1}
//iterator!{struct VecMutRevIterator -> *mut T, &'self mut T}
/// An iterator for mutating the elements of a vector in reverse
pub struct VecMutRevIterator<'self, T> {
    priv ptr: *mut T,
    priv end: *mut T,
    priv lifetime: &'self mut T // FIXME: #5922
}
iterator!{impl VecMutRevIterator -> &'self mut T, -1}
// Build an owned vector from an internal (callback-style) iterator by
// pushing every yielded element.
impl<T> FromIter<T> for ~[T]{
    #[inline]
    pub fn from_iter(iter: &fn(f: &fn(T) -> bool) -> bool) -> ~[T] {
        let mut v = ~[];
        for iter |x| { v.push(x) }
        v
    }
}
// Build an owned vector by draining an external iterator.
// (A size_hint-based preallocation variant is blocked on an ICE; see the
// FIXME #7341 block below in the original file.)
impl<A, T: Iterator<A>> FromIterator<A, T> for ~[A] {
    pub fn from_iterator(iterator: &mut T) -> ~[A] {
        let mut xs = ~[];
        for iterator.advance |x| {
            xs.push(x);
        }
        xs
    }
}
/* FIXME: #7341 - ICE
impl<A, T: Iterator<A>> FromIterator<A, T> for ~[A] {
pub fn from_iterator(iterator: &mut T) -> ~[A] {
let (lower, _) = iterator.size_hint();
let mut xs = with_capacity(lower.get_or_zero());
for iterator.advance |x| {
xs.push(x);
}
xs
}
}
*/
#[cfg(test)]
mod tests {
use option::{None, Option, Some};
use sys;
use vec::*;
use cmp::*;
fn square(n: uint) -> uint { n * n }
fn square_ref(n: &uint) -> uint { square(*n) }
fn is_three(n: &uint) -> bool { *n == 3u }
fn is_odd(n: &uint) -> bool { *n % 2u == 1u }
fn is_equal(x: &uint, y:&uint) -> bool { *x == *y }
fn square_if_odd_r(n: &uint) -> Option<uint> {
if *n % 2u == 1u { Some(*n * *n) } else { None }
}
fn square_if_odd_v(n: uint) -> Option<uint> {
if n % 2u == 1u { Some(n * n) } else { None }
}
fn add(x: uint, y: &uint) -> uint { x + *y }
#[test]
fn test_unsafe_ptrs() {
unsafe {
// Test on-stack copy-from-buf.
let a = ~[1, 2, 3];
let mut ptr = raw::to_ptr(a);
let b = from_buf(ptr, 3u);
assert_eq!(b.len(), 3u);
assert_eq!(b[0], 1);
assert_eq!(b[1], 2);
assert_eq!(b[2], 3);
// Test on-heap copy-from-buf.
let c = ~[1, 2, 3, 4, 5];
ptr = raw::to_ptr(c);
let d = from_buf(ptr, 5u);
assert_eq!(d.len(), 5u);
assert_eq!(d[0], 1);
assert_eq!(d[1], 2);
assert_eq!(d[2], 3);
assert_eq!(d[3], 4);
assert_eq!(d[4], 5);
}
}
#[test]
fn test_from_fn() {
// Test on-stack from_fn.
let mut v = from_fn(3u, square);
assert_eq!(v.len(), 3u);
assert_eq!(v[0], 0u);
assert_eq!(v[1], 1u);
assert_eq!(v[2], 4u);
// Test on-heap from_fn.
v = from_fn(5u, square);
assert_eq!(v.len(), 5u);
assert_eq!(v[0], 0u);
assert_eq!(v[1], 1u);
assert_eq!(v[2], 4u);
assert_eq!(v[3], 9u);
assert_eq!(v[4], 16u);
}
#[test]
fn test_from_elem() {
// Test on-stack from_elem.
let mut v = from_elem(2u, 10u);
assert_eq!(v.len(), 2u);
assert_eq!(v[0], 10u);
assert_eq!(v[1], 10u);
// Test on-heap from_elem.
v = from_elem(6u, 20u);
assert_eq!(v[0], 20u);
assert_eq!(v[1], 20u);
assert_eq!(v[2], 20u);
assert_eq!(v[3], 20u);
assert_eq!(v[4], 20u);
assert_eq!(v[5], 20u);
}
#[test]
fn test_is_empty() {
let xs: [int, ..0] = [];
assert!(xs.is_empty());
assert!(![0].is_empty());
}
#[test]
fn test_len_divzero() {
type Z = [i8, ..0];
let v0 : &[Z] = &[];
let v1 : &[Z] = &[[]];
let v2 : &[Z] = &[[], []];
assert_eq!(sys::size_of::<Z>(), 0);
assert_eq!(v0.len(), 0);
assert_eq!(v1.len(), 1);
assert_eq!(v2.len(), 2);
}
#[test]
fn test_head() {
let mut a = ~[11];
assert_eq!(a.head(), &11);
a = ~[11, 12];
assert_eq!(a.head(), &11);
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_head_empty() {
let a: ~[int] = ~[];
a.head();
}
#[test]
fn test_head_opt() {
let mut a = ~[];
assert_eq!(a.head_opt(), None);
a = ~[11];
assert_eq!(a.head_opt().unwrap(), &11);
a = ~[11, 12];
assert_eq!(a.head_opt().unwrap(), &11);
}
#[test]
fn test_tail() {
let mut a = ~[11];
assert_eq!(a.tail(), &[]);
a = ~[11, 12];
assert_eq!(a.tail(), &[12]);
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_tail_empty() {
let a: ~[int] = ~[];
a.tail();
}
#[test]
fn test_tailn() {
let mut a = ~[11, 12, 13];
assert_eq!(a.tailn(0), &[11, 12, 13]);
a = ~[11, 12, 13];
assert_eq!(a.tailn(2), &[13]);
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_tailn_empty() {
let a: ~[int] = ~[];
a.tailn(2);
}
#[test]
fn test_init() {
let mut a = ~[11];
assert_eq!(a.init(), &[]);
a = ~[11, 12];
assert_eq!(a.init(), &[11]);
}
// Fixed attribute: was `#[init]` (a typo), which meant this test was never
// registered with the test harness and never ran.
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_init_empty() {
    // `init()` on an empty vector has no elements to return, so it fails.
    let a: ~[int] = ~[];
    a.init();
}
#[test]
fn test_initn() {
let mut a = ~[11, 12, 13];
assert_eq!(a.initn(0), &[11, 12, 13]);
a = ~[11, 12, 13];
assert_eq!(a.initn(2), &[11]);
}
// Fixed attribute: was `#[init]` (a typo), which meant this test was never
// registered with the test harness and never ran.
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_initn_empty() {
    // `initn(2)` on an empty vector cannot drop two trailing elements.
    let a: ~[int] = ~[];
    a.initn(2);
}
#[test]
fn test_last() {
let mut a = ~[11];
assert_eq!(a.last(), &11);
a = ~[11, 12];
assert_eq!(a.last(), &12);
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_last_empty() {
let a: ~[int] = ~[];
a.last();
}
#[test]
fn test_last_opt() {
let mut a = ~[];
assert_eq!(a.last_opt(), None);
a = ~[11];
assert_eq!(a.last_opt().unwrap(), &11);
a = ~[11, 12];
assert_eq!(a.last_opt().unwrap(), &12);
}
#[test]
fn test_slice() {
// Test fixed length vector.
let vec_fixed = [1, 2, 3, 4];
let v_a = slice(vec_fixed, 1u, vec_fixed.len()).to_owned();
assert_eq!(v_a.len(), 3u);
assert_eq!(v_a[0], 2);
assert_eq!(v_a[1], 3);
assert_eq!(v_a[2], 4);
// Test on stack.
let vec_stack = &[1, 2, 3];
let v_b = slice(vec_stack, 1u, 3u).to_owned();
assert_eq!(v_b.len(), 2u);
assert_eq!(v_b[0], 2);
assert_eq!(v_b[1], 3);
// Test on managed heap.
let vec_managed = @[1, 2, 3, 4, 5];
let v_c = slice(vec_managed, 0u, 3u).to_owned();
assert_eq!(v_c.len(), 3u);
assert_eq!(v_c[0], 1);
assert_eq!(v_c[1], 2);
assert_eq!(v_c[2], 3);
// Test on exchange heap.
let vec_unique = ~[1, 2, 3, 4, 5, 6];
let v_d = slice(vec_unique, 1u, 6u).to_owned();
assert_eq!(v_d.len(), 5u);
assert_eq!(v_d[0], 2);
assert_eq!(v_d[1], 3);
assert_eq!(v_d[2], 4);
assert_eq!(v_d[3], 5);
assert_eq!(v_d[4], 6);
}
#[test]
fn test_pop() {
// Test on-heap pop.
let mut v = ~[1, 2, 3, 4, 5];
let e = v.pop();
assert_eq!(v.len(), 4u);
assert_eq!(v[0], 1);
assert_eq!(v[1], 2);
assert_eq!(v[2], 3);
assert_eq!(v[3], 4);
assert_eq!(e, 5);
}
#[test]
fn test_swap_remove() {
let mut v = ~[1, 2, 3, 4, 5];
let mut e = v.swap_remove(0);
assert_eq!(v.len(), 4);
assert_eq!(e, 1);
assert_eq!(v[0], 5);
e = v.swap_remove(3);
assert_eq!(v.len(), 3);
assert_eq!(e, 4);
assert_eq!(v[0], 5);
assert_eq!(v[1], 2);
assert_eq!(v[2], 3);
}
#[test]
fn test_swap_remove_noncopyable() {
// Tests that we don't accidentally run destructors twice.
let mut v = ~[::unstable::sync::exclusive(()),
::unstable::sync::exclusive(()),
::unstable::sync::exclusive(())];
let mut _e = v.swap_remove(0);
assert_eq!(v.len(), 2);
_e = v.swap_remove(1);
assert_eq!(v.len(), 1);
_e = v.swap_remove(0);
assert_eq!(v.len(), 0);
}
#[test]
fn test_push() {
// Test on-stack push().
let mut v = ~[];
v.push(1);
assert_eq!(v.len(), 1u);
assert_eq!(v[0], 1);
// Test on-heap push().
v.push(2);
assert_eq!(v.len(), 2u);
assert_eq!(v[0], 1);
assert_eq!(v[1], 2);
}
#[test]
fn test_grow() {
// Test on-stack grow().
let mut v = ~[];
v.grow(2u, &1);
assert_eq!(v.len(), 2u);
assert_eq!(v[0], 1);
assert_eq!(v[1], 1);
// Test on-heap grow().
v.grow(3u, &2);
assert_eq!(v.len(), 5u);
assert_eq!(v[0], 1);
assert_eq!(v[1], 1);
assert_eq!(v[2], 2);
assert_eq!(v[3], 2);
assert_eq!(v[4], 2);
}
#[test]
fn test_grow_fn() {
let mut v = ~[];
v.grow_fn(3u, square);
assert_eq!(v.len(), 3u);
assert_eq!(v[0], 0u);
assert_eq!(v[1], 1u);
assert_eq!(v[2], 4u);
}
#[test]
fn test_grow_set() {
let mut v = ~[1, 2, 3];
v.grow_set(4u, &4, 5);
assert_eq!(v.len(), 5u);
assert_eq!(v[0], 1);
assert_eq!(v[1], 2);
assert_eq!(v[2], 3);
assert_eq!(v[3], 4);
assert_eq!(v[4], 5);
}
#[test]
fn test_truncate() {
let mut v = ~[@6,@5,@4];
v.truncate(1);
assert_eq!(v.len(), 1);
assert_eq!(*(v[0]), 6);
// If the unsafe block didn't drop things properly, we blow up here.
}
#[test]
fn test_clear() {
let mut v = ~[@6,@5,@4];
v.clear();
assert_eq!(v.len(), 0);
// If the unsafe block didn't drop things properly, we blow up here.
}
#[test]
fn test_dedup() {
fn case(a: ~[uint], b: ~[uint]) {
let mut v = a;
v.dedup();
assert_eq!(v, b);
}
case(~[], ~[]);
case(~[1], ~[1]);
case(~[1,1], ~[1]);
case(~[1,2,3], ~[1,2,3]);
case(~[1,1,2,3], ~[1,2,3]);
case(~[1,2,2,3], ~[1,2,3]);
case(~[1,2,3,3], ~[1,2,3]);
case(~[1,1,2,2,2,3,3], ~[1,2,3]);
}
#[test]
fn test_dedup_unique() {
let mut v0 = ~[~1, ~1, ~2, ~3];
v0.dedup();
let mut v1 = ~[~1, ~2, ~2, ~3];
v1.dedup();
let mut v2 = ~[~1, ~2, ~3, ~3];
v2.dedup();
/*
* If the ~pointers were leaked or otherwise misused, valgrind and/or
* rustrt should raise errors.
*/
}
#[test]
fn test_dedup_shared() {
let mut v0 = ~[@1, @1, @2, @3];
v0.dedup();
let mut v1 = ~[@1, @2, @2, @3];
v1.dedup();
let mut v2 = ~[@1, @2, @3, @3];
v2.dedup();
/*
* If the @pointers were leaked or otherwise misused, valgrind and/or
* rustrt should raise errors.
*/
}
#[test]
fn test_map() {
// Test on-stack map.
let mut v = ~[1u, 2u, 3u];
let mut w = map(v, square_ref);
assert_eq!(w.len(), 3u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 4u);
assert_eq!(w[2], 9u);
// Test on-heap map.
v = ~[1u, 2u, 3u, 4u, 5u];
w = map(v, square_ref);
assert_eq!(w.len(), 5u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 4u);
assert_eq!(w[2], 9u);
assert_eq!(w[3], 16u);
assert_eq!(w[4], 25u);
}
#[test]
fn test_map_zip() {
fn times(x: &int, y: &int) -> int { *x * *y }
let f = times;
let v0 = ~[1, 2, 3, 4, 5];
let v1 = ~[5, 4, 3, 2, 1];
let u = map_zip::<int, int, int>(v0, v1, f);
let mut i = 0;
while i < 5 { assert!(v0[i] * v1[i] == u[i]); i += 1; }
}
#[test]
fn test_filter_mapped() {
// Test on-stack filter-map.
let mut v = ~[1u, 2u, 3u];
let mut w = filter_mapped(v, square_if_odd_r);
assert_eq!(w.len(), 2u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 9u);
// Test on-heap filter-map.
v = ~[1u, 2u, 3u, 4u, 5u];
w = filter_mapped(v, square_if_odd_r);
assert_eq!(w.len(), 3u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 9u);
assert_eq!(w[2], 25u);
fn halve(i: &int) -> Option<int> {
if *i % 2 == 0 {
Some::<int>(*i / 2)
} else {
None::<int>
}
}
fn halve_for_sure(i: &int) -> int { *i / 2 }
let all_even: ~[int] = ~[0, 2, 8, 6];
let all_odd1: ~[int] = ~[1, 7, 3];
let all_odd2: ~[int] = ~[];
let mix: ~[int] = ~[9, 2, 6, 7, 1, 0, 0, 3];
let mix_dest: ~[int] = ~[1, 3, 0, 0];
assert!(filter_mapped(all_even, halve) ==
map(all_even, halve_for_sure));
assert_eq!(filter_mapped(all_odd1, halve), ~[]);
assert_eq!(filter_mapped(all_odd2, halve), ~[]);
assert_eq!(filter_mapped(mix, halve), mix_dest);
}
#[test]
fn test_filter_map() {
// Test on-stack filter-map.
let mut v = ~[1u, 2u, 3u];
let mut w = filter_map(v, square_if_odd_v);
assert_eq!(w.len(), 2u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 9u);
// Test on-heap filter-map.
v = ~[1u, 2u, 3u, 4u, 5u];
w = filter_map(v, square_if_odd_v);
assert_eq!(w.len(), 3u);
assert_eq!(w[0], 1u);
assert_eq!(w[1], 9u);
assert_eq!(w[2], 25u);
fn halve(i: int) -> Option<int> {
if i % 2 == 0 {
Some::<int>(i / 2)
} else {
None::<int>
}
}
fn halve_for_sure(i: &int) -> int { *i / 2 }
let all_even: ~[int] = ~[0, 2, 8, 6];
let all_even0: ~[int] = copy all_even;
let all_odd1: ~[int] = ~[1, 7, 3];
let all_odd2: ~[int] = ~[];
let mix: ~[int] = ~[9, 2, 6, 7, 1, 0, 0, 3];
let mix_dest: ~[int] = ~[1, 3, 0, 0];
assert!(filter_map(all_even, halve) ==
map(all_even0, halve_for_sure));
assert_eq!(filter_map(all_odd1, halve), ~[]);
assert_eq!(filter_map(all_odd2, halve), ~[]);
assert_eq!(filter_map(mix, halve), mix_dest);
}
#[test]
fn test_filter() {
assert_eq!(filter(~[1u, 2u, 3u], is_odd), ~[1u, 3u]);
assert_eq!(filter(~[1u, 2u, 4u, 8u, 16u], is_three), ~[]);
}
#[test]
fn test_retain() {
let mut v = ~[1, 2, 3, 4, 5];
v.retain(is_odd);
assert_eq!(v, ~[1, 3, 5]);
}
#[test]
fn test_each_permutation() {
let mut results: ~[~[int]];
results = ~[];
for each_permutation([]) |v| { results.push(to_owned(v)); }
assert_eq!(results, ~[~[]]);
results = ~[];
for each_permutation([7]) |v| { results.push(to_owned(v)); }
assert_eq!(results, ~[~[7]]);
results = ~[];
for each_permutation([1,1]) |v| { results.push(to_owned(v)); }
assert_eq!(results, ~[~[1,1],~[1,1]]);
results = ~[];
for each_permutation([5,2,0]) |v| { results.push(to_owned(v)); }
assert!(results ==
~[~[5,2,0],~[5,0,2],~[2,5,0],~[2,0,5],~[0,5,2],~[0,2,5]]);
}
#[test]
fn test_zip_unzip() {
let v1 = ~[1, 2, 3];
let v2 = ~[4, 5, 6];
let z1 = zip(v1, v2);
assert_eq!((1, 4), z1[0]);
assert_eq!((2, 5), z1[1]);
assert_eq!((3, 6), z1[2]);
let (left, right) = unzip(z1);
assert_eq!((1, 4), (left[0], right[0]));
assert_eq!((2, 5), (left[1], right[1]));
assert_eq!((3, 6), (left[2], right[2]));
}
#[test]
fn test_position_elem() {
assert!(position_elem([], &1).is_none());
let v1 = ~[1, 2, 3, 3, 2, 5];
assert_eq!(position_elem(v1, &1), Some(0u));
assert_eq!(position_elem(v1, &2), Some(1u));
assert_eq!(position_elem(v1, &5), Some(5u));
assert!(position_elem(v1, &4).is_none());
}
#[test]
fn test_position_between() {
assert!(position_between([], 0u, 0u, f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert!(position_between(v, 0u, 0u, f).is_none());
assert!(position_between(v, 0u, 1u, f).is_none());
assert_eq!(position_between(v, 0u, 2u, f), Some(1u));
assert_eq!(position_between(v, 0u, 3u, f), Some(1u));
assert_eq!(position_between(v, 0u, 4u, f), Some(1u));
assert!(position_between(v, 1u, 1u, f).is_none());
assert_eq!(position_between(v, 1u, 2u, f), Some(1u));
assert_eq!(position_between(v, 1u, 3u, f), Some(1u));
assert_eq!(position_between(v, 1u, 4u, f), Some(1u));
assert!(position_between(v, 2u, 2u, f).is_none());
assert!(position_between(v, 2u, 3u, f).is_none());
assert_eq!(position_between(v, 2u, 4u, f), Some(3u));
assert!(position_between(v, 3u, 3u, f).is_none());
assert_eq!(position_between(v, 3u, 4u, f), Some(3u));
assert!(position_between(v, 4u, 4u, f).is_none());
}
#[test]
fn test_find_between() {
assert!(find_between([], 0u, 0u, f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert!(find_between(v, 0u, 0u, f).is_none());
assert!(find_between(v, 0u, 1u, f).is_none());
assert_eq!(find_between(v, 0u, 2u, f), Some((1, 'b')));
assert_eq!(find_between(v, 0u, 3u, f), Some((1, 'b')));
assert_eq!(find_between(v, 0u, 4u, f), Some((1, 'b')));
assert!(find_between(v, 1u, 1u, f).is_none());
assert_eq!(find_between(v, 1u, 2u, f), Some((1, 'b')));
assert_eq!(find_between(v, 1u, 3u, f), Some((1, 'b')));
assert_eq!(find_between(v, 1u, 4u, f), Some((1, 'b')));
assert!(find_between(v, 2u, 2u, f).is_none());
assert!(find_between(v, 2u, 3u, f).is_none());
assert_eq!(find_between(v, 2u, 4u, f), Some((3, 'b')));
assert!(find_between(v, 3u, 3u, f).is_none());
assert_eq!(find_between(v, 3u, 4u, f), Some((3, 'b')));
assert!(find_between(v, 4u, 4u, f).is_none());
}
#[test]
fn test_rposition() {
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
fn g(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'd' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert_eq!(rposition(v, f), Some(3u));
assert!(rposition(v, g).is_none());
}
#[test]
fn test_rposition_between() {
assert!(rposition_between([], 0u, 0u, f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert!(rposition_between(v, 0u, 0u, f).is_none());
assert!(rposition_between(v, 0u, 1u, f).is_none());
assert_eq!(rposition_between(v, 0u, 2u, f), Some(1u));
assert_eq!(rposition_between(v, 0u, 3u, f), Some(1u));
assert_eq!(rposition_between(v, 0u, 4u, f), Some(3u));
assert!(rposition_between(v, 1u, 1u, f).is_none());
assert_eq!(rposition_between(v, 1u, 2u, f), Some(1u));
assert_eq!(rposition_between(v, 1u, 3u, f), Some(1u));
assert_eq!(rposition_between(v, 1u, 4u, f), Some(3u));
assert!(rposition_between(v, 2u, 2u, f).is_none());
assert!(rposition_between(v, 2u, 3u, f).is_none());
assert_eq!(rposition_between(v, 2u, 4u, f), Some(3u));
assert!(rposition_between(v, 3u, 3u, f).is_none());
assert_eq!(rposition_between(v, 3u, 4u, f), Some(3u));
assert!(rposition_between(v, 4u, 4u, f).is_none());
}
#[test]
fn test_rfind() {
assert!(rfind([], f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
fn g(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'd' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert_eq!(rfind(v, f), Some((3, 'b')));
assert!(rfind(v, g).is_none());
}
#[test]
fn test_rfind_between() {
assert!(rfind_between([], 0u, 0u, f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert!(rfind_between(v, 0u, 0u, f).is_none());
assert!(rfind_between(v, 0u, 1u, f).is_none());
assert_eq!(rfind_between(v, 0u, 2u, f), Some((1, 'b')));
assert_eq!(rfind_between(v, 0u, 3u, f), Some((1, 'b')));
assert_eq!(rfind_between(v, 0u, 4u, f), Some((3, 'b')));
assert!(rfind_between(v, 1u, 1u, f).is_none());
assert_eq!(rfind_between(v, 1u, 2u, f), Some((1, 'b')));
assert_eq!(rfind_between(v, 1u, 3u, f), Some((1, 'b')));
assert_eq!(rfind_between(v, 1u, 4u, f), Some((3, 'b')));
assert!(rfind_between(v, 2u, 2u, f).is_none());
assert!(rfind_between(v, 2u, 3u, f).is_none());
assert_eq!(rfind_between(v, 2u, 4u, f), Some((3, 'b')));
assert!(rfind_between(v, 3u, 3u, f).is_none());
assert_eq!(rfind_between(v, 3u, 4u, f), Some((3, 'b')));
assert!(rfind_between(v, 4u, 4u, f).is_none());
}
#[test]
fn test_bsearch_elem() {
assert_eq!(bsearch_elem([1,2,3,4,5], &5), Some(4));
assert_eq!(bsearch_elem([1,2,3,4,5], &4), Some(3));
assert_eq!(bsearch_elem([1,2,3,4,5], &3), Some(2));
assert_eq!(bsearch_elem([1,2,3,4,5], &2), Some(1));
assert_eq!(bsearch_elem([1,2,3,4,5], &1), Some(0));
assert_eq!(bsearch_elem([2,4,6,8,10], &1), None);
assert_eq!(bsearch_elem([2,4,6,8,10], &5), None);
assert_eq!(bsearch_elem([2,4,6,8,10], &4), Some(1));
assert_eq!(bsearch_elem([2,4,6,8,10], &10), Some(4));
assert_eq!(bsearch_elem([2,4,6,8], &1), None);
assert_eq!(bsearch_elem([2,4,6,8], &5), None);
assert_eq!(bsearch_elem([2,4,6,8], &4), Some(1));
assert_eq!(bsearch_elem([2,4,6,8], &8), Some(3));
assert_eq!(bsearch_elem([2,4,6], &1), None);
assert_eq!(bsearch_elem([2,4,6], &5), None);
assert_eq!(bsearch_elem([2,4,6], &4), Some(1));
assert_eq!(bsearch_elem([2,4,6], &6), Some(2));
assert_eq!(bsearch_elem([2,4], &1), None);
assert_eq!(bsearch_elem([2,4], &5), None);
assert_eq!(bsearch_elem([2,4], &2), Some(0));
assert_eq!(bsearch_elem([2,4], &4), Some(1));
assert_eq!(bsearch_elem([2], &1), None);
assert_eq!(bsearch_elem([2], &5), None);
assert_eq!(bsearch_elem([2], &2), Some(0));
assert_eq!(bsearch_elem([], &1), None);
assert_eq!(bsearch_elem([], &5), None);
assert!(bsearch_elem([1,1,1,1,1], &1) != None);
assert!(bsearch_elem([1,1,1,1,2], &1) != None);
assert!(bsearch_elem([1,1,1,2,2], &1) != None);
assert!(bsearch_elem([1,1,2,2,2], &1) != None);
assert_eq!(bsearch_elem([1,2,2,2,2], &1), Some(0));
assert_eq!(bsearch_elem([1,2,3,4,5], &6), None);
assert_eq!(bsearch_elem([1,2,3,4,5], &0), None);
}
#[test]
fn reverse_and_reversed() {
let mut v: ~[int] = ~[10, 20];
assert_eq!(v[0], 10);
assert_eq!(v[1], 20);
reverse(v);
assert_eq!(v[0], 20);
assert_eq!(v[1], 10);
let v2 = reversed::<int>([10, 20]);
assert_eq!(v2[0], 20);
assert_eq!(v2[1], 10);
v[0] = 30;
assert_eq!(v2[0], 20);
// Make sure they work with 0-length vectors too.
let v4 = reversed::<int>([]);
assert_eq!(v4, ~[]);
let mut v3: ~[int] = ~[];
reverse::<int>(v3);
}
#[test]
fn reversed_mut() {
let v2 = reversed::<int>([10, 20]);
assert_eq!(v2[0], 20);
assert_eq!(v2[1], 10);
}
#[test]
fn test_split() {
fn f(x: &int) -> bool { *x == 3 }
assert_eq!(split([], f), ~[]);
assert_eq!(split([1, 2], f), ~[~[1, 2]]);
assert_eq!(split([3, 1, 2], f), ~[~[], ~[1, 2]]);
assert_eq!(split([1, 2, 3], f), ~[~[1, 2], ~[]]);
assert_eq!(split([1, 2, 3, 4, 3, 5], f), ~[~[1, 2], ~[4], ~[5]]);
}
#[test]
fn test_splitn() {
fn f(x: &int) -> bool { *x == 3 }
assert_eq!(splitn([], 1u, f), ~[]);
assert_eq!(splitn([1, 2], 1u, f), ~[~[1, 2]]);
assert_eq!(splitn([3, 1, 2], 1u, f), ~[~[], ~[1, 2]]);
assert_eq!(splitn([1, 2, 3], 1u, f), ~[~[1, 2], ~[]]);
assert!(splitn([1, 2, 3, 4, 3, 5], 1u, f) ==
~[~[1, 2], ~[4, 3, 5]]);
}
#[test]
fn test_rsplit() {
fn f(x: &int) -> bool { *x == 3 }
assert_eq!(rsplit([], f), ~[]);
assert_eq!(rsplit([1, 2], f), ~[~[1, 2]]);
assert_eq!(rsplit([1, 2, 3], f), ~[~[1, 2], ~[]]);
assert!(rsplit([1, 2, 3, 4, 3, 5], f) ==
~[~[1, 2], ~[4], ~[5]]);
}
#[test]
fn test_rsplitn() {
fn f(x: &int) -> bool { *x == 3 }
assert_eq!(rsplitn([], 1u, f), ~[]);
assert_eq!(rsplitn([1, 2], 1u, f), ~[~[1, 2]]);
assert_eq!(rsplitn([1, 2, 3], 1u, f), ~[~[1, 2], ~[]]);
assert_eq!(rsplitn([1, 2, 3, 4, 3, 5], 1u, f), ~[~[1, 2, 3, 4], ~[5]]);
}
#[test]
fn test_partition() {
// FIXME (#4355 maybe): using v.partition here crashes
assert_eq!(partition(~[], |x: &int| *x < 3), (~[], ~[]));
assert_eq!(partition(~[1, 2, 3], |x: &int| *x < 4), (~[1, 2, 3], ~[]));
assert_eq!(partition(~[1, 2, 3], |x: &int| *x < 2), (~[1], ~[2, 3]));
assert_eq!(partition(~[1, 2, 3], |x: &int| *x < 0), (~[], ~[1, 2, 3]));
}
#[test]
fn test_partitioned() {
assert_eq!(([]).partitioned(|x: &int| *x < 3), (~[], ~[]))
assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 4), (~[1, 2, 3], ~[]));
assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 2), (~[1], ~[2, 3]));
assert_eq!(([1, 2, 3]).partitioned(|x: &int| *x < 0), (~[], ~[1, 2, 3]));
}
#[test]
fn test_concat() {
assert_eq!(concat([~[1], ~[2,3]]), ~[1, 2, 3]);
assert_eq!([~[1], ~[2,3]].concat_vec(), ~[1, 2, 3]);
assert_eq!(concat_slices([&[1], &[2,3]]), ~[1, 2, 3]);
assert_eq!([&[1], &[2,3]].concat_vec(), ~[1, 2, 3]);
}
#[test]
fn test_connect() {
assert_eq!(connect([], &0), ~[]);
assert_eq!(connect([~[1], ~[2, 3]], &0), ~[1, 0, 2, 3]);
assert_eq!(connect([~[1], ~[2], ~[3]], &0), ~[1, 0, 2, 0, 3]);
assert_eq!([~[1], ~[2, 3]].connect_vec(&0), ~[1, 0, 2, 3]);
assert_eq!([~[1], ~[2], ~[3]].connect_vec(&0), ~[1, 0, 2, 0, 3]);
assert_eq!(connect_slices([], &0), ~[]);
assert_eq!(connect_slices([&[1], &[2, 3]], &0), ~[1, 0, 2, 3]);
assert_eq!(connect_slices([&[1], &[2], &[3]], &0), ~[1, 0, 2, 0, 3]);
assert_eq!([&[1], &[2, 3]].connect_vec(&0), ~[1, 0, 2, 3]);
assert_eq!([&[1], &[2], &[3]].connect_vec(&0), ~[1, 0, 2, 0, 3]);
}
#[test]
fn test_windowed () {
fn t(n: uint, expected: &[&[int]]) {
let mut i = 0;
for windowed(n, [1,2,3,4,5,6]) |v| {
assert_eq!(v, expected[i]);
i += 1;
}
// check that we actually iterated the right number of times
assert_eq!(i, expected.len());
}
t(3, &[&[1,2,3],&[2,3,4],&[3,4,5],&[4,5,6]]);
t(4, &[&[1,2,3,4],&[2,3,4,5],&[3,4,5,6]]);
t(7, &[]);
t(8, &[]);
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_windowed_() {
for windowed (0u, [1u,2u,3u,4u,5u,6u]) |_v| {}
}
#[test]
fn test_unshift() {
let mut x = ~[1, 2, 3];
x.unshift(0);
assert_eq!(x, ~[0, 1, 2, 3]);
}
#[test]
fn test_insert() {
let mut a = ~[1, 2, 4];
a.insert(2, 3);
assert_eq!(a, ~[1, 2, 3, 4]);
let mut a = ~[1, 2, 3];
a.insert(0, 0);
assert_eq!(a, ~[0, 1, 2, 3]);
let mut a = ~[1, 2, 3];
a.insert(3, 4);
assert_eq!(a, ~[1, 2, 3, 4]);
let mut a = ~[];
a.insert(0, 1);
assert_eq!(a, ~[1]);
}
#[test]
#[ignore(cfg(windows))]
#[should_fail]
fn test_insert_oob() {
let mut a = ~[1, 2, 3];
a.insert(4, 5);
}
#[test]
fn test_remove() {
let mut a = ~[1, 2, 3, 4];
a.remove(2);
assert_eq!(a, ~[1, 2, 4]);
let mut a = ~[1, 2, 3];
a.remove(0);
assert_eq!(a, ~[2, 3]);
let mut a = ~[1];
a.remove(0);
assert_eq!(a, ~[]);
}
#[test]
#[ignore(cfg(windows))]
#[should_fail]
fn test_remove_oob() {
let mut a = ~[1, 2, 3];
a.remove(3);
}
#[test]
fn test_capacity() {
let mut v = ~[0u64];
reserve(&mut v, 10u);
assert_eq!(capacity(&v), 10u);
let mut v = ~[0u32];
reserve(&mut v, 10u);
assert_eq!(capacity(&v), 10u);
}
#[test]
fn test_slice_2() {
let v = ~[1, 2, 3, 4, 5];
let v = v.slice(1u, 3u);
assert_eq!(v.len(), 2u);
assert_eq!(v[0], 2);
assert_eq!(v[1], 3);
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_from_fn_fail() {
do from_fn(100) |v| {
if v == 50 { fail!() }
(~0, @0)
};
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_build_fail() {
do build |push| {
push((~0, @0));
push((~0, @0));
push((~0, @0));
push((~0, @0));
fail!();
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_split_fail_ret_true() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do split(v) |_elt| {
if i == 2 {
fail!()
}
i += 1;
true
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_split_fail_ret_false() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do split(v) |_elt| {
if i == 2 {
fail!()
}
i += 1;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_splitn_fail_ret_true() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do splitn(v, 100) |_elt| {
if i == 2 {
fail!()
}
i += 1;
true
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_splitn_fail_ret_false() {
    // Checks that `splitn` propagates a failure raised by its predicate.
    let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
    let mut i = 0;
    // Fixed: was `split(v)` (copy-paste from the `split` tests), which
    // meant `splitn` was never exercised here; mirrors
    // test_splitn_fail_ret_true.
    do splitn(v, 100) |_elt| {
        if i == 2 {
            fail!()
        }
        i += 1;
        false
    };
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_rsplit_fail_ret_true() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do rsplit(v) |_elt| {
if i == 2 {
fail!()
}
i += 1;
true
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_rsplit_fail_ret_false() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do rsplit(v) |_elt| {
if i == 2 {
fail!()
}
i += 1;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_rsplitn_fail_ret_true() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do rsplitn(v, 100) |_elt| {
if i == 2 {
fail!()
}
i += 1;
true
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_rsplitn_fail_ret_false() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do rsplitn(v, 100) |_elt| {
if i == 2 {
fail!()
}
i += 1;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_consume_fail() {
let v = ~[(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do consume(v) |_i, _elt| {
if i == 2 {
fail!()
}
i += 1;
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_grow_fn_fail() {
let mut v = ~[];
do v.grow_fn(100) |i| {
if i == 50 {
fail!()
}
(~0, @0)
}
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_map_fail() {
    // Checks that `map` propagates a failure raised by its closure.
    let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
    let mut i = 0;
    do map(v) |_elt| {
        if i == 2 {
            fail!()
        }
        // Fixed: was `i += 0`, which never advanced the counter, so the
        // closure never failed and the #[should_fail] expectation broke.
        i += 1;
        ~[(~0, @0)]
    };
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_map_consume_fail() {
    // Checks that `map_consume` propagates a failure raised by its closure.
    let v = ~[(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
    let mut i = 0;
    do map_consume(v) |_elt| {
        if i == 2 {
            fail!()
        }
        // Fixed: was `i += 0`, so `i == 2` was unreachable and the
        // required failure never happened.
        i += 1;
        ~[(~0, @0)]
    };
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_mapi_fail() {
    // Checks that `mapi` propagates a failure raised by its closure.
    let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
    let mut i = 0;
    do mapi(v) |_i, _elt| {
        if i == 2 {
            fail!()
        }
        // Fixed: was `i += 0`, so `i == 2` was unreachable and the
        // required failure never happened.
        i += 1;
        ~[(~0, @0)]
    };
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_flat_map_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do map(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
~[(~0, @0)]
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_map_zip_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do map_zip(v, v) |_elt1, _elt2| {
if i == 2 {
fail!()
}
i += 0;
~[(~0, @0)]
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_filter_mapped_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do filter_mapped(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
Some((~0, @0))
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_filter_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do v.filtered |_elt| {
if i == 2 {
fail!()
}
i += 0;
true
};
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_rposition_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do rposition(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_permute_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
for each_permutation(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
}
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_as_imm_buf_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
do as_imm_buf(v) |_buf, _i| {
fail!()
}
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_as_const_buf_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
do as_const_buf(v) |_buf, _i| {
fail!()
}
}
#[test]
#[ignore(cfg(windows))]
#[should_fail]
fn test_as_mut_buf_fail() {
let mut v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
do as_mut_buf(v) |_buf, _i| {
fail!()
}
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_copy_memory_oob() {
unsafe {
let mut a = [1, 2, 3, 4];
let b = [1, 2, 3, 4, 5];
raw::copy_memory(a, b, 5);
}
}
#[test]
fn test_total_ord() {
[1, 2, 3, 4].cmp(& &[1, 2, 3]) == Greater;
[1, 2, 3].cmp(& &[1, 2, 3, 4]) == Less;
[1, 2, 3, 4].cmp(& &[1, 2, 3, 4]) == Equal;
[1, 2, 3, 4, 5, 5, 5, 5].cmp(& &[1, 2, 3, 4, 5, 6]) == Less;
[2, 2].cmp(& &[1, 2, 3, 4]) == Greater;
}
#[test]
fn test_iterator() {
use iterator::*;
let xs = [1, 2, 5, 10, 11];
let mut it = xs.iter();
assert_eq!(it.size_hint(), (Some(5), Some(5)));
assert_eq!(it.next().unwrap(), &1);
assert_eq!(it.size_hint(), (Some(4), Some(4)));
assert_eq!(it.next().unwrap(), &2);
assert_eq!(it.size_hint(), (Some(3), Some(3)));
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.size_hint(), (Some(2), Some(2)));
assert_eq!(it.next().unwrap(), &10);
assert_eq!(it.size_hint(), (Some(1), Some(1)));
assert_eq!(it.next().unwrap(), &11);
assert_eq!(it.size_hint(), (Some(0), Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_mut_iterator() {
use iterator::*;
let mut xs = [1, 2, 3, 4, 5];
for xs.mut_iter().advance |x| {
*x += 1;
}
assert_eq!(xs, [2, 3, 4, 5, 6])
}
#[test]
fn test_rev_iterator() {
use iterator::*;
let xs = [1, 2, 5, 10, 11];
let ys = [11, 10, 5, 2, 1];
let mut i = 0;
for xs.rev_iter().advance |&x| {
assert_eq!(x, ys[i]);
i += 1;
}
assert_eq!(i, 5);
}
#[test]
fn test_mut_rev_iterator() {
use iterator::*;
let mut xs = [1u, 2, 3, 4, 5];
for xs.mut_rev_iter().enumerate().advance |(i,x)| {
*x += i;
}
assert_eq!(xs, [5, 5, 5, 5, 5])
}
#[test]
fn test_move_from() {
let mut a = [1,2,3,4,5];
let b = ~[6,7,8];
assert_eq!(a.move_from(b, 0, 3), 3);
assert_eq!(a, [6,7,8,4,5]);
let mut a = [7,2,8,1];
let b = ~[3,1,4,1,5,9];
assert_eq!(a.move_from(b, 0, 6), 4);
assert_eq!(a, [3,1,4,1]);
let mut a = [1,2,3,4];
let b = ~[5,6,7,8,9,0];
assert_eq!(a.move_from(b, 2, 3), 1);
assert_eq!(a, [7,2,3,4]);
let mut a = [1,2,3,4,5];
let b = ~[5,6,7,8,9,0];
assert_eq!(a.mut_slice(2,4).move_from(b,1,6), 2);
assert_eq!(a, [1,2,6,7,5]);
}
#[test]
fn test_copy_from() {
let mut a = [1,2,3,4,5];
let b = [6,7,8];
assert_eq!(a.copy_from(b), 3);
assert_eq!(a, [6,7,8,4,5]);
let mut c = [7,2,8,1];
let d = [3,1,4,1,5,9];
assert_eq!(c.copy_from(d), 4);
assert_eq!(c, [3,1,4,1]);
}
#[test]
fn test_reverse_part() {
let mut values = [1,2,3,4,5];
reverse(values.mut_slice(1, 4));
assert_eq!(values, [1,4,3,2,5]);
}
#[test]
fn test_permutations0() {
let values = [];
let mut v : ~[~[int]] = ~[];
for each_permutation(values) |p| {
v.push(p.to_owned());
}
assert_eq!(v, ~[~[]]);
}
#[test]
fn test_permutations1() {
let values = [1];
let mut v : ~[~[int]] = ~[];
for each_permutation(values) |p| {
v.push(p.to_owned());
}
assert_eq!(v, ~[~[1]]);
}
#[test]
fn test_permutations2() {
let values = [1,2];
let mut v : ~[~[int]] = ~[];
for each_permutation(values) |p| {
v.push(p.to_owned());
}
assert_eq!(v, ~[~[1,2],~[2,1]]);
}
#[test]
fn test_permutations3() {
let values = [1,2,3];
let mut v : ~[~[int]] = ~[];
for each_permutation(values) |p| {
v.push(p.to_owned());
}
assert_eq!(v, ~[~[1,2,3],~[1,3,2],~[2,1,3],~[2,3,1],~[3,1,2],~[3,2,1]]);
}
#[test]
fn test_vec_zero() {
use num::Zero;
macro_rules! t (
($ty:ty) => {
let v: $ty = Zero::zero();
assert!(v.is_empty());
assert!(v.is_zero());
}
);
t!(&[int]);
t!(@[int]);
t!(~[int]);
}
#[test]
fn test_bytes_set_memory() {
use vec::bytes::MutableByteVector;
let mut values = [1u8,2,3,4,5];
values.mut_slice(0,5).set_memory(0xAB);
assert_eq!(values, [0xAB, 0xAB, 0xAB, 0xAB, 0xAB]);
values.mut_slice(2,4).set_memory(0xFF);
assert_eq!(values, [0xAB, 0xAB, 0xFF, 0xFF, 0xAB]);
}
}
| 28.200449 | 97 | 0.498243 |
11e18a03d3229db20756d438509b754596cc96c6 | 3,577 | //! ra_db defines basic database traits. The concrete DB is defined by ra_ide_api.
mod cancellation;
mod input;
use std::{panic, sync::Arc};
use ra_prof::profile;
use ra_syntax::{Parse, SourceFile, TextRange, TextUnit};
use relative_path::RelativePathBuf;
pub use crate::{
cancellation::Canceled,
input::{CrateGraph, CrateId, Dependency, Edition, FileId, SourceRoot, SourceRootId},
};
pub use ::salsa;
pub trait CheckCanceled {
/// Aborts current query if there are pending changes.
///
/// rust-analyzer needs to be able to answer semantic questions about the
/// code while the code is being modified. A common problem is that a
/// long-running query is being calculated when a new change arrives.
///
/// We can't just apply the change immediately: this will cause the pending
/// query to see inconsistent state (it will observe an absence of
/// repeatable read). So what we do is we **cancel** all pending queries
/// before applying the change.
///
/// We implement cancellation by panicking with a special value and catching
/// it on the API boundary. Salsa explicitly supports this use-case.
fn check_canceled(&self);
fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
where
Self: Sized,
F: FnOnce(&Self) -> T + panic::UnwindSafe,
{
let this = panic::AssertUnwindSafe(self);
panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() {
Ok(canceled) => *canceled,
Err(payload) => panic::resume_unwind(payload),
})
}
}
impl<T: salsa::Database> CheckCanceled for T {
fn check_canceled(&self) {
if self.salsa_runtime().is_current_revision_canceled() {
Canceled::throw()
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextUnit,
}
#[derive(Clone, Copy, Debug)]
pub struct FileRange {
pub file_id: FileId,
pub range: TextRange,
}
pub const DEFAULT_LRU_CAP: usize = 128;
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: CheckCanceled + std::fmt::Debug {
/// Text of the file.
#[salsa::input]
fn file_text(&self, file_id: FileId) -> Arc<String>;
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
fn parse(&self, file_id: FileId) -> Parse;
/// Path to a file, relative to the root of its source root.
#[salsa::input]
fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf;
/// Source root of the file.
#[salsa::input]
fn file_source_root(&self, file_id: FileId) -> SourceRootId;
/// Contents of the source root.
#[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>;
/// The crate graph.
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
}
fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> {
let root = db.source_root(id);
let graph = db.crate_graph();
let res =
root.files.values().filter_map(|&it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>();
Arc::new(res)
}
fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse {
let _p = profile("parse_query");
let text = db.file_text(file_id);
SourceFile::parse(&*text)
}
| 33.745283 | 100 | 0.665642 |
228eb878154ab697dbe262caa3e8134bed875f2a | 2,648 | use std::sync::Arc;
use crate::concurrency::{ThreadPool, ThreadPoolContext, ThreadPoolContextCreator};
/// A spatio-temporal rectangle for querying data
use geoengine_datatypes::primitives::{
AxisAlignedRectangle, BoundingBox2D, SpatialPartition2D, SpatialPartitioned, SpatialResolution,
TimeInterval,
};
use serde::{Deserialize, Serialize};
/// A spatio-temporal rectangle for querying data with a bounding box
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QueryRectangle<SpatialBounds: AxisAlignedRectangle> {
pub spatial_bounds: SpatialBounds,
pub time_interval: TimeInterval,
pub spatial_resolution: SpatialResolution,
}
pub type VectorQueryRectangle = QueryRectangle<BoundingBox2D>;
pub type RasterQueryRectangle = QueryRectangle<SpatialPartition2D>;
pub type PlotQueryRectangle = QueryRectangle<BoundingBox2D>;
impl SpatialPartitioned for VectorQueryRectangle {
fn spatial_partition(&self) -> SpatialPartition2D {
SpatialPartition2D::with_bbox_and_resolution(self.spatial_bounds, self.spatial_resolution)
}
}
impl SpatialPartitioned for RasterQueryRectangle {
fn spatial_partition(&self) -> SpatialPartition2D {
self.spatial_bounds
}
}
impl From<VectorQueryRectangle> for RasterQueryRectangle {
fn from(value: VectorQueryRectangle) -> Self {
Self {
spatial_bounds: value.spatial_partition(),
time_interval: value.time_interval,
spatial_resolution: value.spatial_resolution,
}
}
}
pub trait QueryContext: Send + Sync {
fn chunk_byte_size(&self) -> usize;
fn thread_pool_context(&self) -> &ThreadPoolContext;
}
pub struct MockQueryContext {
pub chunk_byte_size: usize,
pub thread_pool: ThreadPoolContext,
}
impl Default for MockQueryContext {
fn default() -> Self {
Self {
chunk_byte_size: 1024 * 1024,
thread_pool: Arc::new(ThreadPool::default()).create_context(),
}
}
}
impl MockQueryContext {
pub fn new(chunk_byte_size: usize) -> Self {
Self {
chunk_byte_size,
..Default::default()
}
}
pub fn with_chunk_size_and_thread_count(chunk_byte_size: usize, num_threads: usize) -> Self {
Self {
chunk_byte_size,
thread_pool: Arc::new(ThreadPool::new(num_threads)).create_context(),
}
}
}
impl QueryContext for MockQueryContext {
fn chunk_byte_size(&self) -> usize {
self.chunk_byte_size
}
fn thread_pool_context(&self) -> &ThreadPoolContext {
&self.thread_pool
}
}
| 29.098901 | 99 | 0.702795 |
29dd811b88c3a3ae7da9189aa71424899246c858 | 2,246 | mod bin_length;
mod byte_order;
mod concat;
mod env;
mod from_file;
mod numeric;
mod repeat_delim;
mod select;
mod sequence;
mod strings;
mod to_string;
use crate::interpreter::Module;
use crate::BuiltinFunctionPrototype;
const BUILTIN_FNS: &'static [&'static BuiltinFunctionPrototype] = &[
self::bin_length::BIN_LENGTH,
self::byte_order::UINT_LITTLE_ENDIAN,
self::byte_order::UINT_BIG_ENDIAN,
self::byte_order::INT_LITTLE_ENDIAN,
self::byte_order::INT_BIG_ENDIAN,
self::byte_order::DECIMAL_LITTLE_ENDIAN,
self::byte_order::DECIMAL_BIG_ENDIAN,
self::env::ENV_VAR,
self::strings::STRING_GEN_BUILTIN,
self::strings::STRING_LENGTH_BUILTIN,
self::strings::STRING_ENCODE_BUILTIN,
self::concat::CONCAT_BUILTIN,
self::concat::CONCAT_BIN_BUILTIN,
self::select::SELECT_STRING_BUILTIN,
self::select::SELECT_BOOLEAN_BUILTIN,
self::select::SELECT_DECIMAL_BUILTIN,
self::select::SELECT_UINT_BUILTIN,
self::select::SELECT_INT_BUILTIN,
self::select::SELECT_BIN_BUILTIN,
self::select::STABLE_SELECT_STRING_BUILTIN,
self::select::STABLE_SELECT_BOOLEAN_BUILTIN,
self::select::STABLE_SELECT_DECIMAL_BUILTIN,
self::select::STABLE_SELECT_UINT_BUILTIN,
self::select::STABLE_SELECT_INT_BUILTIN,
self::select::STABLE_SELECT_BIN_BUILTIN,
self::repeat_delim::REPEAT_DELIM_BUILTIN,
self::repeat_delim::REPEAT_DELIM_BIN_BUILTIN,
self::numeric::UINT_BUILTIN,
self::numeric::INT_BUILTIN,
self::numeric::DECIMAL_BUILTIN,
self::from_file::WORDS_BUILTIN,
self::from_file::SELECT_FROM_FILE_BUILTIN,
self::to_string::BOOLEAN_TO_STRING_BUILTIN,
self::to_string::DECIMAL_TO_STRING_BUILTIN,
self::to_string::INT_TO_STRING_BUILTIN,
self::to_string::UINT_TO_STRING_BUILTIN,
self::sequence::STRING_WRAPPING_SEQ,
self::sequence::STRING_SEQ,
self::sequence::BIN_WRAPPING_SEQ,
self::sequence::BIN_SEQ,
self::sequence::UINT_WRAPPING_SEQ,
self::sequence::UINT_SEQ,
self::sequence::INT_WRAPPING_SEQ,
self::sequence::INT_SEQ,
self::sequence::DECIMAL_WRAPPING_SEQ,
self::sequence::DECIMAL_SEQ,
];
pub fn get_default_builtins_module() -> Module {
Module::new_builtin(BUILTIN_FNS.iter().map(|fun| *fun))
} | 33.029412 | 68 | 0.750668 |
79a5719e7c6e512ff21c278239b0d2c0c723a024 | 1,435 | bracket_terminal::add_wasm_support!();
use bracket_terminal::prelude::*;
struct State {
x: f32,
}
impl GameState for State {
fn tick(&mut self, ctx: &mut BTerm) {
let mut draw_batch = DrawBatch::new();
draw_batch.target(1);
draw_batch.cls();
let simple_x = self.x as i32;
let fancy_x = self.x + 20.0;
draw_batch.print(Point::new(0, 0), format!("Simple Console"));
draw_batch.print(Point::new(0, 1), format!("X={}", simple_x));
draw_batch.print(Point::new(20, 0), format!("Fancy Console"));
draw_batch.print(Point::new(20, 1), format!("X={:2}", fancy_x));
draw_batch.print(Point::new(simple_x, 3), "@");
draw_batch.set_fancy(
PointF::new(fancy_x, 4.0),
1,
Degrees::new(0.0),
PointF::new(1.0, 1.0),
ColorPair::new(WHITE,BLACK),
to_cp437('@')
);
draw_batch.submit(0).expect("Batch error");
render_draw_buffer(ctx).expect("Render error");
self.x += 0.05;
if self.x > 10.0 {
self.x = 0.0;
}
}
}
fn main() -> BError {
let context = BTermBuilder::simple80x50()
.with_fancy_console(80, 50, "terminal8x8.png")
.with_title("Bracket Terminal - Fancy Consoles")
.with_fps_cap(30.0)
.build()?;
let gs = State {
x: 0.0,
};
main_loop(context, gs)
}
| 24.741379 | 72 | 0.54007 |
e5ec7bce8038bd2200d5262dd164e25fab4f3df5 | 728 | use std::num::Wrapping;
use zerocopy::AsBytes;
#[derive(Debug, Copy, Clone, PartialOrd, PartialEq, AsBytes)]
#[repr(packed)]
pub struct Pixel {
pub r: u8,
pub g: u8,
pub b: u8,
}
impl Pixel {
pub fn wrapping_sum(&self) -> Wrapping<u8> {
Wrapping(self.r) + Wrapping(self.g) + Wrapping(self.b)
}
}
impl std::ops::Add for Pixel {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Pixel {
r: self.r + rhs.r,
g: self.g + rhs.g,
b: self.b + rhs.b,
}
}
}
impl num_traits::Zero for Pixel {
fn zero() -> Self {
Pixel { r: 0, g: 0, b: 0 }
}
fn is_zero(&self) -> bool {
*self == Self::zero()
}
}
| 18.2 | 62 | 0.513736 |
1c19297429963976df95b6be8ca6086212c97780 | 1,074 | use hello_world::greeter_client::GreeterClient;
use hello_world::HelloRequest;
use tonic_ws_transport::WsConnector;
use wasm_bindgen::prelude::*;
pub mod hello_world {
tonic::include_proto!("helloworld");
}
#[wasm_bindgen(start)]
pub fn main() -> Result<(), JsValue> {
console_error_panic_hook::set_once();
wasm_logger::init(wasm_logger::Config::new(log::Level::Debug));
Ok(())
}
#[wasm_bindgen]
pub async fn say_hello() -> String {
const URL: &str = "ws://127.0.0.1:3012";
let endpoint = tonic::transport::Endpoint::from_static(URL);
let channel = endpoint
.connect_with_connector(WsConnector::new())
.await
.expect("failed to connect");
log::info!("Connected to {}", URL);
let mut client = GreeterClient::new(channel);
let request = tonic::Request::new(HelloRequest {
name: "Tonic".into(),
});
log::info!("REQUEST={:?}", request);
let response = client.say_hello(request).await.expect("RPC call failed");
log::info!("RESPONSE={:?}", response);
format!("{:?}", response)
}
| 26.85 | 77 | 0.647114 |
29146305dbceaface0e723530b42772ae48d11af | 2,313 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use common_base::tokio;
use common_datavalues::prelude::*;
use common_exception::Result;
use common_planners::*;
use futures::TryStreamExt;
use super::NumbersTable;
#[tokio::test]
async fn test_number_table() -> Result<()> {
let tbl_args = Some(vec![Expression::create_literal(DataValue::UInt64(Some(8)))]);
let ctx = crate::tests::try_create_context()?;
let table = NumbersTable::create("system", "numbers_mt", 1, tbl_args)?;
let scan = &ScanPlan {
schema_name: "scan_test".to_string(),
table_id: 0,
table_version: None,
table_schema: DataSchemaRefExt::create(vec![]),
table_args: Some(Expression::create_literal(DataValue::UInt64(Some(8)))),
projected_schema: DataSchemaRefExt::create(vec![DataField::new(
"number",
DataType::UInt64,
false,
)]),
push_downs: Extras::default(),
};
let partitions = ctx.get_settings().get_max_threads()? as usize;
let source_plan =
table.read_plan(ctx.clone(), Some(scan.push_downs.clone()), Some(partitions))?;
ctx.try_set_partitions(source_plan.parts.clone())?;
let stream = table.read(ctx, &source_plan).await?;
let result = stream.try_collect::<Vec<_>>().await?;
let block = &result[0];
assert_eq!(block.num_columns(), 1);
let expected = vec![
"+--------+",
"| number |",
"+--------+",
"| 0 |",
"| 1 |",
"| 2 |",
"| 3 |",
"| 4 |",
"| 5 |",
"| 6 |",
"| 7 |",
"+--------+",
];
common_datablocks::assert_blocks_sorted_eq(expected, result.as_slice());
Ok(())
}
| 32.577465 | 87 | 0.602681 |
119b78921606b4d807633f3b326b6f981be64710 | 13,513 | use crate::event::{self, Event};
use crate::layout;
use crate::mouse;
use crate::overlay;
use crate::renderer;
use crate::{
Clipboard, Color, Layout, Length, Point, Rectangle, Shell, Widget,
};
/// A generic [`Widget`].
///
/// It is useful to build composable user interfaces that do not leak
/// implementation details in their __view logic__.
///
/// If you have a [built-in widget], you should be able to use `Into<Element>`
/// to turn it into an [`Element`].
///
/// [built-in widget]: widget/index.html#built-in-widgets
#[allow(missing_debug_implementations)]
pub struct Element<'a, Message, Renderer> {
pub(crate) widget: Box<dyn Widget<Message, Renderer> + 'a>,
}
impl<'a, Message, Renderer> Element<'a, Message, Renderer>
where
Renderer: crate::Renderer,
{
/// Creates a new [`Element`] containing the given [`Widget`].
pub fn new(
widget: impl Widget<Message, Renderer> + 'a,
) -> Element<'a, Message, Renderer> {
Element {
widget: Box::new(widget),
}
}
/// Applies a transformation to the produced message of the [`Element`].
///
/// This method is useful when you want to decouple different parts of your
/// UI and make them __composable__.
///
/// # Example
/// Imagine we want to use [our counter](index.html#usage). But instead of
/// showing a single counter, we want to display many of them. We can reuse
/// the `Counter` type as it is!
///
/// We use composition to model the __state__ of our new application:
///
/// ```
/// # mod counter {
/// # pub struct Counter;
/// # }
/// use counter::Counter;
///
/// struct ManyCounters {
/// counters: Vec<Counter>,
/// }
/// ```
///
/// We can store the state of multiple counters now. However, the
/// __messages__ we implemented before describe the user interactions
/// of a __single__ counter. Right now, we need to also identify which
/// counter is receiving user interactions. Can we use composition again?
/// Yes.
///
/// ```
/// # mod counter {
/// # #[derive(Debug, Clone, Copy)]
/// # pub enum Message {}
/// # }
/// #[derive(Debug, Clone, Copy)]
/// pub enum Message {
/// Counter(usize, counter::Message)
/// }
/// ```
///
/// We compose the previous __messages__ with the index of the counter
/// producing them. Let's implement our __view logic__ now:
///
/// ```
/// # mod counter {
/// # type Text = iced_native::widget::Text<iced_native::renderer::Null>;
/// #
/// # #[derive(Debug, Clone, Copy)]
/// # pub enum Message {}
/// # pub struct Counter;
/// #
/// # impl Counter {
/// # pub fn view(&mut self) -> Text {
/// # Text::new("")
/// # }
/// # }
/// # }
/// #
/// # mod iced_wgpu {
/// # pub use iced_native::renderer::Null as Renderer;
/// # }
/// #
/// # use counter::Counter;
/// #
/// # struct ManyCounters {
/// # counters: Vec<Counter>,
/// # }
/// #
/// # #[derive(Debug, Clone, Copy)]
/// # pub enum Message {
/// # Counter(usize, counter::Message)
/// # }
/// use iced_native::Element;
/// use iced_native::widget::Row;
/// use iced_wgpu::Renderer;
///
/// impl ManyCounters {
/// pub fn view(&mut self) -> Row<Message, Renderer> {
/// // We can quickly populate a `Row` by folding over our counters
/// self.counters.iter_mut().enumerate().fold(
/// Row::new().spacing(20),
/// |row, (index, counter)| {
/// // We display the counter
/// let element: Element<counter::Message, Renderer> =
/// counter.view().into();
///
/// row.push(
/// // Here we turn our `Element<counter::Message>` into
/// // an `Element<Message>` by combining the `index` and the
/// // message of the `element`.
/// element.map(move |message| Message::Counter(index, message))
/// )
/// }
/// )
/// }
/// }
/// ```
///
/// Finally, our __update logic__ is pretty straightforward: simple
/// delegation.
///
/// ```
/// # mod counter {
/// # #[derive(Debug, Clone, Copy)]
/// # pub enum Message {}
/// # pub struct Counter;
/// #
/// # impl Counter {
/// # pub fn update(&mut self, _message: Message) {}
/// # }
/// # }
/// #
/// # use counter::Counter;
/// #
/// # struct ManyCounters {
/// # counters: Vec<Counter>,
/// # }
/// #
/// # #[derive(Debug, Clone, Copy)]
/// # pub enum Message {
/// # Counter(usize, counter::Message)
/// # }
/// impl ManyCounters {
/// pub fn update(&mut self, message: Message) {
/// match message {
/// Message::Counter(index, counter_msg) => {
/// if let Some(counter) = self.counters.get_mut(index) {
/// counter.update(counter_msg);
/// }
/// }
/// }
/// }
/// }
/// ```
pub fn map<F, B>(self, f: F) -> Element<'a, B, Renderer>
where
Message: 'static,
Renderer: 'a,
B: 'static,
F: 'static + Fn(Message) -> B,
{
Element {
widget: Box::new(Map::new(self.widget, f)),
}
}
/// Marks the [`Element`] as _to-be-explained_.
///
/// The [`Renderer`] will explain the layout of the [`Element`] graphically.
/// This can be very useful for debugging your layout!
///
/// [`Renderer`]: crate::Renderer
pub fn explain<C: Into<Color>>(
self,
color: C,
) -> Element<'a, Message, Renderer>
where
Message: 'static,
Renderer: 'a,
{
Element {
widget: Box::new(Explain::new(self, color.into())),
}
}
/// Returns the width of the [`Element`].
pub fn width(&self) -> Length {
self.widget.width()
}
/// Returns the height of the [`Element`].
pub fn height(&self) -> Length {
self.widget.height()
}
/// Computes the layout of the [`Element`] in the given [`Limits`].
///
/// [`Limits`]: layout::Limits
pub fn layout(
&self,
renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
self.widget.layout(renderer, limits)
}
/// Processes a runtime [`Event`].
pub fn on_event(
&mut self,
event: Event,
layout: Layout<'_>,
cursor_position: Point,
renderer: &Renderer,
clipboard: &mut dyn Clipboard,
shell: &mut Shell<'_, Message>,
) -> event::Status {
self.widget.on_event(
event,
layout,
cursor_position,
renderer,
clipboard,
shell,
)
}
/// Draws the [`Element`] and its children using the given [`Layout`].
pub fn draw(
&self,
renderer: &mut Renderer,
style: &renderer::Style,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
) {
self.widget
.draw(renderer, style, layout, cursor_position, viewport)
}
/// Returns the current [`mouse::Interaction`] of the [`Element`].
pub fn mouse_interaction(
&self,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
renderer: &Renderer,
) -> mouse::Interaction {
self.widget.mouse_interaction(
layout,
cursor_position,
viewport,
renderer,
)
}
/// Returns the overlay of the [`Element`], if there is any.
pub fn overlay<'b>(
&'b mut self,
layout: Layout<'_>,
renderer: &Renderer,
) -> Option<overlay::Element<'b, Message, Renderer>> {
self.widget.overlay(layout, renderer)
}
}
struct Map<'a, A, B, Renderer> {
widget: Box<dyn Widget<A, Renderer> + 'a>,
mapper: Box<dyn Fn(A) -> B>,
}
impl<'a, A, B, Renderer> Map<'a, A, B, Renderer> {
pub fn new<F>(
widget: Box<dyn Widget<A, Renderer> + 'a>,
mapper: F,
) -> Map<'a, A, B, Renderer>
where
F: 'static + Fn(A) -> B,
{
Map {
widget,
mapper: Box::new(mapper),
}
}
}
impl<'a, A, B, Renderer> Widget<B, Renderer> for Map<'a, A, B, Renderer>
where
Renderer: crate::Renderer + 'a,
A: 'static,
B: 'static,
{
fn width(&self) -> Length {
self.widget.width()
}
fn height(&self) -> Length {
self.widget.height()
}
fn layout(
&self,
renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
self.widget.layout(renderer, limits)
}
fn on_event(
&mut self,
event: Event,
layout: Layout<'_>,
cursor_position: Point,
renderer: &Renderer,
clipboard: &mut dyn Clipboard,
shell: &mut Shell<'_, B>,
) -> event::Status {
let mut local_messages = Vec::new();
let mut local_shell = Shell::new(&mut local_messages);
let status = self.widget.on_event(
event,
layout,
cursor_position,
renderer,
clipboard,
&mut local_shell,
);
shell.merge(local_shell, &self.mapper);
status
}
fn draw(
&self,
renderer: &mut Renderer,
style: &renderer::Style,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
) {
self.widget
.draw(renderer, style, layout, cursor_position, viewport)
}
fn mouse_interaction(
&self,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
renderer: &Renderer,
) -> mouse::Interaction {
self.widget.mouse_interaction(
layout,
cursor_position,
viewport,
renderer,
)
}
fn overlay(
&mut self,
layout: Layout<'_>,
renderer: &Renderer,
) -> Option<overlay::Element<'_, B, Renderer>> {
let mapper = &self.mapper;
self.widget
.overlay(layout, renderer)
.map(move |overlay| overlay.map(mapper))
}
}
struct Explain<'a, Message, Renderer: crate::Renderer> {
element: Element<'a, Message, Renderer>,
color: Color,
}
impl<'a, Message, Renderer> Explain<'a, Message, Renderer>
where
Renderer: crate::Renderer,
{
fn new(element: Element<'a, Message, Renderer>, color: Color) -> Self {
Explain { element, color }
}
}
impl<'a, Message, Renderer> Widget<Message, Renderer>
for Explain<'a, Message, Renderer>
where
Renderer: crate::Renderer,
{
fn width(&self) -> Length {
self.element.widget.width()
}
fn height(&self) -> Length {
self.element.widget.height()
}
fn layout(
&self,
renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
self.element.widget.layout(renderer, limits)
}
fn on_event(
&mut self,
event: Event,
layout: Layout<'_>,
cursor_position: Point,
renderer: &Renderer,
clipboard: &mut dyn Clipboard,
shell: &mut Shell<'_, Message>,
) -> event::Status {
self.element.widget.on_event(
event,
layout,
cursor_position,
renderer,
clipboard,
shell,
)
}
fn draw(
&self,
renderer: &mut Renderer,
style: &renderer::Style,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
) {
fn explain_layout<Renderer: crate::Renderer>(
renderer: &mut Renderer,
color: Color,
layout: Layout<'_>,
) {
renderer.fill_quad(
renderer::Quad {
bounds: layout.bounds(),
border_color: color,
border_width: 1.0,
border_radius: 0.0,
},
Color::TRANSPARENT,
);
for child in layout.children() {
explain_layout(renderer, color, child);
}
}
self.element.widget.draw(
renderer,
style,
layout,
cursor_position,
viewport,
);
explain_layout(renderer, self.color, layout);
}
fn mouse_interaction(
&self,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
renderer: &Renderer,
) -> mouse::Interaction {
self.element.widget.mouse_interaction(
layout,
cursor_position,
viewport,
renderer,
)
}
fn overlay(
&mut self,
layout: Layout<'_>,
renderer: &Renderer,
) -> Option<overlay::Element<'_, Message, Renderer>> {
self.element.overlay(layout, renderer)
}
}
| 26.705534 | 88 | 0.505143 |
9b567d25536058246eaa422052dcaf73919826bd | 1,397 | // This is free and unencumbered software released into the public domain.
//
// Anyone is free to copy, modify, publish, use, compile, sell, or
// distribute this software, either in source code form or as a compiled
// binary, for any purpose, commercial or non-commercial, and by any
// means.
//
// In jurisdictions that recognize copyright laws, the author or authors
// of this software dedicate any and all copyright interest in the
// software to the public domain. We make this dedication for the benefit
// of the public at large and to the detriment of our heirs and
// successors. We intend this dedication to be an overt act of
// relinquishment in perpetuity of all present and future rights to this
// software under copyright law.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// For more information, please refer to <https://unlicense.org>
use crate::vpn::channel::types::Channel;
pub type TcpLayerChannel = Channel<([u8; 4], u16, [u8; 4], u16, Vec<u8>)>;
| 48.172414 | 74 | 0.750895 |
01c7118ba5c7a72d8ffabcb9da9ea9bc2ba57cbe | 3,436 | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::HashMap;
use std::hash::{self, BuildHasher, Hash, Hasher};
use lazy_static::lazy_static;
use crate::core::{Key, TypeId, Value, FNV};
use crate::externs::{self, Ident};
///
/// A struct that encapsulates interning of python `Value`s as comparable `Key`s.
///
/// To minimize the total amount of time spent in python code comparing objects (represented on
/// the rust side of the FFI boundary as `Value` instances) to one another, this API supports
/// memoizing `Value`s as `Key`s.
///
/// Creating a `Key` involves interning a `Value` under a (private) `InternKey` struct which
/// implements `Hash` and `Eq` using the precomputed python `__hash__` for the `Value` and
/// delegating to python's `__eq__`, respectively.
///
/// Currently `Value`s are interned indefinitely as `Key`s, meaning that they can never
/// be collected: it's possible that this can eventually be improved by either:
///
/// 1) switching to directly linking-against or embedding python, such that the `Value`
/// type goes away in favor of direct usage of a python object wrapper struct.
/// 2) This structure might begin storing weak-references to `Key`s and/or `Value`s, which
/// would allow the associated `Value` handles to be dropped when they were no longer used.
/// The challenge to this approach is that it would make it more difficult to pass
/// `Key`/`Value` instances across the FFI boundary.
/// 3) `Value` could implement `Eq`/`Hash` directly via extern calls to python (although we've
/// avoided doing this so far because it would hide a relatively expensive operation behind
/// those usually-inexpensive traits).
///
#[derive(Default)]
pub struct Interns {
  // Forward index: (precomputed hash, python `Value`) -> interned `Key`.
  forward: HashMap<InternKey, Key, FNV>,
  // Reverse index: interned `Key` -> the original python `Value`.
  reverse: HashMap<Key, Value, FNV>,
  // Monotonically increasing counter used to mint ids for newly interned `Key`s.
  id_generator: u64,
}
lazy_static! {
  // Shared `BuildHasher` used by `insert_product` to hash product `TypeId`s.
  static ref PRODUCT_TYPE_ID_HASH_BUILDER: FNV = FNV::default();
}
impl Interns {
  pub fn new() -> Interns {
    Interns::default()
  }
  // Interns `v` under the given precomputed `hash`: returns the existing `Key`
  // if an equal value (per `InternKey`'s python-`__eq__`-based equality) was
  // interned before, otherwise mints a new `Key` and records it in both maps.
  fn perform_insert(&mut self, v: Value, hash: i64, type_id: TypeId) -> Key {
    let mut inserted = false;
    let id_generator = self.id_generator;
    let key = *self
      .forward
      .entry(InternKey(hash, v.clone()))
      .or_insert_with(|| {
        // Runs only for previously-unseen values; flag it so the reverse map
        // and the id counter can be updated after the closure, where `self`
        // may be borrowed mutably again.
        inserted = true;
        Key::new(id_generator, type_id)
      });
    if inserted {
      self.reverse.insert(key, v);
      self.id_generator += 1;
    }
    key
  }
  // Interns a product value. Note that only the product `TypeId` is hashed, so
  // all values of one product type share a hash bucket and are distinguished
  // solely by `InternKey::eq` (python `__eq__`).
  pub fn insert_product(&mut self, v: Value) -> Key {
    let type_id = externs::product_type(&v);
    let mut hasher = PRODUCT_TYPE_ID_HASH_BUILDER.build_hasher();
    type_id.hash(&mut hasher);
    let hash: i64 = hasher.finish() as i64;
    self.perform_insert(v, hash, type_id)
  }
  // Interns a value using the python-side `__hash__`/type identity from `identify`.
  pub fn insert(&mut self, v: Value) -> Key {
    let Ident { hash, type_id } = externs::identify(&v);
    self.perform_insert(v, hash, type_id)
  }
  // Looks up the `Value` for a previously interned `Key`.
  // Panics if the key was never interned (a broken invariant, since keys are
  // only created by this struct and entries are never removed).
  pub fn get(&self, k: &Key) -> &Value {
    self
      .reverse
      .get(&k)
      .unwrap_or_else(|| panic!("Previously memoized object disappeared for {:?}", k))
  }
}
// Key for the forward intern map: a precomputed hash paired with the python
// value itself. Hashing uses only the precomputed hash; equality delegates to
// python, so hash collisions are resolved by `__eq__`.
struct InternKey(i64, Value);
impl Eq for InternKey {}
impl PartialEq for InternKey {
  fn eq(&self, other: &InternKey) -> bool {
    // Delegates to python's `__eq__` on the wrapped values.
    externs::equals(&self.1, &other.1)
  }
}
impl hash::Hash for InternKey {
  fn hash<H: hash::Hasher>(&self, state: &mut H) {
    // Only the precomputed hash participates; the `Value` is intentionally ignored.
    self.0.hash(state);
  }
}
| 32.72381 | 96 | 0.672584 |
e59d21e7fcef06e42b0f06d5f6115dd296ad06c9 | 35,981 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! This module provides ScalarValue, an enum that can be used for storage of single elements
use std::{convert::TryFrom, fmt, iter::repeat, sync::Arc};
use arrow::datatypes::{ArrowDictionaryKeyType, DataType, Field, IntervalUnit, TimeUnit};
use arrow::{
array::*,
datatypes::{
ArrowNativeType, Float32Type, Int16Type, Int32Type, Int64Type, Int8Type,
TimestampNanosecondType, UInt16Type, UInt32Type, UInt64Type, UInt8Type,
},
};
use arrow::{
array::{
ArrayRef, Int16Builder, Int32Builder, Int64Builder, Int8Builder, ListBuilder,
TimestampMicrosecondArray, TimestampMillisecondArray, TimestampNanosecondArray,
UInt16Builder, UInt32Builder, UInt64Builder, UInt8Builder,
},
datatypes::{
TimestampMicrosecondType, TimestampMillisecondType, TimestampSecondType,
},
};
use crate::error::{DataFusionError, Result};
/// Represents a dynamically typed, nullable single value.
/// This is the single-valued counter-part of arrow’s `Array`.
///
/// Each variant wraps an `Option`: `None` represents the null value of that
/// type. Timestamp variants store only a raw `i64`; no time zone is carried.
#[derive(Clone, PartialEq)]
pub enum ScalarValue {
    /// true or false value
    Boolean(Option<bool>),
    /// 32bit float
    Float32(Option<f32>),
    /// 64bit float
    Float64(Option<f64>),
    /// signed 8bit int
    Int8(Option<i8>),
    /// signed 16bit int
    Int16(Option<i16>),
    /// signed 32bit int
    Int32(Option<i32>),
    /// signed 64bit int
    Int64(Option<i64>),
    /// unsigned 8bit int
    UInt8(Option<u8>),
    /// unsigned 16bit int
    UInt16(Option<u16>),
    /// unsigned 32bit int
    UInt32(Option<u32>),
    /// unsigned 64bit int
    UInt64(Option<u64>),
    /// utf-8 encoded string.
    Utf8(Option<String>),
    /// utf-8 encoded string representing a LargeString's arrow type.
    LargeUtf8(Option<String>),
    /// binary
    Binary(Option<Vec<u8>>),
    /// large binary
    LargeBinary(Option<Vec<u8>>),
    /// list of nested ScalarValue
    /// (the `DataType` is the element type of the list, not the list type itself)
    List(Option<Vec<ScalarValue>>, DataType),
    /// Date stored as a signed 32bit int
    Date32(Option<i32>),
    /// Date stored as a signed 64bit int
    Date64(Option<i64>),
    /// Timestamp Second
    TimestampSecond(Option<i64>),
    /// Timestamp Milliseconds
    TimestampMillisecond(Option<i64>),
    /// Timestamp Microseconds
    TimestampMicrosecond(Option<i64>),
    /// Timestamp Nanoseconds
    TimestampNanosecond(Option<i64>),
    /// Interval with YearMonth unit
    IntervalYearMonth(Option<i32>),
    /// Interval with DayTime unit
    IntervalDayTime(Option<i64>),
}
// Extracts the value at `$index` of a concrete `$ARRAYTYPE` as a
// `ScalarValue::$SCALAR`, mapping an arrow null slot to `$SCALAR(None)`.
// The downcast `unwrap` is safe only when the caller matched the DataType first.
macro_rules! typed_cast {
    ($array:expr, $index:expr, $ARRAYTYPE:ident, $SCALAR:ident) => {{
        let array = $array.as_any().downcast_ref::<$ARRAYTYPE>().unwrap();
        ScalarValue::$SCALAR(match array.is_null($index) {
            true => None,
            false => Some(array.value($index).into()),
        })
    }};
}
// Builds a `ListArray` of `$SIZE` rows from an `Option<Vec<ScalarValue>>`.
// A `None` value short-circuits the *enclosing function* (note the `return`)
// with an all-null list array of the right element type.
macro_rules! build_list {
    ($VALUE_BUILDER_TY:ident, $SCALAR_TY:ident, $VALUES:expr, $SIZE:expr) => {{
        match $VALUES {
            // the return on the macro is necessary, to short-circuit and return ArrayRef
            None => {
                return new_null_array(
                    &DataType::List(Box::new(Field::new(
                        "item",
                        DataType::$SCALAR_TY,
                        true,
                    ))),
                    $SIZE,
                )
            }
            Some(values) => {
                build_values_list!($VALUE_BUILDER_TY, $SCALAR_TY, values, $SIZE)
            }
        }
    }};
}
// Builds a `ListArray` of `$SIZE` rows whose elements are timestamps of the
// requested `$TIME_UNIT`. A `None` values argument short-circuits the
// *enclosing function* (note the `return`) with an all-null list array.
//
// Fix: the Millisecond and Microsecond arms were previously swapped —
// `TimeUnit::Microsecond` dispatched to `TimestampMillisecondBuilder` (and
// vice versa), so the produced array's unit disagreed with the requested one
// and with the `ScalarValue::Timestamp*` variants being read.
macro_rules! build_timestamp_list {
    ($TIME_UNIT:expr, $TIME_ZONE:expr, $VALUES:expr, $SIZE:expr) => {{
        match $VALUES {
            // the return on the macro is necessary, to short-circuit and return ArrayRef
            None => {
                return new_null_array(
                    &DataType::List(Box::new(Field::new(
                        "item",
                        DataType::Timestamp($TIME_UNIT, $TIME_ZONE),
                        true,
                    ))),
                    $SIZE,
                )
            }
            // Dispatch to the builder matching the requested time unit.
            Some(values) => match $TIME_UNIT {
                TimeUnit::Second => build_values_list!(
                    TimestampSecondBuilder,
                    TimestampSecond,
                    values,
                    $SIZE
                ),
                TimeUnit::Millisecond => build_values_list!(
                    TimestampMillisecondBuilder,
                    TimestampMillisecond,
                    values,
                    $SIZE
                ),
                TimeUnit::Microsecond => build_values_list!(
                    TimestampMicrosecondBuilder,
                    TimestampMicrosecond,
                    values,
                    $SIZE
                ),
                TimeUnit::Nanosecond => build_values_list!(
                    TimestampNanosecondBuilder,
                    TimestampNanosecond,
                    values,
                    $SIZE
                ),
            },
        }
    }};
}
// Appends the same `$VALUES` sequence `$SIZE` times into a list builder and
// finishes it, i.e. every row of the resulting ListArray holds a copy of the
// full value sequence. Panics if any element is not the expected variant.
macro_rules! build_values_list {
    ($VALUE_BUILDER_TY:ident, $SCALAR_TY:ident, $VALUES:expr, $SIZE:expr) => {{
        let mut builder = ListBuilder::new($VALUE_BUILDER_TY::new($VALUES.len()));
        for _ in 0..$SIZE {
            for scalar_value in $VALUES {
                match scalar_value {
                    ScalarValue::$SCALAR_TY(Some(v)) => {
                        builder.values().append_value(v.clone()).unwrap()
                    }
                    ScalarValue::$SCALAR_TY(None) => {
                        builder.values().append_null().unwrap();
                    }
                    _ => panic!("Incompatible ScalarValue for list"),
                };
            }
            builder.append(true).unwrap();
        }
        builder.finish()
    }};
}
impl ScalarValue {
    /// Getter for the `DataType` of the value.
    ///
    /// Timestamp variants always map to `Timestamp(unit, None)` — no time
    /// zone is stored in a `ScalarValue`.
    pub fn get_datatype(&self) -> DataType {
        match self {
            ScalarValue::Boolean(_) => DataType::Boolean,
            ScalarValue::UInt8(_) => DataType::UInt8,
            ScalarValue::UInt16(_) => DataType::UInt16,
            ScalarValue::UInt32(_) => DataType::UInt32,
            ScalarValue::UInt64(_) => DataType::UInt64,
            ScalarValue::Int8(_) => DataType::Int8,
            ScalarValue::Int16(_) => DataType::Int16,
            ScalarValue::Int32(_) => DataType::Int32,
            ScalarValue::Int64(_) => DataType::Int64,
            ScalarValue::TimestampSecond(_) => {
                DataType::Timestamp(TimeUnit::Second, None)
            }
            ScalarValue::TimestampMillisecond(_) => {
                DataType::Timestamp(TimeUnit::Millisecond, None)
            }
            ScalarValue::TimestampMicrosecond(_) => {
                DataType::Timestamp(TimeUnit::Microsecond, None)
            }
            ScalarValue::TimestampNanosecond(_) => {
                DataType::Timestamp(TimeUnit::Nanosecond, None)
            }
            ScalarValue::Float32(_) => DataType::Float32,
            ScalarValue::Float64(_) => DataType::Float64,
            ScalarValue::Utf8(_) => DataType::Utf8,
            ScalarValue::LargeUtf8(_) => DataType::LargeUtf8,
            ScalarValue::Binary(_) => DataType::Binary,
            ScalarValue::LargeBinary(_) => DataType::LargeBinary,
            // The stored DataType is the *element* type; wrap it in a List field.
            ScalarValue::List(_, data_type) => {
                DataType::List(Box::new(Field::new("item", data_type.clone(), true)))
            }
            ScalarValue::Date32(_) => DataType::Date32,
            ScalarValue::Date64(_) => DataType::Date64,
            ScalarValue::IntervalYearMonth(_) => {
                DataType::Interval(IntervalUnit::YearMonth)
            }
            ScalarValue::IntervalDayTime(_) => DataType::Interval(IntervalUnit::DayTime),
        }
    }
/// Calculate arithmetic negation for a scalar value
pub fn arithmetic_negate(&self) -> Self {
match self {
ScalarValue::Boolean(None)
| ScalarValue::Int8(None)
| ScalarValue::Int16(None)
| ScalarValue::Int32(None)
| ScalarValue::Int64(None)
| ScalarValue::Float32(None) => self.clone(),
ScalarValue::Float64(Some(v)) => ScalarValue::Float64(Some(-v)),
ScalarValue::Float32(Some(v)) => ScalarValue::Float32(Some(-v)),
ScalarValue::Int8(Some(v)) => ScalarValue::Int8(Some(-v)),
ScalarValue::Int16(Some(v)) => ScalarValue::Int16(Some(-v)),
ScalarValue::Int32(Some(v)) => ScalarValue::Int32(Some(-v)),
ScalarValue::Int64(Some(v)) => ScalarValue::Int64(Some(-v)),
_ => panic!("Cannot run arithmetic negate on scalar value: {:?}", self),
}
}
/// whether this value is null or not.
pub fn is_null(&self) -> bool {
matches!(
*self,
ScalarValue::Boolean(None)
| ScalarValue::UInt8(None)
| ScalarValue::UInt16(None)
| ScalarValue::UInt32(None)
| ScalarValue::UInt64(None)
| ScalarValue::Int8(None)
| ScalarValue::Int16(None)
| ScalarValue::Int32(None)
| ScalarValue::Int64(None)
| ScalarValue::Float32(None)
| ScalarValue::Float64(None)
| ScalarValue::Utf8(None)
| ScalarValue::LargeUtf8(None)
| ScalarValue::List(None, _)
| ScalarValue::TimestampMillisecond(None)
| ScalarValue::TimestampMicrosecond(None)
| ScalarValue::TimestampNanosecond(None)
)
}
    /// Converts a scalar value into a 1-row array.
    pub fn to_array(&self) -> ArrayRef {
        self.to_array_of_size(1)
    }
    /// Converts a scalar value into an array of `size` rows, repeating the
    /// value (or null) in every row.
    ///
    /// # Panics
    /// Panics for `List` element types that have no arm in the inner match.
    pub fn to_array_of_size(&self, size: usize) -> ArrayRef {
        match self {
            ScalarValue::Boolean(e) => {
                Arc::new(BooleanArray::from(vec![*e; size])) as ArrayRef
            }
            ScalarValue::Float64(e) => match e {
                Some(value) => Arc::new(Float64Array::from_value(*value, size)),
                None => new_null_array(&DataType::Float64, size),
            },
            ScalarValue::Float32(e) => match e {
                Some(value) => Arc::new(Float32Array::from_value(*value, size)),
                None => new_null_array(&DataType::Float32, size),
            },
            ScalarValue::Int8(e) => match e {
                Some(value) => Arc::new(Int8Array::from_value(*value, size)),
                None => new_null_array(&DataType::Int8, size),
            },
            ScalarValue::Int16(e) => match e {
                Some(value) => Arc::new(Int16Array::from_value(*value, size)),
                None => new_null_array(&DataType::Int16, size),
            },
            ScalarValue::Int32(e) => match e {
                Some(value) => Arc::new(Int32Array::from_value(*value, size)),
                None => new_null_array(&DataType::Int32, size),
            },
            ScalarValue::Int64(e) => match e {
                Some(value) => Arc::new(Int64Array::from_value(*value, size)),
                None => new_null_array(&DataType::Int64, size),
            },
            ScalarValue::UInt8(e) => match e {
                Some(value) => Arc::new(UInt8Array::from_value(*value, size)),
                None => new_null_array(&DataType::UInt8, size),
            },
            ScalarValue::UInt16(e) => match e {
                Some(value) => Arc::new(UInt16Array::from_value(*value, size)),
                None => new_null_array(&DataType::UInt16, size),
            },
            ScalarValue::UInt32(e) => match e {
                Some(value) => Arc::new(UInt32Array::from_value(*value, size)),
                None => new_null_array(&DataType::UInt32, size),
            },
            ScalarValue::UInt64(e) => match e {
                Some(value) => Arc::new(UInt64Array::from_value(*value, size)),
                None => new_null_array(&DataType::UInt64, size),
            },
            ScalarValue::TimestampSecond(e) => match e {
                Some(value) => Arc::new(TimestampSecondArray::from_iter_values(
                    repeat(*value).take(size),
                )),
                None => {
                    new_null_array(&DataType::Timestamp(TimeUnit::Second, None), size)
                }
            },
            ScalarValue::TimestampMillisecond(e) => match e {
                Some(value) => Arc::new(TimestampMillisecondArray::from_iter_values(
                    repeat(*value).take(size),
                )),
                None => new_null_array(
                    &DataType::Timestamp(TimeUnit::Millisecond, None),
                    size,
                ),
            },
            ScalarValue::TimestampMicrosecond(e) => match e {
                Some(value) => {
                    Arc::new(TimestampMicrosecondArray::from_value(*value, size))
                }
                None => new_null_array(
                    &DataType::Timestamp(TimeUnit::Microsecond, None),
                    size,
                ),
            },
            ScalarValue::TimestampNanosecond(e) => match e {
                Some(value) => {
                    Arc::new(TimestampNanosecondArray::from_value(*value, size))
                }
                None => {
                    new_null_array(&DataType::Timestamp(TimeUnit::Nanosecond, None), size)
                }
            },
            ScalarValue::Utf8(e) => match e {
                Some(value) => {
                    Arc::new(StringArray::from_iter_values(repeat(value).take(size)))
                }
                None => new_null_array(&DataType::Utf8, size),
            },
            ScalarValue::LargeUtf8(e) => match e {
                Some(value) => {
                    Arc::new(LargeStringArray::from_iter_values(repeat(value).take(size)))
                }
                None => new_null_array(&DataType::LargeUtf8, size),
            },
            ScalarValue::Binary(e) => match e {
                Some(value) => Arc::new(
                    repeat(Some(value.as_slice()))
                        .take(size)
                        .collect::<BinaryArray>(),
                ),
                None => {
                    Arc::new(repeat(None::<&str>).take(size).collect::<BinaryArray>())
                }
            },
            ScalarValue::LargeBinary(e) => match e {
                Some(value) => Arc::new(
                    repeat(Some(value.as_slice()))
                        .take(size)
                        .collect::<LargeBinaryArray>(),
                ),
                None => Arc::new(
                    repeat(None::<&str>)
                        .take(size)
                        .collect::<LargeBinaryArray>(),
                ),
            },
            // The build_* macros early-return from this function with an
            // all-null list array when `values` is None.
            ScalarValue::List(values, data_type) => Arc::new(match data_type {
                DataType::Boolean => build_list!(BooleanBuilder, Boolean, values, size),
                DataType::Int8 => build_list!(Int8Builder, Int8, values, size),
                DataType::Int16 => build_list!(Int16Builder, Int16, values, size),
                DataType::Int32 => build_list!(Int32Builder, Int32, values, size),
                DataType::Int64 => build_list!(Int64Builder, Int64, values, size),
                DataType::UInt8 => build_list!(UInt8Builder, UInt8, values, size),
                DataType::UInt16 => build_list!(UInt16Builder, UInt16, values, size),
                DataType::UInt32 => build_list!(UInt32Builder, UInt32, values, size),
                DataType::UInt64 => build_list!(UInt64Builder, UInt64, values, size),
                DataType::Utf8 => build_list!(StringBuilder, Utf8, values, size),
                DataType::Float32 => build_list!(Float32Builder, Float32, values, size),
                DataType::Float64 => build_list!(Float64Builder, Float64, values, size),
                DataType::Timestamp(unit, tz) => {
                    build_timestamp_list!(unit.clone(), tz.clone(), values, size)
                }
                DataType::LargeUtf8 => {
                    build_list!(LargeStringBuilder, LargeUtf8, values, size)
                }
                dt => panic!("Unexpected DataType for list {:?}", dt),
            }),
            ScalarValue::Date32(e) => match e {
                Some(value) => Arc::new(Date32Array::from_value(*value, size)),
                None => new_null_array(&DataType::Date32, size),
            },
            ScalarValue::Date64(e) => match e {
                Some(value) => Arc::new(Date64Array::from_value(*value, size)),
                None => new_null_array(&DataType::Date64, size),
            },
            ScalarValue::IntervalDayTime(e) => match e {
                Some(value) => Arc::new(IntervalDayTimeArray::from_value(*value, size)),
                None => new_null_array(&DataType::Interval(IntervalUnit::DayTime), size),
            },
            ScalarValue::IntervalYearMonth(e) => match e {
                Some(value) => Arc::new(IntervalYearMonthArray::from_value(*value, size)),
                None => {
                    new_null_array(&DataType::Interval(IntervalUnit::YearMonth), size)
                }
            },
        }
    }
    /// Converts a value in `array` at `index` into a ScalarValue.
    ///
    /// # Errors
    /// Returns `DataFusionError::Internal` for downcast failures or
    /// unsupported dictionary index types, and
    /// `DataFusionError::NotImplemented` for unsupported array types.
    pub fn try_from_array(array: &ArrayRef, index: usize) -> Result<Self> {
        Ok(match array.data_type() {
            DataType::Boolean => typed_cast!(array, index, BooleanArray, Boolean),
            DataType::Float64 => typed_cast!(array, index, Float64Array, Float64),
            DataType::Float32 => typed_cast!(array, index, Float32Array, Float32),
            DataType::UInt64 => typed_cast!(array, index, UInt64Array, UInt64),
            DataType::UInt32 => typed_cast!(array, index, UInt32Array, UInt32),
            DataType::UInt16 => typed_cast!(array, index, UInt16Array, UInt16),
            DataType::UInt8 => typed_cast!(array, index, UInt8Array, UInt8),
            DataType::Int64 => typed_cast!(array, index, Int64Array, Int64),
            DataType::Int32 => typed_cast!(array, index, Int32Array, Int32),
            DataType::Int16 => typed_cast!(array, index, Int16Array, Int16),
            DataType::Int8 => typed_cast!(array, index, Int8Array, Int8),
            DataType::Utf8 => typed_cast!(array, index, StringArray, Utf8),
            DataType::LargeUtf8 => typed_cast!(array, index, LargeStringArray, LargeUtf8),
            DataType::List(nested_type) => {
                let list_array =
                    array.as_any().downcast_ref::<ListArray>().ok_or_else(|| {
                        DataFusionError::Internal(
                            "Failed to downcast ListArray".to_string(),
                        )
                    })?;
                // Recursively convert each element of the nested array.
                let value = match list_array.is_null(index) {
                    true => None,
                    false => {
                        let nested_array = list_array.value(index);
                        let scalar_vec = (0..nested_array.len())
                            .map(|i| ScalarValue::try_from_array(&nested_array, i))
                            .collect::<Result<Vec<_>>>()?;
                        Some(scalar_vec)
                    }
                };
                ScalarValue::List(value, nested_type.data_type().clone())
            }
            DataType::Date32 => {
                typed_cast!(array, index, Date32Array, Date32)
            }
            DataType::Date64 => {
                typed_cast!(array, index, Date64Array, Date64)
            }
            DataType::Timestamp(TimeUnit::Second, _) => {
                typed_cast!(array, index, TimestampSecondArray, TimestampSecond)
            }
            DataType::Timestamp(TimeUnit::Millisecond, _) => {
                typed_cast!(
                    array,
                    index,
                    TimestampMillisecondArray,
                    TimestampMillisecond
                )
            }
            DataType::Timestamp(TimeUnit::Microsecond, _) => {
                typed_cast!(
                    array,
                    index,
                    TimestampMicrosecondArray,
                    TimestampMicrosecond
                )
            }
            DataType::Timestamp(TimeUnit::Nanosecond, _) => {
                typed_cast!(array, index, TimestampNanosecondArray, TimestampNanosecond)
            }
            // Dictionary arrays: resolve via the key type, then read the value.
            DataType::Dictionary(index_type, _) => match **index_type {
                DataType::Int8 => Self::try_from_dict_array::<Int8Type>(array, index)?,
                DataType::Int16 => Self::try_from_dict_array::<Int16Type>(array, index)?,
                DataType::Int32 => Self::try_from_dict_array::<Int32Type>(array, index)?,
                DataType::Int64 => Self::try_from_dict_array::<Int64Type>(array, index)?,
                DataType::UInt8 => Self::try_from_dict_array::<UInt8Type>(array, index)?,
                DataType::UInt16 => {
                    Self::try_from_dict_array::<UInt16Type>(array, index)?
                }
                DataType::UInt32 => {
                    Self::try_from_dict_array::<UInt32Type>(array, index)?
                }
                DataType::UInt64 => {
                    Self::try_from_dict_array::<UInt64Type>(array, index)?
                }
                _ => {
                    return Err(DataFusionError::Internal(format!(
                        "Index type not supported while creating scalar from dictionary: {}",
                        array.data_type(),
                    )))
                }
            },
            other => {
                return Err(DataFusionError::NotImplemented(format!(
                    "Can't create a scalar from array of type \"{:?}\"",
                    other
                )))
            }
        })
    }
    // Reads the dictionary key at `index`, then converts the referenced entry
    // of the values array. The downcast `unwrap` is safe because the caller
    // matched `DataType::Dictionary` with this key type.
    fn try_from_dict_array<K: ArrowDictionaryKeyType>(
        array: &ArrayRef,
        index: usize,
    ) -> Result<Self> {
        let dict_array = array.as_any().downcast_ref::<DictionaryArray<K>>().unwrap();
        // look up the index in the values dictionary
        let keys_col = dict_array.keys_array();
        let values_index = keys_col.value(index).to_usize().ok_or_else(|| {
            DataFusionError::Internal(format!(
                "Can not convert index to usize in dictionary of type creating group by value {:?}",
                keys_col.data_type()
            ))
        })?;
        Self::try_from_array(&dict_array.values(), values_index)
    }
}
impl From<f64> for ScalarValue {
fn from(value: f64) -> Self {
ScalarValue::Float64(Some(value))
}
}
impl From<f32> for ScalarValue {
fn from(value: f32) -> Self {
ScalarValue::Float32(Some(value))
}
}
impl From<i8> for ScalarValue {
fn from(value: i8) -> Self {
ScalarValue::Int8(Some(value))
}
}
impl From<i16> for ScalarValue {
fn from(value: i16) -> Self {
ScalarValue::Int16(Some(value))
}
}
impl From<i32> for ScalarValue {
fn from(value: i32) -> Self {
ScalarValue::Int32(Some(value))
}
}
impl From<i64> for ScalarValue {
fn from(value: i64) -> Self {
ScalarValue::Int64(Some(value))
}
}
impl From<bool> for ScalarValue {
fn from(value: bool) -> Self {
ScalarValue::Boolean(Some(value))
}
}
impl From<u8> for ScalarValue {
fn from(value: u8) -> Self {
ScalarValue::UInt8(Some(value))
}
}
impl From<u16> for ScalarValue {
fn from(value: u16) -> Self {
ScalarValue::UInt16(Some(value))
}
}
impl From<u32> for ScalarValue {
fn from(value: u32) -> Self {
ScalarValue::UInt32(Some(value))
}
}
impl From<u64> for ScalarValue {
fn from(value: u64) -> Self {
ScalarValue::UInt64(Some(value))
}
}
// Generates a `TryFrom<ScalarValue>` for a native type, succeeding only for
// the matching non-null variant and returning an Internal error otherwise.
macro_rules! impl_try_from {
    ($SCALAR:ident, $NATIVE:ident) => {
        impl TryFrom<ScalarValue> for $NATIVE {
            type Error = DataFusionError;
            fn try_from(value: ScalarValue) -> Result<Self> {
                match value {
                    ScalarValue::$SCALAR(Some(inner_value)) => Ok(inner_value),
                    _ => Err(DataFusionError::Internal(format!(
                        "Cannot convert {:?} to {}",
                        value,
                        std::any::type_name::<Self>()
                    ))),
                }
            }
        }
    };
}
impl_try_from!(Int8, i8);
impl_try_from!(Int16, i16);
// special implementation for i32 because of Date32 (also stored as i32)
impl TryFrom<ScalarValue> for i32 {
    type Error = DataFusionError;
    fn try_from(value: ScalarValue) -> Result<Self> {
        match value {
            ScalarValue::Int32(Some(inner_value))
            | ScalarValue::Date32(Some(inner_value)) => Ok(inner_value),
            _ => Err(DataFusionError::Internal(format!(
                "Cannot convert {:?} to {}",
                value,
                std::any::type_name::<Self>()
            ))),
        }
    }
}
// special implementation for i64 because of TimestampNanosecond (also i64)
impl TryFrom<ScalarValue> for i64 {
    type Error = DataFusionError;
    fn try_from(value: ScalarValue) -> Result<Self> {
        match value {
            ScalarValue::Int64(Some(inner_value))
            | ScalarValue::TimestampNanosecond(Some(inner_value)) => Ok(inner_value),
            _ => Err(DataFusionError::Internal(format!(
                "Cannot convert {:?} to {}",
                value,
                std::any::type_name::<Self>()
            ))),
        }
    }
}
impl_try_from!(UInt8, u8);
impl_try_from!(UInt16, u16);
impl_try_from!(UInt32, u32);
impl_try_from!(UInt64, u64);
impl_try_from!(Float32, f32);
impl_try_from!(Float64, f64);
impl_try_from!(Boolean, bool);
// Creates the *null* ScalarValue of a given arrow DataType, or a
// NotImplemented error for unsupported types.
impl TryFrom<&DataType> for ScalarValue {
    type Error = DataFusionError;
    fn try_from(datatype: &DataType) -> Result<Self> {
        Ok(match datatype {
            DataType::Boolean => ScalarValue::Boolean(None),
            DataType::Float64 => ScalarValue::Float64(None),
            DataType::Float32 => ScalarValue::Float32(None),
            DataType::Int8 => ScalarValue::Int8(None),
            DataType::Int16 => ScalarValue::Int16(None),
            DataType::Int32 => ScalarValue::Int32(None),
            DataType::Int64 => ScalarValue::Int64(None),
            DataType::UInt8 => ScalarValue::UInt8(None),
            DataType::UInt16 => ScalarValue::UInt16(None),
            DataType::UInt32 => ScalarValue::UInt32(None),
            DataType::UInt64 => ScalarValue::UInt64(None),
            DataType::Utf8 => ScalarValue::Utf8(None),
            DataType::LargeUtf8 => ScalarValue::LargeUtf8(None),
            // Any time zone information on the input type is dropped here.
            DataType::Timestamp(TimeUnit::Second, _) => {
                ScalarValue::TimestampSecond(None)
            }
            DataType::Timestamp(TimeUnit::Millisecond, _) => {
                ScalarValue::TimestampMillisecond(None)
            }
            DataType::Timestamp(TimeUnit::Microsecond, _) => {
                ScalarValue::TimestampMicrosecond(None)
            }
            DataType::Timestamp(TimeUnit::Nanosecond, _) => {
                ScalarValue::TimestampNanosecond(None)
            }
            DataType::List(ref nested_type) => {
                ScalarValue::List(None, nested_type.data_type().clone())
            }
            _ => {
                return Err(DataFusionError::NotImplemented(format!(
                    "Can't create a scalar of type \"{:?}\"",
                    datatype
                )))
            }
        })
    }
}
// Writes the inner value with `Display`, or the literal string "NULL" for None.
macro_rules! format_option {
    ($F:expr, $EXPR:expr) => {{
        match $EXPR {
            Some(e) => write!($F, "{}", e),
            None => write!($F, "NULL"),
        }
    }};
}
// User-facing rendering: bare values, "NULL" for nulls; binary values and
// lists are rendered as comma-joined element values (decimal bytes for binary).
impl fmt::Display for ScalarValue {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ScalarValue::Boolean(e) => format_option!(f, e)?,
            ScalarValue::Float32(e) => format_option!(f, e)?,
            ScalarValue::Float64(e) => format_option!(f, e)?,
            ScalarValue::Int8(e) => format_option!(f, e)?,
            ScalarValue::Int16(e) => format_option!(f, e)?,
            ScalarValue::Int32(e) => format_option!(f, e)?,
            ScalarValue::Int64(e) => format_option!(f, e)?,
            ScalarValue::UInt8(e) => format_option!(f, e)?,
            ScalarValue::UInt16(e) => format_option!(f, e)?,
            ScalarValue::UInt32(e) => format_option!(f, e)?,
            ScalarValue::UInt64(e) => format_option!(f, e)?,
            ScalarValue::TimestampSecond(e) => format_option!(f, e)?,
            ScalarValue::TimestampMillisecond(e) => format_option!(f, e)?,
            ScalarValue::TimestampMicrosecond(e) => format_option!(f, e)?,
            ScalarValue::TimestampNanosecond(e) => format_option!(f, e)?,
            ScalarValue::Utf8(e) => format_option!(f, e)?,
            ScalarValue::LargeUtf8(e) => format_option!(f, e)?,
            ScalarValue::Binary(e) => match e {
                Some(l) => write!(
                    f,
                    "{}",
                    l.iter()
                        .map(|v| format!("{}", v))
                        .collect::<Vec<_>>()
                        .join(",")
                )?,
                None => write!(f, "NULL")?,
            },
            ScalarValue::LargeBinary(e) => match e {
                Some(l) => write!(
                    f,
                    "{}",
                    l.iter()
                        .map(|v| format!("{}", v))
                        .collect::<Vec<_>>()
                        .join(",")
                )?,
                None => write!(f, "NULL")?,
            },
            ScalarValue::List(e, _) => match e {
                Some(l) => write!(
                    f,
                    "{}",
                    l.iter()
                        .map(|v| format!("{}", v))
                        .collect::<Vec<_>>()
                        .join(",")
                )?,
                None => write!(f, "NULL")?,
            },
            ScalarValue::Date32(e) => format_option!(f, e)?,
            ScalarValue::Date64(e) => format_option!(f, e)?,
            ScalarValue::IntervalDayTime(e) => format_option!(f, e)?,
            ScalarValue::IntervalYearMonth(e) => format_option!(f, e)?,
        };
        Ok(())
    }
}
// Diagnostic rendering: wraps the Display output in the variant name, quoting
// string-like and temporal values when present.
impl fmt::Debug for ScalarValue {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ScalarValue::Boolean(_) => write!(f, "Boolean({})", self),
            ScalarValue::Float32(_) => write!(f, "Float32({})", self),
            ScalarValue::Float64(_) => write!(f, "Float64({})", self),
            ScalarValue::Int8(_) => write!(f, "Int8({})", self),
            ScalarValue::Int16(_) => write!(f, "Int16({})", self),
            ScalarValue::Int32(_) => write!(f, "Int32({})", self),
            ScalarValue::Int64(_) => write!(f, "Int64({})", self),
            ScalarValue::UInt8(_) => write!(f, "UInt8({})", self),
            ScalarValue::UInt16(_) => write!(f, "UInt16({})", self),
            ScalarValue::UInt32(_) => write!(f, "UInt32({})", self),
            ScalarValue::UInt64(_) => write!(f, "UInt64({})", self),
            ScalarValue::TimestampSecond(_) => write!(f, "TimestampSecond({})", self),
            ScalarValue::TimestampMillisecond(_) => {
                write!(f, "TimestampMillisecond({})", self)
            }
            ScalarValue::TimestampMicrosecond(_) => {
                write!(f, "TimestampMicrosecond({})", self)
            }
            ScalarValue::TimestampNanosecond(_) => {
                write!(f, "TimestampNanosecond({})", self)
            }
            ScalarValue::Utf8(None) => write!(f, "Utf8({})", self),
            ScalarValue::Utf8(Some(_)) => write!(f, "Utf8(\"{}\")", self),
            ScalarValue::LargeUtf8(None) => write!(f, "LargeUtf8({})", self),
            ScalarValue::LargeUtf8(Some(_)) => write!(f, "LargeUtf8(\"{}\")", self),
            ScalarValue::Binary(None) => write!(f, "Binary({})", self),
            ScalarValue::Binary(Some(_)) => write!(f, "Binary(\"{}\")", self),
            ScalarValue::LargeBinary(None) => write!(f, "LargeBinary({})", self),
            ScalarValue::LargeBinary(Some(_)) => write!(f, "LargeBinary(\"{}\")", self),
            ScalarValue::List(_, _) => write!(f, "List([{}])", self),
            ScalarValue::Date32(_) => write!(f, "Date32(\"{}\")", self),
            ScalarValue::Date64(_) => write!(f, "Date64(\"{}\")", self),
            ScalarValue::IntervalDayTime(_) => {
                write!(f, "IntervalDayTime(\"{}\")", self)
            }
            ScalarValue::IntervalYearMonth(_) => {
                write!(f, "IntervalYearMonth(\"{}\")", self)
            }
        }
    }
}
/// Trait used to map a NativeTime to a ScalarType.
pub trait ScalarType<T: ArrowNativeType> {
    /// returns a scalar from an optional T
    fn scalar(r: Option<T>) -> ScalarValue;
}
impl ScalarType<f32> for Float32Type {
    fn scalar(r: Option<f32>) -> ScalarValue {
        ScalarValue::Float32(r)
    }
}
impl ScalarType<i64> for TimestampSecondType {
    fn scalar(r: Option<i64>) -> ScalarValue {
        ScalarValue::TimestampSecond(r)
    }
}
impl ScalarType<i64> for TimestampMillisecondType {
    fn scalar(r: Option<i64>) -> ScalarValue {
        ScalarValue::TimestampMillisecond(r)
    }
}
impl ScalarType<i64> for TimestampMicrosecondType {
    fn scalar(r: Option<i64>) -> ScalarValue {
        ScalarValue::TimestampMicrosecond(r)
    }
}
impl ScalarType<i64> for TimestampNanosecondType {
    fn scalar(r: Option<i64>) -> ScalarValue {
        ScalarValue::TimestampNanosecond(r)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A null List scalar must materialize as a single all-null list row with
    // no child values.
    #[test]
    fn scalar_list_null_to_array() {
        let list_array_ref = ScalarValue::List(None, DataType::UInt64).to_array();
        let list_array = list_array_ref.as_any().downcast_ref::<ListArray>().unwrap();
        assert!(list_array.is_null(0));
        assert_eq!(list_array.len(), 1);
        assert_eq!(list_array.values().len(), 0);
    }
    // A populated List scalar must materialize as one row containing every
    // element, preserving nulls and order.
    #[test]
    fn scalar_list_to_array() {
        let list_array_ref = ScalarValue::List(
            Some(vec![
                ScalarValue::UInt64(Some(100)),
                ScalarValue::UInt64(None),
                ScalarValue::UInt64(Some(101)),
            ]),
            DataType::UInt64,
        )
        .to_array();
        let list_array = list_array_ref.as_any().downcast_ref::<ListArray>().unwrap();
        assert_eq!(list_array.len(), 1);
        assert_eq!(list_array.values().len(), 3);
        let prim_array_ref = list_array.value(0);
        let prim_array = prim_array_ref
            .as_any()
            .downcast_ref::<UInt64Array>()
            .unwrap();
        assert_eq!(prim_array.len(), 3);
        assert_eq!(prim_array.value(0), 100);
        assert!(prim_array.is_null(1));
        assert_eq!(prim_array.value(2), 101);
    }
}
| 39.109783 | 100 | 0.523415 |
23fd766114948098677af1751312d340390f63cd | 511 | use std::env;
use async_std::task;
use application;
fn main() -> Result<(), std::io::Error> {
let port = env::var("PORT").unwrap_or_else(|_| "8000".to_string());
let address = format!("0.0.0.0:{}", port);
task::block_on(async {
tide::log::start();
let mut app = tide::new();
app.at("/").nest({
application::app()
});
println!(" Running server on: http://localhost:{}/", port);
app.listen(address).await?;
Ok(())
})
} | 26.894737 | 72 | 0.508806 |
d7b71269c90f1f50d83e385322ae107fb5511bdf | 5,173 | #![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! USB on the go full speed
//!
//! Used by: stm32f101, stm32f102, stm32f103
#[cfg(not(feature = "nosync"))]
pub use crate::stm32f1::peripherals::otg_fs_host::Instance;
pub use crate::stm32f1::peripherals::otg_fs_host::{RegisterBlock, ResetValues};
pub use crate::stm32f1::peripherals::otg_fs_host::{
FS_HCCHAR0, FS_HCCHAR1, FS_HCCHAR2, FS_HCCHAR3, FS_HCCHAR4, FS_HCCHAR5, FS_HCCHAR6, FS_HCCHAR7,
FS_HCFG, FS_HCINT0, FS_HCINT1, FS_HCINT2, FS_HCINT3, FS_HCINT4, FS_HCINT5, FS_HCINT6,
FS_HCINT7, FS_HCINTMSK0, FS_HCINTMSK1, FS_HCINTMSK2, FS_HCINTMSK3, FS_HCINTMSK4, FS_HCINTMSK5,
FS_HCINTMSK6, FS_HCINTMSK7, FS_HCTSIZ0, FS_HCTSIZ1, FS_HCTSIZ2, FS_HCTSIZ3, FS_HCTSIZ4,
FS_HCTSIZ5, FS_HCTSIZ6, FS_HCTSIZ7, FS_HFNUM, FS_HPRT, FS_HPTXSTS, HAINT, HAINTMSK, HFIR,
};
/// Access functions for the OTG_FS_HOST peripheral instance
pub mod OTG_FS_HOST {
use super::ResetValues;
#[cfg(not(feature = "nosync"))]
use super::Instance;
#[cfg(not(feature = "nosync"))]
const INSTANCE: Instance = Instance {
addr: 0x50000400,
_marker: ::core::marker::PhantomData,
};
/// Reset values for each field in OTG_FS_HOST
pub const reset: ResetValues = ResetValues {
FS_HCFG: 0x00000000,
HFIR: 0x0000EA60,
FS_HFNUM: 0x00003FFF,
FS_HPTXSTS: 0x00080100,
HAINT: 0x00000000,
HAINTMSK: 0x00000000,
FS_HPRT: 0x00000000,
FS_HCCHAR0: 0x00000000,
FS_HCCHAR1: 0x00000000,
FS_HCCHAR2: 0x00000000,
FS_HCCHAR3: 0x00000000,
FS_HCCHAR4: 0x00000000,
FS_HCCHAR5: 0x00000000,
FS_HCCHAR6: 0x00000000,
FS_HCCHAR7: 0x00000000,
FS_HCINT0: 0x00000000,
FS_HCINT1: 0x00000000,
FS_HCINT2: 0x00000000,
FS_HCINT3: 0x00000000,
FS_HCINT4: 0x00000000,
FS_HCINT5: 0x00000000,
FS_HCINT6: 0x00000000,
FS_HCINT7: 0x00000000,
FS_HCINTMSK0: 0x00000000,
FS_HCINTMSK1: 0x00000000,
FS_HCINTMSK2: 0x00000000,
FS_HCINTMSK3: 0x00000000,
FS_HCINTMSK4: 0x00000000,
FS_HCINTMSK5: 0x00000000,
FS_HCINTMSK6: 0x00000000,
FS_HCINTMSK7: 0x00000000,
FS_HCTSIZ0: 0x00000000,
FS_HCTSIZ1: 0x00000000,
FS_HCTSIZ2: 0x00000000,
FS_HCTSIZ3: 0x00000000,
FS_HCTSIZ4: 0x00000000,
FS_HCTSIZ5: 0x00000000,
FS_HCTSIZ6: 0x00000000,
FS_HCTSIZ7: 0x00000000,
};
#[cfg(not(feature = "nosync"))]
#[allow(renamed_and_removed_lints)]
#[allow(private_no_mangle_statics)]
#[no_mangle]
static mut OTG_FS_HOST_TAKEN: bool = false;
/// Safe access to OTG_FS_HOST
///
/// This function returns `Some(Instance)` if this instance is not
/// currently taken, and `None` if it is. This ensures that if you
/// do get `Some(Instance)`, you are ensured unique access to
/// the peripheral and there cannot be data races (unless other
/// code uses `unsafe`, of course). You can then pass the
/// `Instance` around to other functions as required. When you're
/// done with it, you can call `release(instance)` to return it.
///
/// `Instance` itself dereferences to a `RegisterBlock`, which
/// provides access to the peripheral's registers.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn take() -> Option<Instance> {
external_cortex_m::interrupt::free(|_| unsafe {
if OTG_FS_HOST_TAKEN {
None
} else {
OTG_FS_HOST_TAKEN = true;
Some(INSTANCE)
}
})
}
/// Release exclusive access to OTG_FS_HOST
///
/// Returns an `Instance` so that it is available to `take()` again.
/// Panics if you return a different `Instance` or if this instance is
/// not already taken.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn release(inst: Instance) {
    external_cortex_m::interrupt::free(|_| unsafe {
        // The caller must hand back the very instance that was taken.
        let owns_it = OTG_FS_HOST_TAKEN && inst.addr == INSTANCE.addr;
        if owns_it {
            OTG_FS_HOST_TAKEN = false;
        } else {
            panic!("Released a peripheral which was not taken");
        }
    });
}
/// Unsafely steal OTG_FS_HOST
///
/// This function is similar to take() but forcibly takes the
/// Instance, marking it as taken regardless of its previous
/// state.
///
/// # Safety
///
/// Bypasses the taken check, so the caller must ensure no other code
/// holds (or later obtains via `take()`) a conflicting `Instance` —
/// otherwise concurrent register access can race.
#[cfg(not(feature = "nosync"))]
#[inline]
pub unsafe fn steal() -> Instance {
    OTG_FS_HOST_TAKEN = true;
    INSTANCE
}
}
/// Raw pointer to OTG_FS_HOST
///
/// Dereferencing this is unsafe because you are not ensured unique
/// access to the peripheral, so you may encounter data races with
/// other users of this peripheral. It is up to you to ensure you
/// will not cause data races.
///
/// This constant is provided for ease of use in unsafe code: you can
/// simply call for example `write_reg!(gpio, GPIOA, ODR, 1);`.
// Peripheral base address 0x5000_0400, reinterpreted as a RegisterBlock.
pub const OTG_FS_HOST: *const RegisterBlock = 0x50000400 as *const _;
| 35.431507 | 99 | 0.645853 |
e5c0a6aadce4311caed3f3e1fbb4375ff59a41b5 | 95,189 | //! This module contains the "cleaned" pieces of the AST, and the functions
//! that clean them.
mod auto_trait;
mod blanket_impl;
pub mod cfg;
pub mod inline;
mod simplify;
pub mod types;
pub mod utils;
use rustc::hir;
use rustc::hir::def::{CtorKind, DefKind, Res};
use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX};
use rustc::hir::ptr::P;
use rustc::infer::region_constraints::{Constraint, RegionConstraintData};
use rustc::middle::lang_items;
use rustc::middle::resolve_lifetime as rl;
use rustc::middle::stability;
use rustc::ty::fold::TypeFolder;
use rustc::ty::subst::InternalSubsts;
use rustc::ty::{self, AdtKind, Lift, Ty, TyCtxt};
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_index::vec::{Idx, IndexVec};
use rustc_typeck::hir_ty_to_ty;
use syntax::ast::{self, Ident};
use syntax::attr;
use syntax_pos::hygiene::MacroKind;
use syntax_pos::symbol::{kw, sym};
use syntax_pos::{self, Pos};
use std::collections::hash_map::Entry;
use std::default::Default;
use std::hash::Hash;
use std::rc::Rc;
use std::u32;
use std::{mem, vec};
use crate::core::{self, DocContext, ImplTraitParam};
use crate::doctree;
use utils::*;
pub use utils::{get_auto_trait_and_blanket_impls, krate, register_res};
pub use self::types::FunctionRetTy::*;
pub use self::types::ItemEnum::*;
pub use self::types::SelfTy::*;
pub use self::types::Type::*;
pub use self::types::Visibility::{Inherited, Public};
pub use self::types::*;
/// Name of the associated type used for a function's return type
/// (i.e. `Fn::Output`). The `'static` lifetime is implied for consts,
/// so it is elided here (clippy: `redundant_static_lifetimes`).
const FN_OUTPUT_NAME: &str = "Output";
/// Converts a rustc-internal entity (HIR, `ty`, or `doctree` node) into
/// its cleaned, rustdoc-facing representation `T`.
pub trait Clean<T> {
    fn clean(&self, cx: &DocContext<'_>) -> T;
}
// Blanket forwarding impls: cleaning a collection or smart pointer simply
// cleans each contained element (or the pointee).
impl<T: Clean<U>, U> Clean<Vec<U>> for [T] {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
impl<T: Clean<U>, U, V: Idx> Clean<IndexVec<V, U>> for IndexVec<V, T> {
    fn clean(&self, cx: &DocContext<'_>) -> IndexVec<V, U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
impl<T: Clean<U>, U> Clean<U> for &T {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        (**self).clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<U> for P<T> {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        (**self).clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<U> for Rc<T> {
    fn clean(&self, cx: &DocContext<'_>) -> U {
        (**self).clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<Option<U>> for Option<T> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<U> {
        self.as_ref().map(|v| v.clean(cx))
    }
}
// NOTE: discards the binder — late-bound regions are cleaned separately.
impl<T, U> Clean<U> for ty::Binder<T>
where
    T: Clean<U>,
{
    fn clean(&self, cx: &DocContext<'_>) -> U {
        self.skip_binder().clean(cx)
    }
}
impl<T: Clean<U>, U> Clean<Vec<U>> for P<[T]> {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
        self.iter().map(|x| x.clean(cx)).collect()
    }
}
// Gathers crate-level metadata for an external (or the local) crate:
// name, source file, attributes, and any `#[doc(primitive)]` /
// `#[doc(keyword)]` modules declared at the crate's top level.
impl Clean<ExternalCrate> for CrateNum {
    fn clean(&self, cx: &DocContext<'_>) -> ExternalCrate {
        let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
        let krate_span = cx.tcx.def_span(root);
        let krate_src = cx.sess().source_map().span_to_filename(krate_span);
        // Collect all inner modules which are tagged as implementations of
        // primitives.
        //
        // Note that this loop only searches the top-level items of the crate,
        // and this is intentional. If we were to search the entire crate for an
        // item tagged with `#[doc(primitive)]` then we would also have to
        // search the entirety of external modules for items tagged
        // `#[doc(primitive)]`, which is a pretty inefficient process (decoding
        // all that metadata unconditionally).
        //
        // In order to keep the metadata load under control, the
        // `#[doc(primitive)]` feature is explicitly designed to only allow the
        // primitive tags to show up as the top level items in a crate.
        //
        // Also note that this does not attempt to deal with modules tagged
        // duplicately for the same primitive. This is handled later on when
        // rendering by delegating everything to a hash map.
        let as_primitive = |res: Res| {
            if let Res::Def(DefKind::Mod, def_id) = res {
                let attrs = cx.tcx.get_attrs(def_id).clean(cx);
                let mut prim = None;
                for attr in attrs.lists(sym::doc) {
                    if let Some(v) = attr.value_str() {
                        if attr.check_name(sym::primitive) {
                            prim = PrimitiveType::from_str(&v.as_str());
                            if prim.is_some() {
                                break;
                            }
                            // FIXME: should warn on unknown primitives?
                        }
                    }
                }
                return prim.map(|p| (def_id, p, attrs));
            }
            None
        };
        // Local crate: walk the HIR top-level items; external crate: read the
        // crate metadata's child items instead.
        let primitives = if root.is_local() {
            cx.tcx
                .hir()
                .krate()
                .module
                .item_ids
                .iter()
                .filter_map(|&id| {
                    let item = cx.tcx.hir().expect_item(id.id);
                    match item.kind {
                        hir::ItemKind::Mod(_) => {
                            as_primitive(Res::Def(DefKind::Mod, cx.tcx.hir().local_def_id(id.id)))
                        }
                        hir::ItemKind::Use(ref path, hir::UseKind::Single)
                            if item.vis.node.is_pub() =>
                        {
                            as_primitive(path.res).map(|(_, prim, attrs)| {
                                // Pretend the primitive is local.
                                (cx.tcx.hir().local_def_id(id.id), prim, attrs)
                            })
                        }
                        _ => None,
                    }
                })
                .collect()
        } else {
            cx.tcx
                .item_children(root)
                .iter()
                .map(|item| item.res)
                .filter_map(as_primitive)
                .collect()
        };
        // Same top-level-only scan, but for `#[doc(keyword = "...")]` modules.
        let as_keyword = |res: Res| {
            if let Res::Def(DefKind::Mod, def_id) = res {
                let attrs = cx.tcx.get_attrs(def_id).clean(cx);
                let mut keyword = None;
                for attr in attrs.lists(sym::doc) {
                    if let Some(v) = attr.value_str() {
                        if attr.check_name(sym::keyword) {
                            if v.is_doc_keyword() {
                                keyword = Some(v.to_string());
                                break;
                            }
                            // FIXME: should warn on unknown keywords?
                        }
                    }
                }
                return keyword.map(|p| (def_id, p, attrs));
            }
            None
        };
        let keywords = if root.is_local() {
            cx.tcx
                .hir()
                .krate()
                .module
                .item_ids
                .iter()
                .filter_map(|&id| {
                    let item = cx.tcx.hir().expect_item(id.id);
                    match item.kind {
                        hir::ItemKind::Mod(_) => {
                            as_keyword(Res::Def(DefKind::Mod, cx.tcx.hir().local_def_id(id.id)))
                        }
                        hir::ItemKind::Use(ref path, hir::UseKind::Single)
                            if item.vis.node.is_pub() =>
                        {
                            as_keyword(path.res).map(|(_, prim, attrs)| {
                                (cx.tcx.hir().local_def_id(id.id), prim, attrs)
                            })
                        }
                        _ => None,
                    }
                })
                .collect()
        } else {
            cx.tcx.item_children(root).iter().map(|item| item.res).filter_map(as_keyword).collect()
        };
        ExternalCrate {
            name: cx.tcx.crate_name(*self).to_string(),
            src: krate_src,
            attrs: cx.tcx.get_attrs(root).clean(cx),
            primitives,
            keywords,
        }
    }
}
// Cleans a module: its own metadata plus every contained item kind,
// flattened into a single `Vec<Item>`.
impl Clean<Item> for doctree::Module<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        // Only the crate root has no name; render it as the empty string.
        // (Replaces the old `is_some()` + `expect` pair with `map_or`.)
        let name = self.name.map_or(String::new(), |n| n.clean(cx));
        // maintain a stack of mod ids, for doc comment path resolution
        // but we also need to resolve the module's own docs based on whether its docs were written
        // inside or outside the module, so check for that
        let attrs = self.attrs.clean(cx);
        let mut items: Vec<Item> = vec![];
        items.extend(self.extern_crates.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.imports.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.structs.iter().map(|x| x.clean(cx)));
        items.extend(self.unions.iter().map(|x| x.clean(cx)));
        items.extend(self.enums.iter().map(|x| x.clean(cx)));
        items.extend(self.fns.iter().map(|x| x.clean(cx)));
        items.extend(self.foreigns.iter().map(|x| x.clean(cx)));
        items.extend(self.mods.iter().map(|x| x.clean(cx)));
        items.extend(self.typedefs.iter().map(|x| x.clean(cx)));
        items.extend(self.opaque_tys.iter().map(|x| x.clean(cx)));
        items.extend(self.statics.iter().map(|x| x.clean(cx)));
        items.extend(self.constants.iter().map(|x| x.clean(cx)));
        items.extend(self.traits.iter().map(|x| x.clean(cx)));
        items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
        items.extend(self.macros.iter().map(|x| x.clean(cx)));
        items.extend(self.proc_macros.iter().map(|x| x.clean(cx)));
        items.extend(self.trait_aliases.iter().map(|x| x.clean(cx)));
        // determine if we should display the inner contents or
        // the outer `mod` item for the source code.
        let whence = {
            let cm = cx.sess().source_map();
            let outer = cm.lookup_char_pos(self.where_outer.lo());
            let inner = cm.lookup_char_pos(self.where_inner.lo());
            if outer.file.start_pos == inner.file.start_pos {
                // mod foo { ... }
                self.where_outer
            } else {
                // mod foo; (and a separate SourceFile for the contents)
                self.where_inner
            }
        };
        Item {
            name: Some(name),
            attrs,
            source: whence.clean(cx),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            inner: ModuleItem(Module { is_crate: self.is_crate, items }),
        }
    }
}
// Parses raw AST attributes (doc comments, `#[doc]`, cfg, …) into
// rustdoc's structured `Attributes`.
impl Clean<Attributes> for [ast::Attribute] {
    fn clean(&self, cx: &DocContext<'_>) -> Attributes {
        Attributes::from_ast(cx.sess().diagnostic(), self)
    }
}
// Maps each HIR generic bound 1:1 onto its cleaned counterpart.
impl Clean<GenericBound> for hir::GenericBound {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        match *self {
            hir::GenericBound::Trait(ref poly_trait, modifier) => {
                GenericBound::TraitBound(poly_trait.clean(cx), modifier)
            }
            hir::GenericBound::Outlives(lifetime) => {
                GenericBound::Outlives(lifetime.clean(cx))
            }
        }
    }
}
// Cleans a `ty::TraitRef` together with already-cleaned associated-type
// bindings into a `GenericBound::TraitBound`, collecting any late-bound
// lifetimes that appear in the trait's input types (e.g. `Fn(&'a T)`).
impl<'a, 'tcx> Clean<GenericBound> for (&'a ty::TraitRef<'tcx>, Vec<TypeBinding>) {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        let (trait_ref, ref bounds) = *self;
        inline::record_extern_fqn(cx, trait_ref.def_id, TypeKind::Trait);
        let path = external_path(
            cx,
            cx.tcx.item_name(trait_ref.def_id),
            Some(trait_ref.def_id),
            true,
            bounds.clone(),
            trait_ref.substs,
        );
        debug!("ty::TraitRef\n subst: {:?}\n", trait_ref.substs);
        // collect any late bound regions
        let mut late_bounds = vec![];
        // skip(1): the first input type is Self.
        for ty_s in trait_ref.input_types().skip(1) {
            if let ty::Tuple(ts) = ty_s.kind {
                for &ty_s in ts {
                    if let ty::Ref(ref reg, _, _) = ty_s.expect_ty().kind {
                        if let &ty::RegionKind::ReLateBound(..) = *reg {
                            debug!(" hit an ReLateBound {:?}", reg);
                            if let Some(Lifetime(name)) = reg.clean(cx) {
                                late_bounds.push(GenericParamDef {
                                    name,
                                    kind: GenericParamDefKind::Lifetime,
                                });
                            }
                        }
                    }
                }
            }
        }
        GenericBound::TraitBound(
            PolyTrait {
                trait_: ResolvedPath {
                    path,
                    param_names: None,
                    did: trait_ref.def_id,
                    is_generic: false,
                },
                generic_params: late_bounds,
            },
            hir::TraitBoundModifier::None,
        )
    }
}
impl<'tcx> Clean<GenericBound> for ty::TraitRef<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
        // Delegate to the (trait ref, bindings) impl with no bindings.
        (self, Vec::new()).clean(cx)
    }
}
// Turns the substitutions' regions and types into a bound list:
// region outlives-bounds first, then plain trait bounds for each type.
// Returns `None` when there is nothing to show.
impl<'tcx> Clean<Option<Vec<GenericBound>>> for InternalSubsts<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<Vec<GenericBound>> {
        let bounds: Vec<GenericBound> = self
            .regions()
            .filter_map(|r| r.clean(cx))
            .map(GenericBound::Outlives)
            .chain(self.types().map(|t| {
                GenericBound::TraitBound(
                    PolyTrait { trait_: t.clean(cx), generic_params: Vec::new() },
                    hir::TraitBoundModifier::None,
                )
            }))
            .collect();
        if bounds.is_empty() { None } else { Some(bounds) }
    }
}
// Cleans a HIR lifetime, first consulting `cx.lt_substs` so that
// lifetimes substituted during inlining resolve to their replacement.
impl Clean<Lifetime> for hir::Lifetime {
    fn clean(&self, cx: &DocContext<'_>) -> Lifetime {
        if self.hir_id != hir::DUMMY_HIR_ID {
            let def = cx.tcx.named_region(self.hir_id);
            match def {
                Some(rl::Region::EarlyBound(_, node_id, _))
                | Some(rl::Region::LateBound(_, node_id, _))
                | Some(rl::Region::Free(_, node_id)) => {
                    // A substitution recorded for this region wins over
                    // the lifetime's own name.
                    if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() {
                        return lt;
                    }
                }
                _ => {}
            }
        }
        Lifetime(self.name.ident().to_string())
    }
}
// Renders a lifetime *parameter* (including its outlives bounds) as a
// `Lifetime`, e.g. `'a: 'b + 'c`. Must only be called on
// `GenericParamKind::Lifetime` parameters.
impl Clean<Lifetime> for hir::GenericParam {
    fn clean(&self, _: &DocContext<'_>) -> Lifetime {
        match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                if self.bounds.len() > 0 {
                    let mut bounds = self.bounds.iter().map(|bound| match bound {
                        hir::GenericBound::Outlives(lt) => lt,
                        // A lifetime parameter can only be bounded by lifetimes.
                        _ => panic!("lifetime parameter bound is not an outlives bound"),
                    });
                    let name = bounds.next().expect("no more bounds").name.ident();
                    let mut s = format!("{}: {}", self.name.ident(), name);
                    for bound in bounds {
                        s.push_str(&format!(" + {}", bound.name.ident()));
                    }
                    Lifetime(s)
                } else {
                    Lifetime(self.name.ident().to_string())
                }
            }
            // Type/const parameters must never reach this impl.
            _ => panic!("cleaning a non-lifetime generic param as a lifetime"),
        }
    }
}
// Cleans a const generic argument: its type is the type of the anon
// const's body owner; the expression is pretty-printed for display.
impl Clean<Constant> for hir::ConstArg {
    fn clean(&self, cx: &DocContext<'_>) -> Constant {
        Constant {
            type_: cx.tcx.type_of(cx.tcx.hir().body_owner_def_id(self.value.body)).clean(cx),
            expr: print_const_expr(cx, self.value.body),
            // Not evaluated here; the display value stays unset.
            value: None,
            is_literal: is_literal_expr(cx, self.value.body.hir_id),
        }
    }
}
// A `ty`-level lifetime parameter cleans to just its name.
impl Clean<Lifetime> for ty::GenericParamDef {
    fn clean(&self, _cx: &DocContext<'_>) -> Lifetime {
        Lifetime(self.name.to_string())
    }
}
// Only `'static`, named late-bound, and early-bound regions have a
// printable name; every other region kind cleans to `None`.
impl Clean<Option<Lifetime>> for ty::RegionKind {
    fn clean(&self, cx: &DocContext<'_>) -> Option<Lifetime> {
        match *self {
            ty::ReStatic => Some(Lifetime::statik()),
            ty::ReLateBound(_, ty::BrNamed(_, name)) => Some(Lifetime(name.to_string())),
            ty::ReEarlyBound(ref data) => Some(Lifetime(data.name.clean(cx))),
            ty::ReLateBound(..)
            | ty::ReFree(..)
            | ty::ReScope(..)
            | ty::ReVar(..)
            | ty::RePlaceholder(..)
            | ty::ReEmpty
            | ty::ReClosureBound(_)
            | ty::ReErased => {
                debug!("cannot clean region {:?}", self);
                None
            }
        }
    }
}
// Each HIR where-clause form maps 1:1 onto a cleaned predicate.
impl Clean<WherePredicate> for hir::WherePredicate {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        match *self {
            hir::WherePredicate::BoundPredicate(ref pred) => WherePredicate::BoundPredicate {
                ty: pred.bounded_ty.clean(cx),
                bounds: pred.bounds.clean(cx),
            },
            hir::WherePredicate::RegionPredicate(ref pred) => WherePredicate::RegionPredicate {
                lifetime: pred.lifetime.clean(cx),
                bounds: pred.bounds.clean(cx),
            },
            hir::WherePredicate::EqPredicate(ref pred) => WherePredicate::EqPredicate {
                lhs: pred.lhs_ty.clean(cx),
                rhs: pred.rhs_ty.clean(cx),
            },
        }
    }
}
// Cleans a `ty::Predicate` into a user-visible where-predicate; the
// purely internal predicate kinds are unreachable for user code.
impl<'a> Clean<Option<WherePredicate>> for ty::Predicate<'a> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        use rustc::ty::Predicate;
        match *self {
            Predicate::Trait(ref pred) => Some(pred.clean(cx)),
            Predicate::Subtype(ref pred) => Some(pred.clean(cx)),
            Predicate::RegionOutlives(ref pred) => pred.clean(cx),
            Predicate::TypeOutlives(ref pred) => pred.clean(cx),
            Predicate::Projection(ref pred) => Some(pred.clean(cx)),
            Predicate::WellFormed(..)
            | Predicate::ObjectSafe(..)
            | Predicate::ClosureKind(..)
            | Predicate::ConstEvaluatable(..) => panic!("not user writable"),
        }
    }
}
// A trait predicate `T: Trait` becomes a bound predicate on `T`.
impl<'a> Clean<WherePredicate> for ty::TraitPredicate<'a> {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        WherePredicate::BoundPredicate {
            ty: self.trait_ref.self_ty().clean(cx),
            bounds: vec![self.trait_ref.clean(cx)],
        }
    }
}
// Subtype predicates are compiler-internal and must never be rendered.
impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
    fn clean(&self, _cx: &DocContext<'_>) -> WherePredicate {
        panic!(
            "subtype predicates are an internal rustc artifact \
             and should not be seen by rustdoc"
        )
    }
}
// Cleans a region-outlives predicate `'a: 'b`. An empty-region pair
// carries no information and is dropped. (The empty `match` with a
// catch-all arm is replaced by the idiomatic `if let`.)
impl<'tcx> Clean<Option<WherePredicate>>
    for ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>
{
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        let ty::OutlivesPredicate(ref a, ref b) = *self;
        if let (ty::ReEmpty, ty::ReEmpty) = (a, b) {
            return None;
        }
        Some(WherePredicate::RegionPredicate {
            lifetime: a.clean(cx).expect("failed to clean lifetime"),
            bounds: vec![GenericBound::Outlives(b.clean(cx).expect("failed to clean bounds"))],
        })
    }
}
// Cleans a type-outlives predicate `T: 'a`; an empty region is dropped.
// (The empty `match` with a catch-all arm is replaced by `if let`.)
impl<'tcx> Clean<Option<WherePredicate>> for ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>> {
    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
        let ty::OutlivesPredicate(ref ty, ref lt) = *self;
        if let ty::ReEmpty = lt {
            return None;
        }
        Some(WherePredicate::BoundPredicate {
            ty: ty.clean(cx),
            bounds: vec![GenericBound::Outlives(lt.clean(cx).expect("failed to clean lifetimes"))],
        })
    }
}
// A projection predicate `<T as Trait>::Assoc == U` becomes an
// equality predicate between the cleaned projection and the type.
impl<'tcx> Clean<WherePredicate> for ty::ProjectionPredicate<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
        WherePredicate::EqPredicate { lhs: self.projection_ty.clean(cx), rhs: self.ty.clean(cx) }
    }
}
// Cleans `<T as Trait>::Assoc` into a qualified-path type.
impl<'tcx> Clean<Type> for ty::ProjectionTy<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        let lifted = self.lift_to_tcx(cx.tcx).unwrap();
        // Cleaning the trait ref must yield a trait bound, never a lifetime.
        let trait_ = match lifted.trait_ref(cx.tcx).clean(cx) {
            GenericBound::TraitBound(t, _) => t.trait_,
            GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
        };
        Type::QPath {
            name: cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx),
            self_type: box self.self_ty().clean(cx),
            trait_: box trait_,
        }
    }
}
// Cleans a `ty`-level generic parameter (lifetime, type, or const) into
// rustdoc's `GenericParamDef`. Type-parameter bounds are left empty here
// and filled in later from the where-clauses.
impl Clean<GenericParamDef> for ty::GenericParamDef {
    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
        let (name, kind) = match self.kind {
            ty::GenericParamDefKind::Lifetime => {
                (self.name.to_string(), GenericParamDefKind::Lifetime)
            }
            ty::GenericParamDefKind::Type { has_default, synthetic, .. } => {
                let default =
                    if has_default { Some(cx.tcx.type_of(self.def_id).clean(cx)) } else { None };
                (
                    self.name.clean(cx),
                    GenericParamDefKind::Type {
                        did: self.def_id,
                        bounds: vec![], // These are filled in from the where-clauses.
                        default,
                        synthetic,
                    },
                )
            }
            ty::GenericParamDefKind::Const { .. } => (
                self.name.clean(cx),
                GenericParamDefKind::Const {
                    did: self.def_id,
                    ty: cx.tcx.type_of(self.def_id).clean(cx),
                },
            ),
        };
        GenericParamDef { name, kind }
    }
}
// Cleans a HIR generic parameter. For lifetimes this duplicates the
// bound-joining logic of `Clean<Lifetime> for hir::GenericParam` above,
// rendering `'a: 'b + 'c` into the name string.
impl Clean<GenericParamDef> for hir::GenericParam {
    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
        let (name, kind) = match self.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                let name = if self.bounds.len() > 0 {
                    let mut bounds = self.bounds.iter().map(|bound| match bound {
                        hir::GenericBound::Outlives(lt) => lt,
                        _ => panic!(),
                    });
                    let name = bounds.next().expect("no more bounds").name.ident();
                    let mut s = format!("{}: {}", self.name.ident(), name);
                    for bound in bounds {
                        s.push_str(&format!(" + {}", bound.name.ident()));
                    }
                    s
                } else {
                    self.name.ident().to_string()
                };
                (name, GenericParamDefKind::Lifetime)
            }
            hir::GenericParamKind::Type { ref default, synthetic } => (
                self.name.ident().name.clean(cx),
                GenericParamDefKind::Type {
                    did: cx.tcx.hir().local_def_id(self.hir_id),
                    bounds: self.bounds.clean(cx),
                    default: default.clean(cx),
                    synthetic,
                },
            ),
            hir::GenericParamKind::Const { ref ty } => (
                self.name.ident().name.clean(cx),
                GenericParamDefKind::Const {
                    did: cx.tcx.hir().local_def_id(self.hir_id),
                    ty: ty.clean(cx),
                },
            ),
        };
        GenericParamDef { name, kind }
    }
}
// Cleans HIR generics: registers `impl Trait` synthetic parameters first
// (so normal parameters can refer to them), then merges `?Sized`-style
// duplicate bounds from type params into the where-clauses.
//
// Fixes a mojibake corruption in the original source: `¶m.name` was
// a mangled `&param.name` (the `&para` prefix had been decoded as `¶`).
impl Clean<Generics> for hir::Generics {
    fn clean(&self, cx: &DocContext<'_>) -> Generics {
        // Synthetic type-parameters are inserted after normal ones.
        // In order for normal parameters to be able to refer to synthetic ones,
        // scans them first.
        fn is_impl_trait(param: &hir::GenericParam) -> bool {
            match param.kind {
                hir::GenericParamKind::Type { synthetic, .. } => {
                    synthetic == Some(hir::SyntheticTyParamKind::ImplTrait)
                }
                _ => false,
            }
        }
        let impl_trait_params = self
            .params
            .iter()
            .filter(|param| is_impl_trait(param))
            .map(|param| {
                let param: GenericParamDef = param.clean(cx);
                match param.kind {
                    GenericParamDefKind::Lifetime => unreachable!(),
                    GenericParamDefKind::Type { did, ref bounds, .. } => {
                        cx.impl_trait_bounds.borrow_mut().insert(did.into(), bounds.clone());
                    }
                    GenericParamDefKind::Const { .. } => unreachable!(),
                }
                param
            })
            .collect::<Vec<_>>();
        let mut params = Vec::with_capacity(self.params.len());
        for p in self.params.iter().filter(|p| !is_impl_trait(p)) {
            let p = p.clean(cx);
            params.push(p);
        }
        params.extend(impl_trait_params);
        let mut generics =
            Generics { params, where_predicates: self.where_clause.predicates.clean(cx) };
        // Some duplicates are generated for ?Sized bounds between type params and where
        // predicates. The point in here is to move the bounds definitions from type params
        // to where predicates when such cases occur.
        for where_pred in &mut generics.where_predicates {
            match *where_pred {
                WherePredicate::BoundPredicate { ty: Generic(ref name), ref mut bounds } => {
                    if bounds.is_empty() {
                        for param in &mut generics.params {
                            match param.kind {
                                GenericParamDefKind::Lifetime => {}
                                GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => {
                                    if &param.name == name {
                                        mem::swap(bounds, ty_bounds);
                                        break;
                                    }
                                }
                                GenericParamDefKind::Const { .. } => {}
                            }
                        }
                    }
                }
                _ => continue,
            }
        }
        generics
    }
}
// Cleans `ty`-level generics plus their predicates, reconstructing
// `impl Trait` bounds (which rustc desugared into synthetic params and
// predicates) and re-deriving the default `Sized` / `?Sized` display.
impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics, ty::GenericPredicates<'tcx>) {
    fn clean(&self, cx: &DocContext<'_>) -> Generics {
        use self::WherePredicate as WP;
        use std::collections::BTreeMap;
        let (gens, preds) = *self;
        // Don't populate `cx.impl_trait_bounds` before `clean`ning `where` clauses,
        // since `Clean for ty::Predicate` would consume them.
        let mut impl_trait = BTreeMap::<ImplTraitParam, Vec<GenericBound>>::default();
        // Bounds in the type_params and lifetimes fields are repeated in the
        // predicates field (see rustc_typeck::collect::ty_generics), so remove
        // them.
        let stripped_typarams = gens
            .params
            .iter()
            .filter_map(|param| match param.kind {
                ty::GenericParamDefKind::Lifetime => None,
                ty::GenericParamDefKind::Type { synthetic, .. } => {
                    if param.name == kw::SelfUpper {
                        assert_eq!(param.index, 0);
                        return None;
                    }
                    if synthetic == Some(hir::SyntheticTyParamKind::ImplTrait) {
                        impl_trait.insert(param.index.into(), vec![]);
                        return None;
                    }
                    Some(param.clean(cx))
                }
                ty::GenericParamDefKind::Const { .. } => None,
            })
            .collect::<Vec<GenericParamDef>>();
        // param index -> [(DefId of trait, associated type name, type)]
        let mut impl_trait_proj = FxHashMap::<u32, Vec<(DefId, String, Ty<'tcx>)>>::default();
        let where_predicates = preds
            .predicates
            .iter()
            .flat_map(|(p, _)| {
                let mut projection = None;
                // Find the index of the generic parameter this predicate
                // constrains (if it constrains one directly).
                let param_idx = (|| {
                    if let Some(trait_ref) = p.to_opt_poly_trait_ref() {
                        if let ty::Param(param) = trait_ref.self_ty().kind {
                            return Some(param.index);
                        }
                    } else if let Some(outlives) = p.to_opt_type_outlives() {
                        if let ty::Param(param) = outlives.skip_binder().0.kind {
                            return Some(param.index);
                        }
                    } else if let ty::Predicate::Projection(p) = p {
                        if let ty::Param(param) = p.skip_binder().projection_ty.self_ty().kind {
                            projection = Some(p);
                            return Some(param.index);
                        }
                    }
                    None
                })();
                if let Some(param_idx) = param_idx {
                    // Predicates on `impl Trait` params are folded back into
                    // the param's bound list instead of the where-clauses.
                    if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
                        let p = p.clean(cx)?;
                        b.extend(
                            p.get_bounds()
                                .into_iter()
                                .flatten()
                                .cloned()
                                .filter(|b| !b.is_sized_bound(cx)),
                        );
                        let proj = projection
                            .map(|p| (p.skip_binder().projection_ty.clean(cx), p.skip_binder().ty));
                        if let Some(((_, trait_did, name), rhs)) =
                            proj.as_ref().and_then(|(lhs, rhs)| Some((lhs.projection()?, rhs)))
                        {
                            impl_trait_proj.entry(param_idx).or_default().push((
                                trait_did,
                                name.to_string(),
                                rhs,
                            ));
                        }
                        return None;
                    }
                }
                Some(p)
            })
            .collect::<Vec<_>>();
        for (param, mut bounds) in impl_trait {
            // Move trait bounds to the front.
            bounds.sort_by_key(|b| if let GenericBound::TraitBound(..) = b { false } else { true });
            if let crate::core::ImplTraitParam::ParamIndex(idx) = param {
                if let Some(proj) = impl_trait_proj.remove(&idx) {
                    for (trait_did, name, rhs) in proj {
                        simplify::merge_bounds(cx, &mut bounds, trait_did, &name, &rhs.clean(cx));
                    }
                }
            } else {
                unreachable!();
            }
            cx.impl_trait_bounds.borrow_mut().insert(param, bounds);
        }
        // Now that `cx.impl_trait_bounds` is populated, we can process
        // remaining predicates which could contain `impl Trait`.
        let mut where_predicates =
            where_predicates.into_iter().flat_map(|p| p.clean(cx)).collect::<Vec<_>>();
        // Type parameters have a Sized bound by default unless removed with
        // ?Sized. Scan through the predicates and mark any type parameter with
        // a Sized bound, removing the bounds as we find them.
        //
        // Note that associated types also have a sized bound by default, but we
        // don't actually know the set of associated types right here so that's
        // handled in cleaning associated types
        let mut sized_params = FxHashSet::default();
        where_predicates.retain(|pred| match *pred {
            WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
                if bounds.iter().any(|b| b.is_sized_bound(cx)) {
                    sized_params.insert(g.clone());
                    false
                } else {
                    true
                }
            }
            _ => true,
        });
        // Run through the type parameters again and insert a ?Sized
        // unbound for any we didn't find to be Sized.
        for tp in &stripped_typarams {
            if !sized_params.contains(&tp.name) {
                where_predicates.push(WP::BoundPredicate {
                    ty: Type::Generic(tp.name.clone()),
                    bounds: vec![GenericBound::maybe_sized(cx)],
                })
            }
        }
        // It would be nice to collect all of the bounds on a type and recombine
        // them if possible, to avoid e.g., `where T: Foo, T: Bar, T: Sized, T: 'a`
        // and instead see `where T: Foo + Bar + Sized + 'a`
        Generics {
            params: gens
                .params
                .iter()
                .flat_map(|param| match param.kind {
                    ty::GenericParamDefKind::Lifetime => Some(param.clean(cx)),
                    ty::GenericParamDefKind::Type { .. } => None,
                    ty::GenericParamDefKind::Const { .. } => Some(param.clean(cx)),
                })
                .chain(simplify::ty_params(stripped_typarams).into_iter())
                .collect(),
            where_predicates: simplify::where_clauses(cx, where_predicates),
        }
    }
}
// Cleans a method from its HIR pieces: signature, generics, body id, and
// (for impl items) the defaultness qualifier.
impl<'a> Clean<Method>
    for (&'a hir::FnSig, &'a hir::Generics, hir::BodyId, Option<hir::Defaultness>)
{
    fn clean(&self, cx: &DocContext<'_>) -> Method {
        // `enter_impl_trait` scopes impl-Trait bound resolution to this item.
        let (generics, decl) =
            enter_impl_trait(cx, || (self.1.clean(cx), (&*self.0.decl, self.2).clean(cx)));
        let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
        Method { decl, generics, header: self.0.header, defaultness: self.3, all_types, ret_types }
    }
}
// Cleans a free function; recomputes constness so that functions the
// compiler can treat as `const fn` are displayed as such.
impl Clean<Item> for doctree::Function<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let (generics, decl) =
            enter_impl_trait(cx, || (self.generics.clean(cx), (self.decl, self.body).clean(cx)));
        let did = cx.tcx.hir().local_def_id(self.id);
        let constness = if cx.tcx.is_min_const_fn(did) {
            hir::Constness::Const
        } else {
            hir::Constness::NotConst
        };
        let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            def_id: did,
            inner: FunctionItem(Function {
                decl,
                generics,
                // Keep the declared header but override constness.
                header: hir::FnHeader { constness, ..self.header },
                all_types,
                ret_types,
            }),
        }
    }
}
// Cleans argument types paired with their identifiers (used for trait
// method declarations without bodies). Unnamed or empty-named arguments
// render as `_`. The old `unwrap_or(String::new())` + mutable fixup is
// replaced by a `filter`/`unwrap_or_else` chain.
impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
        Arguments {
            values: self
                .0
                .iter()
                .enumerate()
                .map(|(i, ty)| {
                    let name = self
                        .1
                        .get(i)
                        .map(|ident| ident.to_string())
                        .filter(|s| !s.is_empty())
                        .unwrap_or_else(|| "_".to_string());
                    Argument { name, type_: ty.clean(cx) }
                })
                .collect(),
        }
    }
}
// Cleans argument types paired with a function body: names come from the
// body's parameter patterns.
impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
        let body = cx.tcx.hir().body(self.1);
        Arguments {
            values: self
                .0
                .iter()
                .enumerate()
                .map(|(i, ty)| Argument {
                    name: name_from_pat(&body.params[i].pat),
                    type_: ty.clean(cx),
                })
                .collect(),
        }
    }
}
// Cleans a HIR function declaration; the generic `A` selects how the
// argument names are obtained (identifier list or body id, see above).
impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
where
    (&'a [hir::Ty], A): Clean<Arguments>,
{
    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
        FnDecl {
            inputs: (&self.0.inputs[..], self.1).clean(cx),
            output: self.0.output.clean(cx),
            c_variadic: self.0.c_variadic,
            attrs: Attributes::default(),
        }
    }
}
// Cleans a `ty`-level function signature. Argument names are only
// available from crate metadata for non-local items; local items fall
// back to unnamed arguments here.
impl<'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
        let (did, sig) = *self;
        let mut names = if cx.tcx.hir().as_local_hir_id(did).is_some() {
            vec![].into_iter()
        } else {
            cx.tcx.fn_arg_names(did).into_iter()
        };
        FnDecl {
            output: Return(sig.skip_binder().output().clean(cx)),
            attrs: Attributes::default(),
            c_variadic: sig.skip_binder().c_variadic,
            inputs: Arguments {
                values: sig
                    .skip_binder()
                    .inputs()
                    .iter()
                    .map(|t| Argument {
                        type_: t.clean(cx),
                        name: names.next().map_or(String::new(), |name| name.to_string()),
                    })
                    .collect(),
            },
        }
    }
}
// `-> T` cleans the return type; an elided return becomes `DefaultReturn`.
impl Clean<FunctionRetTy> for hir::FunctionRetTy {
    fn clean(&self, cx: &DocContext<'_>) -> FunctionRetTy {
        match *self {
            hir::DefaultReturn(..) => DefaultReturn,
            hir::Return(ref ty) => Return(ty.clean(cx)),
        }
    }
}
// Cleans a trait definition, including the `#[doc(spotlight)]` flag.
impl Clean<Item> for doctree::Trait<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let attrs = self.attrs.clean(cx);
        let is_spotlight = attrs.has_doc_flag(sym::spotlight);
        Item {
            name: Some(self.name.clean(cx)),
            attrs,
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner: TraitItem(Trait {
                auto: self.is_auto.clean(cx),
                unsafety: self.unsafety,
                items: self.items.iter().map(|ti| ti.clean(cx)).collect(),
                generics: self.generics.clean(cx),
                bounds: self.bounds.clean(cx),
                is_spotlight,
                is_auto: self.is_auto.clean(cx),
            }),
        }
    }
}
// Cleans a trait alias (`trait Foo = Bar + Baz;`).
impl Clean<Item> for doctree::TraitAlias<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let attrs = self.attrs.clean(cx);
        Item {
            name: Some(self.name.clean(cx)),
            attrs,
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner: TraitAliasItem(TraitAlias {
                generics: self.generics.clean(cx),
                bounds: self.bounds.clean(cx),
            }),
        }
    }
}
// `auto trait` cleans to `true`, a normal trait to `false`.
impl Clean<bool> for hir::IsAuto {
    fn clean(&self, _: &DocContext<'_>) -> bool {
        match self {
            hir::IsAuto::No => false,
            hir::IsAuto::Yes => true,
        }
    }
}
// A HIR trait reference cleans by resolving its path to a `Type`.
impl Clean<Type> for hir::TraitRef {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        resolve_type(cx, self.path.clean(cx), self.hir_ref_id)
    }
}
// A poly trait ref carries the trait plus its `for<'a>` parameters.
impl Clean<PolyTrait> for hir::PolyTraitRef {
    fn clean(&self, cx: &DocContext<'_>) -> PolyTrait {
        PolyTrait {
            trait_: self.trait_ref.clean(cx),
            generic_params: self.bound_generic_params.clean(cx),
        }
    }
}
// Cleans a trait item: associated const, provided/required method, or
// associated type. Required methods only have identifier names, provided
// methods have full bodies.
impl Clean<Item> for hir::TraitItem<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.kind {
            hir::TraitItemKind::Const(ref ty, default) => {
                AssocConstItem(ty.clean(cx), default.map(|e| print_const_expr(cx, e)))
            }
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
                MethodItem((sig, &self.generics, body, None).clean(cx))
            }
            hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref names)) => {
                let (generics, decl) = enter_impl_trait(cx, || {
                    (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
                });
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                TyMethodItem(TyMethod { header: sig.header, decl, generics, all_types, ret_types })
            }
            hir::TraitItemKind::Type(ref bounds, ref default) => {
                AssocTypeItem(bounds.clean(cx), default.clean(cx))
            }
        };
        let local_did = cx.tcx.hir().local_def_id(self.hir_id);
        Item {
            name: Some(self.ident.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: local_did,
            // Trait items inherit the trait's visibility.
            visibility: Visibility::Inherited,
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            inner,
        }
    }
}
// Cleans an impl item: associated const, method, type alias, or opaque
// type. Typedef/opaque items are marked as impl-associated (`true`).
impl Clean<Item> for hir::ImplItem<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.kind {
            hir::ImplItemKind::Const(ref ty, expr) => {
                AssocConstItem(ty.clean(cx), Some(print_const_expr(cx, expr)))
            }
            hir::ImplItemKind::Method(ref sig, body) => {
                MethodItem((sig, &self.generics, body, Some(self.defaultness)).clean(cx))
            }
            hir::ImplItemKind::TyAlias(ref ty) => {
                TypedefItem(Typedef { type_: ty.clean(cx), generics: Generics::default() }, true)
            }
            hir::ImplItemKind::OpaqueTy(ref bounds) => OpaqueTyItem(
                OpaqueTy { bounds: bounds.clean(cx), generics: Generics::default() },
                true,
            ),
        };
        let local_did = cx.tcx.hir().local_def_id(self.hir_id);
        Item {
            name: Some(self.ident.name.clean(cx)),
            source: self.span.clean(cx),
            attrs: self.attrs.clean(cx),
            def_id: local_did,
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, local_did),
            deprecation: get_deprecation(cx, local_did),
            inner,
        }
    }
}
/// Converts a type-checked associated item (possibly from another crate,
/// so only `ty`-level information is available) into a clean `Item`.
/// Handles associated consts, methods (provided and required), and
/// associated types.
impl Clean<Item> for ty::AssocItem {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.kind {
            ty::AssocKind::Const => {
                let ty = cx.tcx.type_of(self.def_id);
                // Only render a value when the item actually provides one.
                let default = if self.defaultness.has_value() {
                    Some(inline::print_inlined_const(cx, self.def_id))
                } else {
                    None
                };
                AssocConstItem(ty.clean(cx), default)
            }
            ty::AssocKind::Method => {
                let generics =
                    (cx.tcx.generics_of(self.def_id), cx.tcx.explicit_predicates_of(self.def_id))
                        .clean(cx);
                let sig = cx.tcx.fn_sig(self.def_id);
                let mut decl = (self.def_id, sig).clean(cx);
                if self.method_has_self_argument {
                    // Rewrite the first argument back to `Self` for display:
                    // by-value receivers become `Self`, by-reference
                    // receivers become `&Self` / `&mut Self`.
                    let self_ty = match self.container {
                        ty::ImplContainer(def_id) => cx.tcx.type_of(def_id),
                        ty::TraitContainer(_) => cx.tcx.types.self_param,
                    };
                    let self_arg_ty = *sig.input(0).skip_binder();
                    if self_arg_ty == self_ty {
                        decl.inputs.values[0].type_ = Generic(String::from("Self"));
                    } else if let ty::Ref(_, ty, _) = self_arg_ty.kind {
                        if ty == self_ty {
                            match decl.inputs.values[0].type_ {
                                BorrowedRef { ref mut type_, .. } => {
                                    **type_ = Generic(String::from("Self"))
                                }
                                _ => unreachable!(),
                            }
                        }
                    }
                }
                // Impl methods always have a body; trait methods only count
                // as provided when they have a default body.
                let provided = match self.container {
                    ty::ImplContainer(_) => true,
                    ty::TraitContainer(_) => self.defaultness.has_value(),
                };
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                if provided {
                    let constness = if cx.tcx.is_min_const_fn(self.def_id) {
                        hir::Constness::Const
                    } else {
                        hir::Constness::NotConst
                    };
                    let asyncness = cx.tcx.asyncness(self.def_id);
                    let defaultness = match self.container {
                        ty::ImplContainer(_) => Some(self.defaultness),
                        ty::TraitContainer(_) => None,
                    };
                    MethodItem(Method {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness,
                            asyncness,
                        },
                        defaultness,
                        all_types,
                        ret_types,
                    })
                } else {
                    // Required trait method: no body, hence never const/async.
                    TyMethodItem(TyMethod {
                        generics,
                        decl,
                        header: hir::FnHeader {
                            unsafety: sig.unsafety(),
                            abi: sig.abi(),
                            constness: hir::Constness::NotConst,
                            asyncness: hir::IsAsync::NotAsync,
                        },
                        all_types,
                        ret_types,
                    })
                }
            }
            ty::AssocKind::Type => {
                let my_name = self.ident.name.clean(cx);
                if let ty::TraitContainer(did) = self.container {
                    // When loading a cross-crate associated type, the bounds for this type
                    // are actually located on the trait/impl itself, so we need to load
                    // all of the generics from there and then look for bounds that are
                    // applied to this associated type in question.
                    let predicates = cx.tcx.explicit_predicates_of(did);
                    let generics = (cx.tcx.generics_of(did), predicates).clean(cx);
                    // Keep only `Self::<my_name>: Bound`-shaped predicates.
                    let mut bounds = generics
                        .where_predicates
                        .iter()
                        .filter_map(|pred| {
                            let (name, self_type, trait_, bounds) = match *pred {
                                WherePredicate::BoundPredicate {
                                    ty: QPath { ref name, ref self_type, ref trait_ },
                                    ref bounds,
                                } => (name, self_type, trait_, bounds),
                                _ => return None,
                            };
                            if *name != my_name {
                                return None;
                            }
                            match **trait_ {
                                ResolvedPath { did, .. } if did == self.container.id() => {}
                                _ => return None,
                            }
                            match **self_type {
                                Generic(ref s) if *s == "Self" => {}
                                _ => return None,
                            }
                            Some(bounds)
                        })
                        .flat_map(|i| i.iter().cloned())
                        .collect::<Vec<_>>();
                    // Our Sized/?Sized bound didn't get handled when creating the generics
                    // because we didn't actually get our whole set of bounds until just now
                    // (some of them may have come from the trait). If we do have a sized
                    // bound, we remove it, and if we don't then we add the `?Sized` bound
                    // at the end.
                    match bounds.iter().position(|b| b.is_sized_bound(cx)) {
                        Some(i) => {
                            bounds.remove(i);
                        }
                        None => bounds.push(GenericBound::maybe_sized(cx)),
                    }
                    let ty = if self.defaultness.has_value() {
                        Some(cx.tcx.type_of(self.def_id))
                    } else {
                        None
                    };
                    AssocTypeItem(bounds, ty.clean(cx))
                } else {
                    // Associated type in an impl block: render as a typedef.
                    TypedefItem(
                        Typedef {
                            type_: cx.tcx.type_of(self.def_id).clean(cx),
                            generics: Generics { params: Vec::new(), where_predicates: Vec::new() },
                        },
                        true,
                    )
                }
            }
            ty::AssocKind::OpaqueTy => unimplemented!(),
        };
        // Trait members inherit visibility from the trait itself.
        let visibility = match self.container {
            ty::ImplContainer(_) => self.vis.clean(cx),
            ty::TraitContainer(_) => Inherited,
        };
        Item {
            name: Some(self.ident.name.clean(cx)),
            visibility,
            stability: get_stability(cx, self.def_id),
            deprecation: get_deprecation(cx, self.def_id),
            def_id: self.def_id,
            attrs: inline::load_attrs(cx, self.def_id).clean(cx),
            source: cx.tcx.def_span(self.def_id).clean(cx),
            inner,
        }
    }
}
/// Converts an HIR type into a clean `Type`: resolves paths, substitutes
/// private type aliases in place, and lowers `impl Trait`/trait objects.
impl Clean<Type> for hir::Ty {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        use rustc::hir::*;
        match self.kind {
            TyKind::Never => Never,
            TyKind::Ptr(ref m) => RawPointer(m.mutbl, box m.ty.clean(cx)),
            TyKind::Rptr(ref l, ref m) => {
                let lifetime = if l.is_elided() { None } else { Some(l.clean(cx)) };
                BorrowedRef { lifetime, mutability: m.mutbl, type_: box m.ty.clean(cx) }
            }
            TyKind::Slice(ref ty) => Slice(box ty.clean(cx)),
            TyKind::Array(ref ty, ref length) => {
                let def_id = cx.tcx.hir().local_def_id(length.hir_id);
                // Try to evaluate the array length; fall back to the raw
                // source snippet (or `_`) when evaluation fails.
                let length = match cx.tcx.const_eval_poly(def_id) {
                    Ok(length) => print_const(cx, length),
                    Err(_) => cx
                        .sess()
                        .source_map()
                        .span_to_snippet(cx.tcx.def_span(def_id))
                        .unwrap_or_else(|_| "_".to_string()),
                };
                Array(box ty.clean(cx), length)
            }
            TyKind::Tup(ref tys) => Tuple(tys.clean(cx)),
            TyKind::Def(item_id, _) => {
                // `impl Trait` was lowered to an opaque type item; recover
                // its bounds for rendering.
                let item = cx.tcx.hir().expect_item(item_id.id);
                if let hir::ItemKind::OpaqueTy(ref ty) = item.kind {
                    ImplTrait(ty.bounds.clean(cx))
                } else {
                    unreachable!()
                }
            }
            TyKind::Path(hir::QPath::Resolved(None, ref path)) => {
                if let Res::Def(DefKind::TyParam, did) = path.res {
                    // Apply any in-flight substitution for this type param
                    // (set up while expanding an alias or `impl Trait`).
                    if let Some(new_ty) = cx.ty_substs.borrow().get(&did).cloned() {
                        return new_ty;
                    }
                    if let Some(bounds) = cx.impl_trait_bounds.borrow_mut().remove(&did.into()) {
                        return ImplTrait(bounds);
                    }
                }
                let mut alias = None;
                if let Res::Def(DefKind::TyAlias, def_id) = path.res {
                    // Substitute private type aliases
                    if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) {
                        if !cx.renderinfo.borrow().access_levels.is_exported(def_id) {
                            alias = Some(&cx.tcx.hir().expect_item(hir_id).kind);
                        }
                    }
                };
                if let Some(&hir::ItemKind::TyAlias(ref ty, ref generics)) = alias {
                    // Expand the private alias in place: pair each of the
                    // alias's generic params with the argument supplied at
                    // this use site (or the param's default), then clean the
                    // aliased type under those substitutions.
                    let provided_params = &path.segments.last().expect("segments were empty");
                    let mut ty_substs = FxHashMap::default();
                    let mut lt_substs = FxHashMap::default();
                    let mut ct_substs = FxHashMap::default();
                    let generic_args = provided_params.generic_args();
                    {
                        let mut indices: GenericParamCount = Default::default();
                        for param in generics.params.iter() {
                            match param.kind {
                                hir::GenericParamKind::Lifetime { .. } => {
                                    // Find the j-th lifetime argument matching
                                    // this (the indices.lifetimes-th) param.
                                    let mut j = 0;
                                    let lifetime =
                                        generic_args.args.iter().find_map(|arg| match arg {
                                            hir::GenericArg::Lifetime(lt) => {
                                                if indices.lifetimes == j {
                                                    return Some(lt);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        });
                                    if let Some(lt) = lifetime.cloned() {
                                        if !lt.is_elided() {
                                            let lt_def_id = cx.tcx.hir().local_def_id(param.hir_id);
                                            lt_substs.insert(lt_def_id, lt.clean(cx));
                                        }
                                    }
                                    indices.lifetimes += 1;
                                }
                                hir::GenericParamKind::Type { ref default, .. } => {
                                    let ty_param_def_id = cx.tcx.hir().local_def_id(param.hir_id);
                                    let mut j = 0;
                                    let type_ =
                                        generic_args.args.iter().find_map(|arg| match arg {
                                            hir::GenericArg::Type(ty) => {
                                                if indices.types == j {
                                                    return Some(ty);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        });
                                    if let Some(ty) = type_ {
                                        ty_substs.insert(ty_param_def_id, ty.clean(cx));
                                    } else if let Some(default) = default.clone() {
                                        // No explicit argument: use the default.
                                        ty_substs.insert(ty_param_def_id, default.clean(cx));
                                    }
                                    indices.types += 1;
                                }
                                hir::GenericParamKind::Const { .. } => {
                                    let const_param_def_id =
                                        cx.tcx.hir().local_def_id(param.hir_id);
                                    let mut j = 0;
                                    let const_ =
                                        generic_args.args.iter().find_map(|arg| match arg {
                                            hir::GenericArg::Const(ct) => {
                                                if indices.consts == j {
                                                    return Some(ct);
                                                }
                                                j += 1;
                                                None
                                            }
                                            _ => None,
                                        });
                                    if let Some(ct) = const_ {
                                        ct_substs.insert(const_param_def_id, ct.clean(cx));
                                    }
                                    // FIXME(const_generics:defaults)
                                    indices.consts += 1;
                                }
                            }
                        }
                    }
                    return cx.enter_alias(ty_substs, lt_substs, ct_substs, || ty.clean(cx));
                }
                resolve_type(cx, path.clean(cx), self.hir_id)
            }
            TyKind::Path(hir::QPath::Resolved(Some(ref qself), ref p)) => {
                // Fully-qualified path `<T as Trait>::Name`: split the trait
                // portion from the final associated-item segment.
                let segments = if p.is_global() { &p.segments[1..] } else { &p.segments };
                let trait_segments = &segments[..segments.len() - 1];
                let trait_path = self::Path {
                    global: p.is_global(),
                    res: Res::Def(
                        DefKind::Trait,
                        cx.tcx.associated_item(p.res.def_id()).container.id(),
                    ),
                    segments: trait_segments.clean(cx),
                };
                Type::QPath {
                    name: p.segments.last().expect("segments were empty").ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path, self.hir_id),
                }
            }
            TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => {
                // `T::Name` with the trait left implicit; recover the trait
                // from the type-checked projection when one exists.
                let mut res = Res::Err;
                let ty = hir_ty_to_ty(cx.tcx, self);
                if let ty::Projection(proj) = ty.kind {
                    res = Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id);
                }
                let trait_path = hir::Path { span: self.span, res, segments: vec![].into() };
                Type::QPath {
                    name: segment.ident.name.clean(cx),
                    self_type: box qself.clean(cx),
                    trait_: box resolve_type(cx, trait_path.clean(cx), self.hir_id),
                }
            }
            TyKind::TraitObject(ref bounds, ref lifetime) => {
                // The first bound is the principal trait; remaining bounds
                // (plus any explicit lifetime) are attached as `param_names`.
                match bounds[0].clean(cx).trait_ {
                    ResolvedPath { path, param_names: None, did, is_generic } => {
                        let mut bounds: Vec<self::GenericBound> = bounds[1..]
                            .iter()
                            .map(|bound| {
                                self::GenericBound::TraitBound(
                                    bound.clean(cx),
                                    hir::TraitBoundModifier::None,
                                )
                            })
                            .collect();
                        if !lifetime.is_elided() {
                            bounds.push(self::GenericBound::Outlives(lifetime.clean(cx)));
                        }
                        ResolvedPath { path, param_names: Some(bounds), did, is_generic }
                    }
                    _ => Infer, // shouldn't happen
                }
            }
            TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
            TyKind::Infer | TyKind::Err => Infer,
            TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.kind),
        }
    }
}
/// Converts a fully type-checked `ty::Ty` into a clean `Type`. Used for
/// items with no local HIR (e.g. cross-crate inlining).
impl<'tcx> Clean<Type> for Ty<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Type {
        debug!("cleaning type: {:?}", self);
        match self.kind {
            ty::Never => Never,
            ty::Bool => Primitive(PrimitiveType::Bool),
            ty::Char => Primitive(PrimitiveType::Char),
            ty::Int(int_ty) => Primitive(int_ty.into()),
            ty::Uint(uint_ty) => Primitive(uint_ty.into()),
            ty::Float(float_ty) => Primitive(float_ty.into()),
            ty::Str => Primitive(PrimitiveType::Str),
            ty::Slice(ty) => Slice(box ty.clean(cx)),
            ty::Array(ty, n) => {
                // Evaluate the array length to a rendered constant.
                let mut n = cx.tcx.lift(&n).expect("array lift failed");
                n = n.eval(cx.tcx, ty::ParamEnv::reveal_all());
                let n = print_const(cx, n);
                Array(box ty.clean(cx), n)
            }
            ty::RawPtr(mt) => RawPointer(mt.mutbl, box mt.ty.clean(cx)),
            ty::Ref(r, ty, mutbl) => {
                BorrowedRef { lifetime: r.clean(cx), mutability: mutbl, type_: box ty.clean(cx) }
            }
            ty::FnDef(..) | ty::FnPtr(_) => {
                // Render both fn items and fn pointers as bare fn types.
                let ty = cx.tcx.lift(self).expect("FnPtr lift failed");
                let sig = ty.fn_sig(cx.tcx);
                let local_def_id = cx.tcx.hir().local_def_id_from_node_id(ast::CRATE_NODE_ID);
                BareFunction(box BareFunctionDecl {
                    unsafety: sig.unsafety(),
                    generic_params: Vec::new(),
                    decl: (local_def_id, sig).clean(cx),
                    abi: sig.abi(),
                })
            }
            ty::Adt(def, substs) => {
                let did = def.did;
                let kind = match def.adt_kind() {
                    AdtKind::Struct => TypeKind::Struct,
                    AdtKind::Union => TypeKind::Union,
                    AdtKind::Enum => TypeKind::Enum,
                };
                inline::record_extern_fqn(cx, did, kind);
                let path = external_path(cx, cx.tcx.item_name(did), None, false, vec![], substs);
                ResolvedPath { path, param_names: None, did, is_generic: false }
            }
            ty::Foreign(did) => {
                inline::record_extern_fqn(cx, did, TypeKind::Foreign);
                let path = external_path(
                    cx,
                    cx.tcx.item_name(did),
                    None,
                    false,
                    vec![],
                    InternalSubsts::empty(),
                );
                ResolvedPath { path, param_names: None, did, is_generic: false }
            }
            ty::Dynamic(ref obj, ref reg) => {
                // HACK: pick the first `did` as the `did` of the trait object. Someone
                // might want to implement "native" support for marker-trait-only
                // trait objects.
                let mut dids = obj.principal_def_id().into_iter().chain(obj.auto_traits());
                let did = dids
                    .next()
                    .unwrap_or_else(|| panic!("found trait object `{:?}` with no traits?", self));
                let substs = match obj.principal() {
                    Some(principal) => principal.skip_binder().substs,
                    // marker traits have no substs.
                    _ => cx.tcx.intern_substs(&[]),
                };
                inline::record_extern_fqn(cx, did, TypeKind::Trait);
                // Collect the region bound plus the remaining (auto) traits
                // as `param_names` on the principal's path.
                let mut param_names = vec![];
                reg.clean(cx).map(|b| param_names.push(GenericBound::Outlives(b)));
                for did in dids {
                    let empty = cx.tcx.intern_substs(&[]);
                    let path =
                        external_path(cx, cx.tcx.item_name(did), Some(did), false, vec![], empty);
                    inline::record_extern_fqn(cx, did, TypeKind::Trait);
                    let bound = GenericBound::TraitBound(
                        PolyTrait {
                            trait_: ResolvedPath {
                                path,
                                param_names: None,
                                did,
                                is_generic: false,
                            },
                            generic_params: Vec::new(),
                        },
                        hir::TraitBoundModifier::None,
                    );
                    param_names.push(bound);
                }
                // Associated-type bindings (`dyn Trait<Item = T>`).
                let mut bindings = vec![];
                for pb in obj.projection_bounds() {
                    bindings.push(TypeBinding {
                        name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
                        kind: TypeBindingKind::Equality { ty: pb.skip_binder().ty.clean(cx) },
                    });
                }
                let path =
                    external_path(cx, cx.tcx.item_name(did), Some(did), false, bindings, substs);
                ResolvedPath { path, param_names: Some(param_names), did, is_generic: false }
            }
            ty::Tuple(ref t) => {
                Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx))
            }
            ty::Projection(ref data) => data.clean(cx),
            ty::Param(ref p) => {
                // A generic param that stands in for `impl Trait` is
                // rendered back as `impl Trait`.
                if let Some(bounds) = cx.impl_trait_bounds.borrow_mut().remove(&p.index.into()) {
                    ImplTrait(bounds)
                } else {
                    Generic(p.name.to_string())
                }
            }
            ty::Opaque(def_id, substs) => {
                // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
                // by looking up the projections associated with the def_id.
                let predicates_of = cx.tcx.explicit_predicates_of(def_id);
                let substs = cx.tcx.lift(&substs).expect("Opaque lift failed");
                let bounds = predicates_of.instantiate(cx.tcx, substs);
                let mut regions = vec![];
                let mut has_sized = false;
                let mut bounds = bounds
                    .predicates
                    .iter()
                    .filter_map(|predicate| {
                        let trait_ref = if let Some(tr) = predicate.to_opt_poly_trait_ref() {
                            tr
                        } else if let ty::Predicate::TypeOutlives(pred) = *predicate {
                            // these should turn up at the end
                            pred.skip_binder()
                                .1
                                .clean(cx)
                                .map(|r| regions.push(GenericBound::Outlives(r)));
                            return None;
                        } else {
                            return None;
                        };
                        // `Sized` is implicit; drop it and remember we saw it
                        // so we don't add `?Sized` below.
                        if let Some(sized) = cx.tcx.lang_items().sized_trait() {
                            if trait_ref.def_id() == sized {
                                has_sized = true;
                                return None;
                            }
                        }
                        // Pair each trait bound with its associated-type
                        // projections (`Item = T`).
                        let bounds = bounds
                            .predicates
                            .iter()
                            .filter_map(|pred| {
                                if let ty::Predicate::Projection(proj) = *pred {
                                    let proj = proj.skip_binder();
                                    if proj.projection_ty.trait_ref(cx.tcx)
                                        == *trait_ref.skip_binder()
                                    {
                                        Some(TypeBinding {
                                            name: cx
                                                .tcx
                                                .associated_item(proj.projection_ty.item_def_id)
                                                .ident
                                                .name
                                                .clean(cx),
                                            kind: TypeBindingKind::Equality {
                                                ty: proj.ty.clean(cx),
                                            },
                                        })
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            })
                            .collect();
                        Some((trait_ref.skip_binder(), bounds).clean(cx))
                    })
                    .collect::<Vec<_>>();
                bounds.extend(regions);
                if !has_sized && !bounds.is_empty() {
                    bounds.insert(0, GenericBound::maybe_sized(cx));
                }
                ImplTrait(bounds)
            }
            ty::Closure(..) | ty::Generator(..) => Tuple(vec![]), // FIXME(pcwalton)
            ty::Bound(..) => panic!("Bound"),
            ty::Placeholder(..) => panic!("Placeholder"),
            ty::UnnormalizedProjection(..) => panic!("UnnormalizedProjection"),
            ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
            ty::Infer(..) => panic!("Infer"),
            ty::Error => panic!("Error"),
        }
    }
}
/// Renders a type-level constant for display; the value is never evaluated
/// here, only pretty-printed via `Display`.
impl<'tcx> Clean<Constant> for ty::Const<'tcx> {
    fn clean(&self, cx: &DocContext<'_>) -> Constant {
        let type_ = self.ty.clean(cx);
        let expr = format!("{}", self);
        Constant { expr, type_, value: None, is_literal: false }
    }
}
/// Cleans a struct/enum-variant field from local HIR into an `Item`.
impl Clean<Item> for hir::StructField<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let did = cx.tcx.hir().local_def_id(self.hir_id);
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: self.attrs.clean(cx),
            source: self.span.clean(cx),
            def_id: did,
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, did),
            deprecation: get_deprecation(cx, did),
            inner: StructFieldItem(self.ty.clean(cx)),
        }
    }
}
/// Cleans a type-checked field definition (works cross-crate, attrs and
/// span are fetched through the `tcx`).
impl Clean<Item> for ty::FieldDef {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let did = self.did;
        Item {
            name: Some(self.ident.name).clean(cx),
            attrs: cx.tcx.get_attrs(did).clean(cx),
            source: cx.tcx.def_span(did).clean(cx),
            def_id: did,
            visibility: self.vis.clean(cx),
            stability: get_stability(cx, did),
            deprecation: get_deprecation(cx, did),
            inner: StructFieldItem(cx.tcx.type_of(did).clean(cx)),
        }
    }
}
/// Maps an HIR visibility annotation onto the clean `Visibility`.
impl Clean<Visibility> for hir::Visibility {
    fn clean(&self, cx: &DocContext<'_>) -> Visibility {
        match self.node {
            hir::VisibilityKind::Inherited => Visibility::Inherited,
            hir::VisibilityKind::Public => Visibility::Public,
            hir::VisibilityKind::Crate(_) => Visibility::Crate,
            hir::VisibilityKind::Restricted { ref path, .. } => {
                // `pub(in path)`: keep both the resolved target and the path.
                let cleaned = path.clean(cx);
                let did = register_res(cx, cleaned.res);
                Visibility::Restricted(did, cleaned)
            }
        }
    }
}
/// Collapses a type-checked visibility to the two states rustdoc renders:
/// fully public, or everything else as inherited.
impl Clean<Visibility> for ty::Visibility {
    fn clean(&self, _: &DocContext<'_>) -> Visibility {
        match *self {
            ty::Visibility::Public => Public,
            _ => Inherited,
        }
    }
}
/// Builds the clean `Item` for a `struct` collected by doctree.
impl Clean<Item> for doctree::Struct<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = StructItem(Struct {
            struct_type: self.struct_type,
            generics: self.generics.clean(cx),
            fields: self.fields.clean(cx),
            fields_stripped: false,
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Builds the clean `Item` for a `union` collected by doctree.
impl Clean<Item> for doctree::Union<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = UnionItem(Union {
            struct_type: self.struct_type,
            generics: self.generics.clean(cx),
            fields: self.fields.clean(cx),
            fields_stripped: false,
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Converts struct-like variant data into the clean `VariantStruct` form.
impl Clean<VariantStruct> for ::rustc::hir::VariantData<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> VariantStruct {
        let fields = self.fields().iter().map(|field| field.clean(cx)).collect();
        VariantStruct {
            fields,
            struct_type: doctree::struct_type_from_def(self),
            fields_stripped: false,
        }
    }
}
/// Builds the clean `Item` for an `enum` collected by doctree.
impl Clean<Item> for doctree::Enum<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let variants = self.variants.iter().map(|v| v.clean(cx)).collect();
        let inner = EnumItem(Enum {
            variants,
            generics: self.generics.clean(cx),
            variants_stripped: false,
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Builds the clean `Item` for a single enum variant; variants always
/// inherit their visibility from the enclosing enum.
impl Clean<Item> for doctree::Variant<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let def_id = cx.tcx.hir().local_def_id(self.id);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id,
            visibility: Inherited,
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner: VariantItem(Variant { kind: self.def.clean(cx) }),
        }
    }
}
/// Converts a type-checked enum variant definition (cross-crate capable)
/// into a clean `Item`.
impl Clean<Item> for ty::VariantDef {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        // The constructor kind tells us the variant's shape:
        // const => unit, fn => tuple, fictive => struct-like.
        let kind = match self.ctor_kind {
            CtorKind::Const => VariantKind::CLike,
            CtorKind::Fn => VariantKind::Tuple(
                self.fields.iter().map(|f| cx.tcx.type_of(f.did).clean(cx)).collect(),
            ),
            CtorKind::Fictive => VariantKind::Struct(VariantStruct {
                struct_type: doctree::Plain,
                fields_stripped: false,
                fields: self
                    .fields
                    .iter()
                    .map(|field| Item {
                        source: cx.tcx.def_span(field.did).clean(cx),
                        name: Some(field.ident.name.clean(cx)),
                        attrs: cx.tcx.get_attrs(field.did).clean(cx),
                        visibility: field.vis.clean(cx),
                        def_id: field.did,
                        stability: get_stability(cx, field.did),
                        deprecation: get_deprecation(cx, field.did),
                        inner: StructFieldItem(cx.tcx.type_of(field.did).clean(cx)),
                    })
                    .collect(),
            }),
        };
        Item {
            name: Some(self.ident.clean(cx)),
            attrs: inline::load_attrs(cx, self.def_id).clean(cx),
            source: cx.tcx.def_span(self.def_id).clean(cx),
            visibility: Inherited,
            def_id: self.def_id,
            inner: VariantItem(Variant { kind }),
            stability: get_stability(cx, self.def_id),
            deprecation: get_deprecation(cx, self.def_id),
        }
    }
}
/// Maps local HIR variant data onto the clean `VariantKind`.
impl Clean<VariantKind> for hir::VariantData<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> VariantKind {
        match self {
            hir::VariantData::Unit(..) => VariantKind::CLike,
            hir::VariantData::Tuple(..) => {
                let types = self.fields().iter().map(|field| field.ty.clean(cx)).collect();
                VariantKind::Tuple(types)
            }
            hir::VariantData::Struct(..) => VariantKind::Struct(self.clean(cx)),
        }
    }
}
/// Translates a compiler span into rustdoc's filename + line/column form.
/// Dummy spans become `Span::empty()`.
impl Clean<Span> for syntax_pos::Span {
    fn clean(&self, cx: &DocContext<'_>) -> Span {
        if self.is_dummy() {
            return Span::empty();
        }
        let source_map = cx.sess().source_map();
        let lo = source_map.lookup_char_pos(self.lo());
        let hi = source_map.lookup_char_pos(self.hi());
        Span {
            filename: source_map.span_to_filename(*self),
            loline: lo.line,
            locol: lo.col.to_usize(),
            hiline: hi.line,
            hicol: hi.col.to_usize(),
            original: *self,
        }
    }
}
/// Cleans an HIR path, dropping the leading empty segment of global
/// (`::`-prefixed) paths.
impl Clean<Path> for hir::Path {
    fn clean(&self, cx: &DocContext<'_>) -> Path {
        let global = self.is_global();
        let segments = if global { &self.segments[1..] } else { &self.segments };
        Path { global, res: self.res, segments: segments.clean(cx) }
    }
}
/// Converts HIR generic arguments, distinguishing parenthesized sugar
/// (`Fn(A, B) -> C`) from ordinary angle-bracketed arguments.
impl Clean<GenericArgs> for hir::GenericArgs {
    fn clean(&self, cx: &DocContext<'_>) -> GenericArgs {
        if self.parenthesized {
            // `Fn(A, B) -> C` sugar: the single binding carries the output.
            let output = self.bindings[0].ty().clean(cx);
            GenericArgs::Parenthesized {
                inputs: self.inputs().clean(cx),
                // A unit output renders as no `-> ...` at all.
                output: if output != Type::Tuple(Vec::new()) { Some(output) } else { None },
            }
        } else {
            // Lifetimes are dropped from the output only when *all* of them
            // are elided; a single explicit lifetime keeps them all.
            let elide_lifetimes = self.args.iter().all(|arg| match arg {
                hir::GenericArg::Lifetime(lt) => lt.is_elided(),
                _ => true,
            });
            GenericArgs::AngleBracketed {
                args: self
                    .args
                    .iter()
                    .filter_map(|arg| match arg {
                        hir::GenericArg::Lifetime(lt) if !elide_lifetimes => {
                            Some(GenericArg::Lifetime(lt.clean(cx)))
                        }
                        hir::GenericArg::Lifetime(_) => None,
                        hir::GenericArg::Type(ty) => Some(GenericArg::Type(ty.clean(cx))),
                        hir::GenericArg::Const(ct) => Some(GenericArg::Const(ct.clean(cx))),
                    })
                    .collect(),
                bindings: self.bindings.clean(cx),
            }
        }
    }
}
/// Cleans one path segment: its name plus any generic arguments.
impl Clean<PathSegment> for hir::PathSegment {
    fn clean(&self, cx: &DocContext<'_>) -> PathSegment {
        let args = self.generic_args().clean(cx);
        PathSegment { name: self.ident.name.clean(cx), args }
    }
}
/// Renders an identifier as its plain name string.
impl Clean<String> for Ident {
    #[inline]
    fn clean(&self, cx: &DocContext<'_>) -> String {
        self.name.clean(cx)
    }
}
/// Renders an interned name as an owned string; needs no context.
impl Clean<String> for ast::Name {
    #[inline]
    fn clean(&self, _: &DocContext<'_>) -> String {
        self.to_string()
    }
}
/// Builds the clean `Item` for a free `type` alias (`false` marks it as
/// not being an impl-associated typedef; compare `hir::ImplItem` above).
impl Clean<Item> for doctree::Typedef<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner =
            TypedefItem(Typedef { type_: self.ty.clean(cx), generics: self.gen.clean(cx) }, false);
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Builds the clean `Item` for a free opaque type (`type Foo = impl ...`).
impl Clean<Item> for doctree::OpaqueTy<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = OpaqueTyItem(
            OpaqueTy {
                bounds: self.opaque_ty.bounds.clean(cx),
                generics: self.opaque_ty.generics.clean(cx),
            },
            false,
        );
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Cleans a bare function type (`fn(...) -> ...`). The params and the
/// signature are cleaned inside `enter_impl_trait`, matching how other
/// function-like items in this module are handled.
impl Clean<BareFunctionDecl> for hir::BareFnTy {
    fn clean(&self, cx: &DocContext<'_>) -> BareFunctionDecl {
        let (generic_params, decl) = enter_impl_trait(cx, || {
            (self.generic_params.clean(cx), (&*self.decl, &self.param_names[..]).clean(cx))
        });
        BareFunctionDecl { decl, generic_params, unsafety: self.unsafety, abi: self.abi }
    }
}
/// Builds the clean `Item` for a `static`.
impl Clean<Item> for doctree::Static<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        debug!("cleaning static {}: {:?}", self.name.clean(cx), self);
        let inner = StaticItem(Static {
            type_: self.type_.clean(cx),
            mutability: self.mutability,
            expr: print_const_expr(cx, self.expr),
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Builds the clean `Item` for a `const`, including both its source
/// expression and (when possible) its evaluated value.
impl Clean<Item> for doctree::Constant<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let def_id = cx.tcx.hir().local_def_id(self.id);
        let inner = ConstantItem(Constant {
            type_: self.type_.clean(cx),
            expr: print_const_expr(cx, self.expr),
            value: print_evaluated_const(cx, def_id),
            is_literal: is_literal_expr(cx, self.expr.hir_id),
        });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id,
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Collapses the three type-checked impl polarities into rustdoc's two.
impl Clean<ImplPolarity> for ty::ImplPolarity {
    fn clean(&self, _: &DocContext<'_>) -> ImplPolarity {
        match *self {
            // FIXME: do we want to do something else here?
            ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => ImplPolarity::Positive,
            ty::ImplPolarity::Negative => ImplPolarity::Negative,
        }
    }
}
/// Cleans an `impl` block. Returns a `Vec` because a `Deref` impl can pull
/// in additional inlined items beyond the impl itself.
impl Clean<Vec<Item>> for doctree::Impl<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        let mut ret = Vec::new();
        let trait_ = self.trait_.clean(cx);
        let items = self.items.iter().map(|ii| ii.clean(cx)).collect::<Vec<_>>();
        let def_id = cx.tcx.hir().local_def_id(self.id);
        // If this impl block is an implementation of the Deref trait, then we
        // need to try inlining the target's inherent impl blocks as well.
        if trait_.def_id() == cx.tcx.lang_items().deref_trait() {
            build_deref_target_impls(cx, &items, &mut ret);
        }
        // Names of the trait's methods with default bodies, used by the
        // renderer to distinguish provided from required methods.
        let provided = trait_
            .def_id()
            .map(|did| {
                cx.tcx
                    .provided_trait_methods(did)
                    .into_iter()
                    .map(|meth| meth.ident.to_string())
                    .collect()
            })
            .unwrap_or_default();
        ret.push(Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id,
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner: ImplItem(Impl {
                unsafety: self.unsafety,
                generics: self.generics.clean(cx),
                provided_trait_methods: provided,
                trait_,
                for_: self.for_.clean(cx),
                items,
                polarity: Some(cx.tcx.impl_polarity(def_id).clean(cx)),
                synthetic: false,
                blanket_impl: None,
            }),
        });
        ret
    }
}
/// Cleans an `extern crate` item. When the item is `pub` and carries
/// `#[doc(inline)]`, the external crate's root module is inlined instead
/// of emitting the `extern crate` itself.
impl Clean<Vec<Item>> for doctree::ExternCrate<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        let please_inline = self.vis.node.is_pub()
            && self.attrs.iter().any(|a| {
                a.check_name(sym::doc)
                    && match a.meta_item_list() {
                        Some(l) => attr::list_contains_name(&l, sym::inline),
                        None => false,
                    }
            });
        if please_inline {
            let mut visited = FxHashSet::default();
            // The crate root is addressed as the external crate's root module.
            let res = Res::Def(DefKind::Mod, DefId { krate: self.cnum, index: CRATE_DEF_INDEX });
            if let Some(items) = inline::try_inline(
                cx,
                res,
                self.name,
                Some(rustc::ty::Attributes::Borrowed(self.attrs)),
                &mut visited,
            ) {
                return items;
            }
        }
        vec![Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: DefId { krate: self.cnum, index: CRATE_DEF_INDEX },
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ExternCrateItem(self.name.clean(cx), self.path.clone()),
        }]
    }
}
/// Cleans a `use` item, deciding whether to inline the target's docs
/// (returning the inlined items) or keep the import as-is.
impl Clean<Vec<Item>> for doctree::Import<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
        // We consider inlining the documentation of `pub use` statements, but we
        // forcefully don't inline if this is not public or if the
        // #[doc(no_inline)] attribute is present.
        // Don't inline doc(hidden) imports so they can be stripped at a later stage.
        let mut denied = !self.vis.node.is_pub()
            || self.attrs.iter().any(|a| {
                a.check_name(sym::doc)
                    && match a.meta_item_list() {
                        Some(l) => {
                            attr::list_contains_name(&l, sym::no_inline)
                                || attr::list_contains_name(&l, sym::hidden)
                        }
                        None => false,
                    }
            });
        // Also check whether imports were asked to be inlined, in case we're trying to re-export a
        // crate in Rust 2018+
        let please_inline = self.attrs.lists(sym::doc).has_word(sym::inline);
        let path = self.path.clean(cx);
        let inner = if self.glob {
            // `use foo::*`: inline the whole module when allowed.
            if !denied {
                let mut visited = FxHashSet::default();
                if let Some(items) = inline::try_inline_glob(cx, path.res, &mut visited) {
                    return items;
                }
            }
            Import::Glob(resolve_use_source(cx, path))
        } else {
            let name = self.name;
            if !please_inline {
                match path.res {
                    Res::Def(DefKind::Mod, did) => {
                        if !did.is_local() && did.index == CRATE_DEF_INDEX {
                            // if we're `pub use`ing an extern crate root, don't inline it unless we
                            // were specifically asked for it
                            denied = true;
                        }
                    }
                    _ => {}
                }
            }
            if !denied {
                let mut visited = FxHashSet::default();
                if let Some(items) = inline::try_inline(
                    cx,
                    path.res,
                    name,
                    Some(rustc::ty::Attributes::Borrowed(self.attrs)),
                    &mut visited,
                ) {
                    return items;
                }
            }
            Import::Simple(name.clean(cx), resolve_use_source(cx, path))
        };
        vec![Item {
            name: None,
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id_from_node_id(ast::CRATE_NODE_ID),
            visibility: self.vis.clean(cx),
            stability: None,
            deprecation: None,
            inner: ImportItem(inner),
        }]
    }
}
/// Cleans an item from an `extern { ... }` block (foreign function,
/// static, or opaque foreign type).
impl Clean<Item> for doctree::ForeignItem<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = match self.kind {
            hir::ForeignItemKind::Fn(ref decl, ref names, ref generics) => {
                let abi = cx.tcx.hir().get_foreign_abi(self.id);
                let (generics, decl) =
                    enter_impl_trait(cx, || (generics.clean(cx), (&**decl, &names[..]).clean(cx)));
                let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
                ForeignFunctionItem(Function {
                    decl,
                    generics,
                    header: hir::FnHeader {
                        // Foreign functions are always rendered as `unsafe`.
                        unsafety: hir::Unsafety::Unsafe,
                        abi,
                        constness: hir::Constness::NotConst,
                        asyncness: hir::IsAsync::NotAsync,
                    },
                    all_types,
                    ret_types,
                })
            }
            hir::ForeignItemKind::Static(ref ty, mutbl) => ForeignStaticItem(Static {
                type_: ty.clean(cx),
                mutability: *mutbl,
                // Foreign statics have no initializer expression.
                expr: String::new(),
            }),
            hir::ForeignItemKind::Type => ForeignTypeItem,
        };
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: self.vis.clean(cx),
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Cleans a `macro_rules!` macro, reconstructing a display-only source
/// listing in which each matcher arm's body is elided to `{ ... }`.
impl Clean<Item> for doctree::Macro<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let name = self.name.clean(cx);
        Item {
            name: Some(name.clone()),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            // Macros reaching this point are exported, hence rendered public.
            visibility: Public,
            stability: cx.stability(self.hid).clean(cx),
            deprecation: cx.deprecation(self.hid).clean(cx),
            def_id: self.def_id,
            inner: MacroItem(Macro {
                source: format!(
                    "macro_rules! {} {{\n{}}}",
                    name,
                    self.matchers
                        .iter()
                        .map(|span| { format!("    {} => {{ ... }};\n", span.to_src(cx)) })
                        .collect::<String>()
                ),
                imported_from: self.imported_from.clean(cx),
            }),
        }
    }
}
/// Builds the clean `Item` for a procedural macro, recording its kind and
/// any helper attributes it declares.
impl Clean<Item> for doctree::ProcMacro<'_> {
    fn clean(&self, cx: &DocContext<'_>) -> Item {
        let inner = ProcMacroItem(ProcMacro { kind: self.kind, helpers: self.helpers.clean(cx) });
        Item {
            name: Some(self.name.clean(cx)),
            attrs: self.attrs.clean(cx),
            source: self.whence.clean(cx),
            def_id: cx.tcx.hir().local_def_id(self.id),
            visibility: Public,
            stability: cx.stability(self.id).clean(cx),
            deprecation: cx.deprecation(self.id).clean(cx),
            inner,
        }
    }
}
/// Converts a parsed `#[stable]`/`#[unstable]` attribute into the clean
/// `Stability` record used for rendering stability information.
impl Clean<Stability> for attr::Stability {
    fn clean(&self, _: &DocContext<'_>) -> Stability {
        Stability {
            level: stability::StabilityLevel::from_attr_level(&self.level),
            // Empty strings are normalized to `None` throughout.
            feature: Some(self.feature.to_string()).filter(|f| !f.is_empty()),
            since: match self.level {
                attr::Stable { ref since } => since.to_string(),
                _ => String::new(),
            },
            // `#[rustc_deprecated]` riding along with the stability attribute.
            deprecation: self.rustc_depr.as_ref().map(|d| Deprecation {
                note: Some(d.reason.to_string()).filter(|r| !r.is_empty()),
                since: Some(d.since.to_string()).filter(|d| !d.is_empty()),
            }),
            unstable_reason: match self.level {
                attr::Unstable { reason: Some(ref reason), .. } => Some(reason.to_string()),
                _ => None,
            },
            issue: match self.level {
                attr::Unstable { issue, .. } => issue,
                _ => None,
            },
        }
    }
}
/// Converts a `#[deprecated]` attribute, normalizing empty `since`/`note`
/// strings to `None`.
impl Clean<Deprecation> for attr::Deprecation {
    fn clean(&self, _: &DocContext<'_>) -> Deprecation {
        let since = self.since.map(|s| s.to_string()).filter(|s| !s.is_empty());
        let note = self.note.map(|n| n.to_string()).filter(|n| !n.is_empty());
        Deprecation { since, note }
    }
}
/// Cleans an associated-type binding (`Item = Ty` or `Item: Bounds`).
impl Clean<TypeBinding> for hir::TypeBinding {
    fn clean(&self, cx: &DocContext<'_>) -> TypeBinding {
        let kind = self.kind.clean(cx);
        TypeBinding { kind, name: self.ident.name.clean(cx) }
    }
}
/// Maps the two HIR binding kinds — equality (`Item = Ty`) and constraint
/// (`Item: Bounds`) — onto their clean counterparts.
impl Clean<TypeBindingKind> for hir::TypeBindingKind {
    fn clean(&self, cx: &DocContext<'_>) -> TypeBindingKind {
        match *self {
            hir::TypeBindingKind::Constraint { ref bounds } => {
                let bounds = bounds.into_iter().map(|b| b.clean(cx)).collect();
                TypeBindingKind::Constraint { bounds }
            }
            hir::TypeBindingKind::Equality { ref ty } => {
                TypeBindingKind::Equality { ty: ty.clean(cx) }
            }
        }
    }
}
/// A flattened counterpart of `GenericBound`, built via the `From` impl below.
///
/// `TraitBound` carries the trait's path segments, the simplified bounds
/// derived from the path's `param_names`, the bound's generic parameters, and
/// the trait-bound modifier; `Outlives` carries a lifetime bound.
enum SimpleBound {
    TraitBound(Vec<PathSegment>, Vec<SimpleBound>, Vec<GenericParamDef>, hir::TraitBoundModifier),
    Outlives(Lifetime),
}
impl From<GenericBound> for SimpleBound {
    fn from(bound: GenericBound) -> Self {
        // Match on a clone so the original `bound` stays available for the
        // panic message in the unexpected case below.
        match bound.clone() {
            GenericBound::Outlives(lifetime) => SimpleBound::Outlives(lifetime),
            GenericBound::TraitBound(poly_trait, modifier) => {
                if let Type::ResolvedPath { path, param_names, .. } = poly_trait.trait_ {
                    // Recursively simplify any bounds attached to the path's
                    // parameter names; absent means no extra bounds.
                    let param_bounds = param_names.map_or_else(Vec::new, |params| {
                        params.iter().map(|p| SimpleBound::from(p.clone())).collect()
                    });
                    SimpleBound::TraitBound(
                        path.segments,
                        param_bounds,
                        poly_trait.generic_params,
                        modifier,
                    )
                } else {
                    panic!("Unexpected bound {:?}", bound)
                }
            }
        }
    }
}
| 39.285596 | 100 | 0.46415 |
1801eae1da677586523b19844ba1185d0fcafa9d | 2,603 | use std::mem::size_of;
use std::cmp::Ordering;
pub mod locksteparray;
#[cfg(test)]
pub mod test;
/// A `usize` known to be non-zero, enabling the niche optimization
/// (`Option<NonZeroUsize>` is the same size as `usize`).
///
/// Now a thin wrapper over `std::num::NonZeroUsize` (stable since Rust 1.28),
/// resolving the old TODO referencing rust-lang/rust#27730. The previous
/// implementation fabricated a `&'static ()` from an arbitrary integer, which
/// produced a null reference (undefined behavior) whenever the debug assert
/// was compiled out and `value` was 0.
#[derive(Copy, Clone)]
pub struct NonZeroUsize(::std::num::NonZeroUsize);

impl NonZeroUsize {
    /// Wraps `value` without checking it in release builds.
    ///
    /// # Safety
    /// `value` must be non-zero; a zero value is undefined behavior.
    #[inline]
    pub unsafe fn new(value: usize) -> Self {
        debug_assert!(value != 0, "usize was zero");
        NonZeroUsize(::std::num::NonZeroUsize::new_unchecked(value))
    }

    /// Returns the contained value.
    #[inline]
    pub fn get(self) -> usize {
        self.0.get()
    }
}
/// Reads up to `size_of::<usize>()` bytes as a big-endian unsigned integer.
///
/// An empty slice yields 0. Debug builds assert the slice fits in a `usize`.
pub fn partial_read(array: &[u8]) -> usize {
    debug_assert!(array.len() <= size_of::<usize>());
    let mut acc = 0usize;
    for &byte in array {
        // Shift previous bytes up and append the next one (big-endian).
        acc = (acc << 8) | usize::from(byte);
    }
    acc
}
/// Writes `value` into `array` in big-endian byte order.
///
/// Debug builds assert that `value` fits entirely within `array`
/// (no leftover high bytes after the write).
pub fn partial_write(array: &mut [u8], value: usize) {
    let mut remaining = value;
    // Fill from the least significant end: last byte first.
    for slot in array.iter_mut().rev() {
        *slot = remaining as u8;
        remaining >>= 8;
    }
    debug_assert_eq!(remaining, 0, "Remaining value");
}
/// Extension trait adding a front-to-back search over sorted slices.
pub trait SliceExt {
    type Item;
    /// Searches a sorted slice for `key`, scanning from the front.
    ///
    /// Returns `Ok(index)` of the first match, or `Err(index)` of the
    /// position where `key` could be inserted to keep the slice sorted
    /// (mirroring the contract of `binary_search`).
    fn linear_search(&self, key: &Self::Item) -> Result<usize, usize>
        where Self::Item: Ord;
}

impl<T> SliceExt for [T] {
    type Item = T;

    fn linear_search(&self, key: &T) -> Result<usize, usize>
        where T: Ord {
        // Find the first element that is not strictly less than `key`:
        // equal means found, greater means that's the insertion point.
        match self.iter().position(|item| item >= key) {
            Some(i) if self[i] == *key => Ok(i),
            Some(i) => Err(i),
            None => Err(self.len()),
        }
    }
}
#[test]
fn test_partial_read() {
    // 0x01 is the high byte, 0x02 the low byte (big-endian).
    assert_eq!(partial_read(&[1u8, 2u8][..]), 0x102);
}
#[test]
fn test_partial_write() {
    let mut buf = [0u8; 2];
    partial_write(&mut buf[..], 0x102);
    // Bytes land in big-endian order.
    assert_eq!(buf, [0x1u8, 0x2u8]);
}
#[test]
fn test_read_write() {
    // Writing then reading back `size` bytes must round-trip `value`.
    fn round_trip(value: usize, size: usize) {
        // Start from a 0xff-filled buffer sized to the platform word
        // so stale bytes would be detected.
        #[cfg(target_pointer_width = "32")]
        let mut buf = [0xffu8; 4];
        #[cfg(target_pointer_width = "64")]
        let mut buf = [0xffu8; 8];
        partial_write(&mut buf[..size], value);
        assert_eq!(partial_read(&buf[..size]), value);
    }
    round_trip(0x0201, 2);
    round_trip(0x32659374, 4);
}
#[test]
fn test_find_item() {
    // 2 is present at index 2.
    assert_eq!([0, 1, 2, 3].linear_search(&2), Ok(2));
}
#[test]
fn test_find_open() {
    // 2 is absent; it would be inserted at index 3.
    assert_eq!([0, 1, 1, 3].linear_search(&2), Err(3));
}
#[test]
fn test_find_end_open() {
    // 4 exceeds every element; insertion point is the slice length.
    assert_eq!([0, 1, 2, 3].linear_search(&4), Err(4))
}
| 23.663636 | 79 | 0.546677 |
f8b5254dc5bbe3350054767a597b9a1c3ef74468 | 935 | // arc1.rs
// Make this code compile by filling in a value for `shared_numbers` where the
// TODO comment is and create an initial binding for `child_numbers`
// somewhere. Try not to create any copies of the `numbers` Vec!
// Execute `rustlings hint arc1` for hints :)
use std::sync::Arc;
use std::thread;
fn main() {
    let numbers: Vec<_> = (0..100u32).collect();
    // Share the single Vec across all threads without copying it.
    let shared_numbers = Arc::new(numbers);
    let mut joinhandles = Vec::new();
    for offset in 0..8 {
        // Each thread gets its own handle to the shared Vec (refcount bump,
        // not a data copy).
        let child_numbers = shared_numbers.clone();
        joinhandles.push(thread::spawn(move || {
            let mut i = offset;
            let mut sum = 0;
            // Stride by the thread count (8) so the 8 workers partition the
            // indices with each element counted exactly once. The previous
            // stride of 5 made offsets 5..8 revisit index classes already
            // covered by offsets 0..3, double-counting those elements.
            while i < child_numbers.len() {
                sum += child_numbers[i];
                i += 8;
            }
            println!("Sum of offset {} is {}", offset, sum);
        }));
    }
    // Wait for every worker before exiting.
    for handle in joinhandles.into_iter() {
        handle.join().unwrap();
    }
}
| 29.21875 | 78 | 0.571123 |
87b65e558bfaa49ef11a9cb23e025714550add7e | 453 | extern crate bcrypt;
use crate::schema::users;
use bcrypt::verify;
use diesel::{Identifiable, Queryable};
#[derive(Queryable, Clone, Identifiable)]
#[table_name = "users"]
// Row model for the `users` table.
// NOTE(review): diesel's `Queryable` maps fields by column position — keep
// this field order in sync with the `users` table schema; verify on change.
pub struct User {
    pub id: i32,
    pub username: String,
    pub email: String,
    // Stored as a bcrypt hash, not plaintext (checked via `User::verify`).
    pub password: String,
    pub unique_id: String,
}
impl User {
    /// Checks `password` against the stored bcrypt hash.
    ///
    /// Returns `false` both when the password does not match and when the
    /// stored hash is malformed: a corrupt hash in the database should fail
    /// authentication rather than panic the caller, which the previous
    /// `.unwrap()` on `bcrypt::verify`'s `Result` would have done.
    pub fn verify(self, password: String) -> bool {
        verify(password.as_str(), &self.password).unwrap_or(false)
    }
}
| 20.590909 | 66 | 0.662252 |
5022ff463efff11628fdc40dd8aa17da6c6b6be6 | 5,780 | //! Returns a list of channels (users who have streamed within the past 6 months) that match the query via channel name or description either entirely or partially.
//! [`search-channels`](https://dev.twitch.tv/docs/api/reference#search-channels)
//!
//! # Accessing the endpoint
//!
//! ## Request: [SearchChannelsRequest]
//!
//! To use this endpoint, construct a [`SearchChannelsRequest`] with the [`SearchChannelsRequest::builder()`] method.
//!
//! ```rust, no_run
//! use twitch_api2::helix::search::search_channels;
//! let request = search_channels::SearchChannelsRequest::builder()
//! .query("hello")
//! .build();
//! ```
//!
//! ## Response: [Channel]
//!
//! Send the request to receive the response with [`HelixClient::req_get()`](helix::HelixClient::req_get).
//!
//! ```rust, no_run
//! use twitch_api2::helix::{self, search::search_channels};
//! # use twitch_api2::client;
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
//! # let client: helix::HelixClient<'static, client::DummyHttpClient> = helix::HelixClient::default();
//! # let token = twitch_oauth2::AccessToken::new("validtoken".to_string());
//! # let token = twitch_oauth2::UserToken::from_existing(twitch_oauth2::dummy_http_client, token, None, None).await?;
//! let request = search_channels::SearchChannelsRequest::builder()
//! .query("hello")
//! .build();
//! let response: Vec<search_channels::Channel> = client.req_get(request, &token).await?.data;
//! # Ok(())
//! # }
//! ```
//!
//! You can also get the [`http::Request`] with [`request.create_request(&token, &client_id)`](helix::RequestGet::create_request)
//! and parse the [`http::Response`] with [`SearchChannelsRequest::parse_response(None, &request.get_uri(), response)`](SearchChannelsRequest::parse_response)
use super::*;
use helix::RequestGet;
/// Query Parameters for [Search Channels](super::search_channels)
///
/// [`search-channels`](https://dev.twitch.tv/docs/api/reference#search-channels)
#[derive(PartialEq, typed_builder::TypedBuilder, Deserialize, Serialize, Clone, Debug)]
#[non_exhaustive]
pub struct SearchChannelsRequest {
/// URL encoded search query
#[builder(setter(into))]
pub query: String,
/// Cursor for forward pagination: tells the server where to start fetching the next set of results, in a multi-page response. The cursor value specified here is from the pagination response field of a prior query.
#[builder(default)]
pub after: Option<helix::Cursor>,
/// Maximum number of objects to return. Maximum: 100 Default: 20
#[builder(default)] // FIXME: No setter because int
pub first: Option<usize>,
/// Filter results for live streams only. Default: false
#[builder(default, setter(into))]
pub live_only: Option<bool>,
}
/// Return Values for [Search Channels](super::search_channels)
///
/// [`search-channels`](https://dev.twitch.tv/docs/api/reference#search-channels)
#[derive(PartialEq, Deserialize, Debug, Clone)]
#[cfg_attr(feature = "deny_unknown_fields", serde(deny_unknown_fields))]
#[non_exhaustive]
pub struct Channel {
    /// ID of the game being played on the stream
    pub game_id: types::CategoryId,
    /// Name of the game being played on the stream.
    pub game_name: String,
    /// Channel ID
    pub id: types::UserId,
    /// Display name corresponding to user_id
    pub display_name: types::DisplayName,
    /// Channel language (Broadcaster Language field from the [Channels service][crate::helix::channels])
    pub broadcaster_language: String,
    /// Login of the broadcaster.
    pub broadcaster_login: types::UserName,
    /// channel title
    pub title: String,
    /// Thumbnail URL of the stream. All image URLs have variable width and height. You can replace {width} and {height} with any values to get that size image.
    pub thumbnail_url: String,
    /// Live status
    pub is_live: bool,
    /// UTC timestamp. (live only)
    pub started_at: types::Timestamp,
    // FIXME: Twitch docs say `tag_ids`; the sample payload in `test_request`
    // below also uses `tag_ids`, so this name matches the wire format.
    /// Shows tag IDs that apply to the stream (live only).See <https://www.twitch.tv/directory/all/tags> for tag types
    pub tag_ids: Vec<types::TagId>,
}
// Endpoint wiring: GET helix/search/channels, deserializing into a channel list.
impl Request for SearchChannelsRequest {
    type Response = Vec<Channel>;
    const PATH: &'static str = "search/channels";
    // No OAuth scopes are required for this endpoint.
    #[cfg(feature = "twitch_oauth2")]
    const SCOPE: &'static [twitch_oauth2::Scope] = &[];
}
impl RequestGet for SearchChannelsRequest {}

// Forward pagination support: stores the cursor from the previous response
// so the next request resumes where the last page ended.
impl helix::Paginated for SearchChannelsRequest {
    fn set_pagination(&mut self, cursor: Option<helix::Cursor>) { self.after = cursor }
}
#[test]
fn test_request() {
    use helix::*;
    let request = SearchChannelsRequest::builder().query("fort").build();
    // The query parameter must be serialized into the request URI.
    let uri = request.get_uri().unwrap();
    assert_eq!(
        uri.to_string(),
        "https://api.twitch.tv/helix/search/channels?query=fort"
    );
    // Sample payload from the official Twitch documentation.
    let body = br#"
{
    "data": [
        {
            "broadcaster_language": "en",
            "broadcaster_login": "a_seagull",
            "display_name": "A_Seagull",
            "game_id": "506442",
            "game_name": "DOOM Eternal",
            "id": "19070311",
            "is_live": true,
            "tag_ids": [
                "6ea6bca4-4712-4ab9-a906-e3336a9d8039"
            ],
            "thumbnail_url": "https://static-cdn.jtvnw.net/jtv_user_pictures/a_seagull-profile_image-4d2d235688c7dc66-300x300.png",
            "title": "a_seagull",
            "started_at": "2020-03-18T17:56:00Z"
        }
    ],
    "pagination": {}
}
"#
    .to_vec();
    let response = http::Response::builder().body(body).unwrap();
    // Parsing the documented payload must succeed.
    dbg!(SearchChannelsRequest::parse_response(Some(request), &uri, response).unwrap());
}
| 38.791946 | 218 | 0.663149 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.